* refactor(chalice): upgraded dependencies

* refactor(chalice): upgraded dependencies
feat(chalice): support heatmaps

* feat(chalice): support table-of-browsers showing user-count

* feat(chalice): support table-of-devices showing user-count

* feat(chalice): support table-of-URLs showing user-count

* fix(chalice): fixed Math-operators validation
refactor(chalice): search for sessions that have events for heatmaps

* refactor(chalice): search for sessions that have at least 1 location event for heatmaps

* refactor(chalice): upgraded dependencies

* refactor(chalice): upgraded dependencies
feat(chalice): support heatmaps

* feat(chalice): support table-of-browsers showing user-count

* feat(chalice): support table-of-devices showing user-count

* feat(chalice): support table-of-URLs showing user-count

* fix(chalice): fixed Math-operators validation
refactor(chalice): search for sessions that have events for heatmaps

* refactor(chalice): search for sessions that have at least 1 location event for heatmaps

* refactor(chalice): refactored search sessions hooks

* refactor(DB): DB delta

* refactor(DB): DB delta

* refactor(DB): DB delta

* refactor(chalice): refactored schemas

* refactor(chalice): refactored schemas
refactor(chalice): cleaned scripts
feat(chalice): search sessions by CSS selector (PG)
This commit is contained in:
Kraiem Taha Yassine 2024-07-18 17:57:37 +02:00 committed by GitHub
parent 5cc6915ed6
commit dd5ff6bad8
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
56 changed files with 1484 additions and 1381 deletions

View file

@ -229,5 +229,5 @@ def get_predefined_values():
"unit": "count" if v.endswith(".count") else "ms",
"predefined": True,
"metricId": None,
"seriesId": None} for v in values if v != schemas.AlertColumn.custom]
"seriesId": None} for v in values if v != schemas.AlertColumn.CUSTOM]
return values

View file

@ -14,60 +14,60 @@ from chalicelib.utils.TimeUTC import TimeUTC
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
LeftToDb = {
schemas.AlertColumn.performance__dom_content_loaded__average: {
schemas.AlertColumn.PERFORMANCE__DOM_CONTENT_LOADED__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "COALESCE(AVG(NULLIF(dom_content_loaded_time ,0)),0)"},
schemas.AlertColumn.performance__first_meaningful_paint__average: {
schemas.AlertColumn.PERFORMANCE__FIRST_MEANINGFUL_PAINT__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)"},
schemas.AlertColumn.performance__page_load_time__average: {
schemas.AlertColumn.PERFORMANCE__PAGE_LOAD_TIME__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(load_time ,0))"},
schemas.AlertColumn.performance__dom_build_time__average: {
schemas.AlertColumn.PERFORMANCE__DOM_BUILD_TIME__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(dom_building_time,0))"},
schemas.AlertColumn.performance__speed_index__average: {
schemas.AlertColumn.PERFORMANCE__SPEED_INDEX__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(speed_index,0))"},
schemas.AlertColumn.performance__page_response_time__average: {
schemas.AlertColumn.PERFORMANCE__PAGE_RESPONSE_TIME__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(response_time,0))"},
schemas.AlertColumn.performance__ttfb__average: {
schemas.AlertColumn.PERFORMANCE__TTFB__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(first_paint_time,0))"},
schemas.AlertColumn.performance__time_to_render__average: {
schemas.AlertColumn.PERFORMANCE__TIME_TO_RENDER__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(visually_complete,0))"},
schemas.AlertColumn.performance__image_load_time__average: {
schemas.AlertColumn.PERFORMANCE__IMAGE_LOAD_TIME__AVERAGE: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='img'"},
schemas.AlertColumn.performance__request_load_time__average: {
schemas.AlertColumn.PERFORMANCE__REQUEST_LOAD_TIME__AVERAGE: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='fetch'"},
schemas.AlertColumn.resources__load_time__average: {
schemas.AlertColumn.RESOURCES__LOAD_TIME__AVERAGE: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(resources.duration,0))"},
schemas.AlertColumn.resources__missing__count: {
schemas.AlertColumn.RESOURCES__MISSING__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(DISTINCT url_hostpath)", "condition": "success= FALSE AND type='img'"},
schemas.AlertColumn.errors__4xx_5xx__count: {
schemas.AlertColumn.ERRORS__4XX_5XX__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "COUNT(session_id)",
"condition": "status/100!=2"},
schemas.AlertColumn.errors__4xx__count: {
schemas.AlertColumn.ERRORS__4XX__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(session_id)", "condition": "status/100=4"},
schemas.AlertColumn.errors__5xx__count: {
schemas.AlertColumn.ERRORS__5XX__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(session_id)", "condition": "status/100=5"},
schemas.AlertColumn.errors__javascript__impacted_sessions__count: {
schemas.AlertColumn.ERRORS__JAVASCRIPT__IMPACTED_SESSIONS__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(DISTINCT session_id)", "condition": "success= FALSE AND type='script'"},
schemas.AlertColumn.performance__crashes__count: {
schemas.AlertColumn.PERFORMANCE__CRASHES__COUNT: {
"table": "public.sessions",
"formula": "COUNT(DISTINCT session_id)",
"condition": "errors_count > 0 AND duration>0"},
schemas.AlertColumn.errors__javascript__count: {
schemas.AlertColumn.ERRORS__JAVASCRIPT__COUNT: {
"table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
"formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'", "joinSessions": False},
schemas.AlertColumn.errors__backend__count: {
schemas.AlertColumn.ERRORS__BACKEND__COUNT: {
"table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
"formula": "COUNT(DISTINCT session_id)", "condition": "source!='js_exception'", "joinSessions": False},
}
@ -87,7 +87,7 @@ def can_check(a) -> bool:
now = TimeUTC.now()
repetitionBase = a["options"]["currentPeriod"] \
if a["detectionMethod"] == schemas.AlertDetectionMethod.change \
if a["detectionMethod"] == schemas.AlertDetectionMethod.CHANGE \
and a["options"]["currentPeriod"] > a["options"]["previousPeriod"] \
else a["options"]["previousPeriod"]
@ -110,7 +110,7 @@ def Build(a):
main_table = ""
if a["seriesId"] is not None:
a["filter"]["sort"] = "session_id"
a["filter"]["order"] = schemas.SortOrderType.desc
a["filter"]["order"] = schemas.SortOrderType.DESC
a["filter"]["startDate"] = 0
a["filter"]["endDate"] = TimeUTC.now()
try:
@ -136,7 +136,7 @@ def Build(a):
is_ss = main_table == "public.sessions"
q = f"""SELECT coalesce(value,0) AS value, coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid"""
if a["detectionMethod"] == schemas.AlertDetectionMethod.threshold:
if a["detectionMethod"] == schemas.AlertDetectionMethod.THRESHOLD:
if a["seriesId"] is not None:
q += f""" FROM ({subQ}) AS stat"""
else:
@ -144,7 +144,7 @@ def Build(a):
{"AND start_ts >= %(startDate)s AND start_ts <= %(now)s" if j_s else ""}) AS stat"""
params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000}
else:
if a["change"] == schemas.AlertDetectionType.change:
if a["change"] == schemas.AlertDetectionType.CHANGE:
if a["seriesId"] is not None:
sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
sub1 = f"SELECT (({subQ})-({sub2})) AS value"

View file

@ -52,7 +52,7 @@ def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSche
"sort": {"key": body.sort, "order": body.order}
}
for f in body.filters:
if f.type == schemas.LiveFilterType.metadata:
if f.type == schemas.LiveFilterType.METADATA:
data["filter"][f.source] = {"values": f.value, "operator": f.operator}
else:
@ -281,23 +281,23 @@ def session_exists(project_id, session_id):
def __change_keys(key):
return {
"PAGETITLE": schemas.LiveFilterType.page_title.value,
"PAGETITLE": schemas.LiveFilterType.PAGE_TITLE.value,
"ACTIVE": "active",
"LIVE": "live",
"SESSIONID": schemas.LiveFilterType.session_id.value,
"METADATA": schemas.LiveFilterType.metadata.value,
"USERID": schemas.LiveFilterType.user_id.value,
"USERUUID": schemas.LiveFilterType.user_UUID.value,
"SESSIONID": schemas.LiveFilterType.SESSION_ID.value,
"METADATA": schemas.LiveFilterType.METADATA.value,
"USERID": schemas.LiveFilterType.USER_ID.value,
"USERUUID": schemas.LiveFilterType.USER_UUID.value,
"PROJECTKEY": "projectKey",
"REVID": schemas.LiveFilterType.rev_id.value,
"REVID": schemas.LiveFilterType.REV_ID.value,
"TIMESTAMP": "timestamp",
"TRACKERVERSION": schemas.LiveFilterType.tracker_version.value,
"TRACKERVERSION": schemas.LiveFilterType.TRACKER_VERSION.value,
"ISSNIPPET": "isSnippet",
"USEROS": schemas.LiveFilterType.user_os.value,
"USERBROWSER": schemas.LiveFilterType.user_browser.value,
"USERBROWSERVERSION": schemas.LiveFilterType.user_browser_version.value,
"USERDEVICE": schemas.LiveFilterType.user_device.value,
"USERDEVICETYPE": schemas.LiveFilterType.user_device_type.value,
"USERCOUNTRY": schemas.LiveFilterType.user_country.value,
"USEROS": schemas.LiveFilterType.USER_OS.value,
"USERBROWSER": schemas.LiveFilterType.USER_BROWSER.value,
"USERBROWSERVERSION": schemas.LiveFilterType.USER_BROWSER_VERSION.value,
"USERDEVICE": schemas.LiveFilterType.USER_DEVICE.value,
"USERDEVICETYPE": schemas.LiveFilterType.USER_DEVICE_TYPE.value,
"USERCOUNTRY": schemas.LiveFilterType.USER_COUNTRY.value,
"PROJECTID": "projectId"
}.get(key.upper(), key)

View file

@ -8,23 +8,23 @@ TABLE = "public.autocomplete"
def __get_autocomplete_table(value, project_id):
autocomplete_events = [schemas.FilterType.rev_id,
schemas.EventType.click,
schemas.FilterType.user_device,
schemas.FilterType.user_id,
schemas.FilterType.user_browser,
schemas.FilterType.user_os,
schemas.EventType.custom,
schemas.FilterType.user_country,
schemas.FilterType.user_city,
schemas.FilterType.user_state,
schemas.EventType.location,
schemas.EventType.input]
autocomplete_events = [schemas.FilterType.REV_ID,
schemas.EventType.CLICK,
schemas.FilterType.USER_DEVICE,
schemas.FilterType.USER_ID,
schemas.FilterType.USER_BROWSER,
schemas.FilterType.USER_OS,
schemas.EventType.CUSTOM,
schemas.FilterType.USER_COUNTRY,
schemas.FilterType.USER_CITY,
schemas.FilterType.USER_STATE,
schemas.EventType.LOCATION,
schemas.EventType.INPUT]
autocomplete_events.sort()
sub_queries = []
c_list = []
for e in autocomplete_events:
if e == schemas.FilterType.user_country:
if e == schemas.FilterType.USER_COUNTRY:
c_list = countries.get_country_code_autocomplete(value)
if len(c_list) > 0:
sub_queries.append(f"""(SELECT DISTINCT ON(value) '{e.value}' AS _type, value
@ -72,7 +72,7 @@ def __get_autocomplete_table(value, project_id):
def __generic_query(typename, value_length=None):
if typename == schemas.FilterType.user_country:
if typename == schemas.FilterType.USER_COUNTRY:
return f"""SELECT DISTINCT value, type
FROM {TABLE}
WHERE
@ -127,7 +127,7 @@ def __generic_autocomplete_metas(typename):
params = {"project_id": project_id, "value": helper.string_to_sql_like(text),
"svalue": helper.string_to_sql_like("^" + text)}
if typename == schemas.FilterType.user_country:
if typename == schemas.FilterType.USER_COUNTRY:
params["value"] = tuple(countries.get_country_code_autocomplete(text))
if len(params["value"]) == 0:
return []

View file

@ -15,12 +15,12 @@ class MSTeams(BaseCollaboration):
@classmethod
def add(cls, tenant_id, data: schemas.AddCollaborationSchema):
if webhook.exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=None,
webhook_type=schemas.WebhookType.msteams):
webhook_type=schemas.WebhookType.MSTEAMS):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if cls.say_hello(data.url):
return webhook.add(tenant_id=tenant_id,
endpoint=data.url.unicode_string(),
webhook_type=schemas.WebhookType.msteams,
webhook_type=schemas.WebhookType.MSTEAMS,
name=data.name)
return None
@ -157,9 +157,9 @@ class MSTeams(BaseCollaboration):
def get_integration(cls, tenant_id, integration_id=None):
if integration_id is not None:
return webhook.get_webhook(tenant_id=tenant_id, webhook_id=integration_id,
webhook_type=schemas.WebhookType.msteams)
webhook_type=schemas.WebhookType.MSTEAMS)
integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.msteams)
integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.MSTEAMS)
if integrations is None or len(integrations) == 0:
return None
return integrations[0]

View file

@ -13,12 +13,12 @@ class Slack(BaseCollaboration):
@classmethod
def add(cls, tenant_id, data: schemas.AddCollaborationSchema):
if webhook.exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=None,
webhook_type=schemas.WebhookType.slack):
webhook_type=schemas.WebhookType.SLACK):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
if cls.say_hello(data.url):
return webhook.add(tenant_id=tenant_id,
endpoint=data.url.unicode_string(),
webhook_type=schemas.WebhookType.slack,
webhook_type=schemas.WebhookType.SLACK,
name=data.name)
return None
@ -118,9 +118,9 @@ class Slack(BaseCollaboration):
def get_integration(cls, tenant_id, integration_id=None):
if integration_id is not None:
return webhook.get_webhook(tenant_id=tenant_id, webhook_id=integration_id,
webhook_type=schemas.WebhookType.slack)
webhook_type=schemas.WebhookType.SLACK)
integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.slack)
integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.SLACK)
if integrations is None or len(integrations) == 0:
return None
return integrations[0]

View file

@ -139,14 +139,14 @@ def __get_table_of_urls(project_id: int, data: schemas.CardTable, user_id: int =
def __get_table_chart(project_id: int, data: schemas.CardTable, user_id: int):
supported = {
schemas.MetricOfTable.sessions: __get_table_of_sessions,
schemas.MetricOfTable.errors: __get_table_of_errors,
schemas.MetricOfTable.user_id: __get_table_of_user_ids,
schemas.MetricOfTable.issues: __get_table_of_issues,
schemas.MetricOfTable.user_browser: __get_table_of_browsers,
schemas.MetricOfTable.user_device: __get_table_of_devises,
schemas.MetricOfTable.user_country: __get_table_of_countries,
schemas.MetricOfTable.visited_url: __get_table_of_urls,
schemas.MetricOfTable.SESSIONS: __get_table_of_sessions,
schemas.MetricOfTable.ERRORS: __get_table_of_errors,
schemas.MetricOfTable.USER_ID: __get_table_of_user_ids,
schemas.MetricOfTable.ISSUES: __get_table_of_issues,
schemas.MetricOfTable.USER_BROWSER: __get_table_of_browsers,
schemas.MetricOfTable.USER_DEVICE: __get_table_of_devises,
schemas.MetricOfTable.USER_COUNTRY: __get_table_of_countries,
schemas.MetricOfTable.VISITED_URL: __get_table_of_urls,
}
return supported.get(data.metric_of, not_supported)(project_id=project_id, data=data, user_id=user_id)
@ -158,12 +158,12 @@ def get_chart(project_id: int, data: schemas.CardSchema, user_id: int):
data=data.model_dump())
supported = {
schemas.MetricType.timeseries: __get_timeseries_chart,
schemas.MetricType.table: __get_table_chart,
schemas.MetricType.heat_map: __get_heat_map_chart,
schemas.MetricType.funnel: __get_funnel_chart,
schemas.MetricType.insights: not_supported,
schemas.MetricType.pathAnalysis: __get_path_analysis_chart
schemas.MetricType.TIMESERIES: __get_timeseries_chart,
schemas.MetricType.TABLE: __get_table_chart,
schemas.MetricType.HEAT_MAP: __get_heat_map_chart,
schemas.MetricType.FUNNEL: __get_funnel_chart,
schemas.MetricType.INSIGHTS: not_supported,
schemas.MetricType.PATH_ANALYSIS: __get_path_analysis_chart
}
return supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id)
@ -273,18 +273,18 @@ def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.Card
filters=filters
)
# ---- To make issues response close to the chart response
search_data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.events_count,
operator=schemas.MathOperator._greater,
search_data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
operator=schemas.MathOperator.GREATER,
value=[1]))
if len(data.start_point) == 0:
search_data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.location,
operator=schemas.SearchEventOperator._is_any,
search_data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
operator=schemas.SearchEventOperator.IS_ANY,
value=[]))
# ---- End
for s in data.excludes:
search_data.events.append(schemas.SessionSearchEventSchema2(type=s.type,
operator=schemas.SearchEventOperator._not_on,
operator=schemas.SearchEventOperator.NOT_ON,
value=s.value))
result = sessions.search_table_of_individual_issues(project_id=project_id, data=search_data)
return result
@ -293,15 +293,15 @@ def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.Card
def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
if data.is_predefined:
return not_supported()
if data.metric_of == schemas.MetricOfTable.issues:
if data.metric_of == schemas.MetricOfTable.ISSUES:
return __get_table_of_issues(project_id=project_id, user_id=user_id, data=data)
supported = {
schemas.MetricType.timeseries: not_supported,
schemas.MetricType.table: not_supported,
schemas.MetricType.heat_map: not_supported,
schemas.MetricType.funnel: __get_funnel_issues,
schemas.MetricType.insights: not_supported,
schemas.MetricType.pathAnalysis: __get_path_analysis_issues,
schemas.MetricType.TIMESERIES: not_supported,
schemas.MetricType.TABLE: not_supported,
schemas.MetricType.HEAT_MAP: not_supported,
schemas.MetricType.FUNNEL: __get_funnel_issues,
schemas.MetricType.INSIGHTS: not_supported,
schemas.MetricType.PATH_ANALYSIS: __get_path_analysis_issues,
}
return supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id)
@ -317,7 +317,7 @@ def __get_path_analysis_card_info(data: schemas.CardPathAnalysis):
def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
with pg_client.PostgresClient() as cur:
session_data = None
if data.metric_type == schemas.MetricType.heat_map:
if data.metric_type == schemas.MetricType.HEAT_MAP:
if data.session_id is not None:
session_data = {"sessionId": data.session_id}
else:
@ -336,7 +336,7 @@ def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
params = {"user_id": user_id, "project_id": project_id, **data.model_dump(), **_data}
params["default_config"] = json.dumps(data.default_config.model_dump())
params["card_info"] = None
if data.metric_type == schemas.MetricType.pathAnalysis:
if data.metric_type == schemas.MetricType.PATH_ANALYSIS:
params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))
query = """INSERT INTO metrics (project_id, user_id, name, is_public,
@ -399,9 +399,9 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
params["d_series_ids"] = tuple(d_series_ids)
params["card_info"] = None
params["session_data"] = json.dumps(metric["data"])
if data.metric_type == schemas.MetricType.pathAnalysis:
if data.metric_type == schemas.MetricType.PATH_ANALYSIS:
params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))
elif data.metric_type == schemas.MetricType.heat_map:
elif data.metric_type == schemas.MetricType.HEAT_MAP:
if data.session_id is not None:
params["session_data"] = json.dumps({"sessionId": data.session_id})
elif metric.get("data") and metric["data"].get("sessionId"):
@ -465,7 +465,7 @@ def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_ser
if data.query is not None and len(data.query) > 0:
constraints.append("(name ILIKE %(query)s OR owner.owner_email ILIKE %(query)s)")
params["query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
op=schemas.SearchEventOperator.CONTAINS)
with pg_client.PostgresClient() as cur:
sub_join = ""
if include_series:
@ -592,7 +592,7 @@ def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data:
for s in row["series"]:
s["filter"] = helper.old_search_payload_to_flat(s["filter"])
row = helper.dict_to_camel_case(row)
if row["metricType"] == schemas.MetricType.pathAnalysis:
if row["metricType"] == schemas.MetricType.PATH_ANALYSIS:
row = __get_path_analysis_attributes(row=row)
return row
@ -691,7 +691,7 @@ def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessi
return custom_metrics_predefined.get_metric(key=metric.metric_of,
project_id=project_id,
data=data.model_dump())
elif metric.metric_type == schemas.MetricType.heat_map:
elif metric.metric_type == schemas.MetricType.HEAT_MAP:
if raw_metric["data"] and raw_metric["data"].get("sessionId"):
return heatmaps.get_selected_session(project_id=project_id,
session_id=raw_metric["data"]["sessionId"])

View file

@ -12,51 +12,51 @@ logger = logging.getLogger(__name__)
def get_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict):
supported = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions,
schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
schemas.MetricOfWebVitals.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
schemas.MetricOfWebVitals.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
schemas.MetricOfWebVitals.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
schemas.MetricOfWebVitals.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
schemas.MetricOfWebVitals.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
schemas.MetricOfWebVitals.avg_session_duration: metrics.get_user_activity_avg_session_duration,
schemas.MetricOfWebVitals.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.MetricOfWebVitals.avg_pages_response_time: metrics.get_pages_response_time,
schemas.MetricOfWebVitals.avg_response_time: metrics.get_top_metrics_avg_response_time,
schemas.MetricOfWebVitals.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
schemas.MetricOfWebVitals.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
schemas.MetricOfWebVitals.avg_till_first_byte: metrics.get_top_metrics_avg_till_first_bit,
schemas.MetricOfWebVitals.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
schemas.MetricOfWebVitals.count_requests: metrics.get_top_metrics_count_requests,
schemas.MetricOfWebVitals.avg_time_to_render: metrics.get_time_to_render,
schemas.MetricOfWebVitals.avg_used_js_heap_size: metrics.get_memory_consumption,
supported = {schemas.MetricOfWebVitals.COUNT_SESSIONS: metrics.get_processed_sessions,
schemas.MetricOfWebVitals.AVG_IMAGE_LOAD_TIME: metrics.get_application_activity_avg_image_load_time,
schemas.MetricOfWebVitals.AVG_PAGE_LOAD_TIME: metrics.get_application_activity_avg_page_load_time,
schemas.MetricOfWebVitals.AVG_REQUEST_LOAD_TIME: metrics.get_application_activity_avg_request_load_time,
schemas.MetricOfWebVitals.AVG_DOM_CONTENT_LOAD_START: metrics.get_page_metrics_avg_dom_content_load_start,
schemas.MetricOfWebVitals.AVG_FIRST_CONTENTFUL_PIXEL: metrics.get_page_metrics_avg_first_contentful_pixel,
schemas.MetricOfWebVitals.AVG_VISITED_PAGES: metrics.get_user_activity_avg_visited_pages,
schemas.MetricOfWebVitals.AVG_SESSION_DURATION: metrics.get_user_activity_avg_session_duration,
schemas.MetricOfWebVitals.AVG_PAGES_DOM_BUILDTIME: metrics.get_pages_dom_build_time,
schemas.MetricOfWebVitals.AVG_PAGES_RESPONSE_TIME: metrics.get_pages_response_time,
schemas.MetricOfWebVitals.AVG_RESPONSE_TIME: metrics.get_top_metrics_avg_response_time,
schemas.MetricOfWebVitals.AVG_FIRST_PAINT: metrics.get_top_metrics_avg_first_paint,
schemas.MetricOfWebVitals.AVG_DOM_CONTENT_LOADED: metrics.get_top_metrics_avg_dom_content_loaded,
schemas.MetricOfWebVitals.AVG_TILL_FIRST_BYTE: metrics.get_top_metrics_avg_till_first_bit,
schemas.MetricOfWebVitals.AVG_TIME_TO_INTERACTIVE: metrics.get_top_metrics_avg_time_to_interactive,
schemas.MetricOfWebVitals.COUNT_REQUESTS: metrics.get_top_metrics_count_requests,
schemas.MetricOfWebVitals.AVG_TIME_TO_RENDER: metrics.get_time_to_render,
schemas.MetricOfWebVitals.AVG_USED_JS_HEAP_SIZE: metrics.get_memory_consumption,
schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu,
schemas.MetricOfWebVitals.avg_fps: metrics.get_avg_fps,
schemas.MetricOfErrors.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
schemas.MetricOfErrors.domains_errors_4xx: metrics.get_domains_errors_4xx,
schemas.MetricOfErrors.domains_errors_5xx: metrics.get_domains_errors_5xx,
schemas.MetricOfErrors.errors_per_domains: metrics.get_errors_per_domains,
schemas.MetricOfErrors.calls_errors: metrics.get_calls_errors,
schemas.MetricOfErrors.errors_per_type: metrics.get_errors_per_type,
schemas.MetricOfErrors.resources_by_party: metrics.get_resources_by_party,
schemas.MetricOfPerformance.speed_location: metrics.get_speed_index_location,
schemas.MetricOfPerformance.slowest_domains: metrics.get_slowest_domains,
schemas.MetricOfPerformance.sessions_per_browser: metrics.get_sessions_per_browser,
schemas.MetricOfPerformance.time_to_render: metrics.get_time_to_render,
schemas.MetricOfPerformance.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
schemas.MetricOfPerformance.memory_consumption: metrics.get_memory_consumption,
schemas.MetricOfPerformance.cpu: metrics.get_avg_cpu,
schemas.MetricOfPerformance.fps: metrics.get_avg_fps,
schemas.MetricOfPerformance.crashes: metrics.get_crashes,
schemas.MetricOfPerformance.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
schemas.MetricOfPerformance.pages_dom_buildtime: metrics.get_pages_dom_build_time,
schemas.MetricOfPerformance.pages_response_time: metrics.get_pages_response_time,
schemas.MetricOfPerformance.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
schemas.MetricOfResources.missing_resources: metrics.get_missing_resources_trend,
schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources,
schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time,
schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type,
schemas.MetricOfWebVitals.count_users: metrics.get_unique_users,}
schemas.MetricOfWebVitals.AVG_FPS: metrics.get_avg_fps,
schemas.MetricOfErrors.IMPACTED_SESSIONS_BY_JS_ERRORS: metrics.get_impacted_sessions_by_js_errors,
schemas.MetricOfErrors.DOMAINS_ERRORS_4XX: metrics.get_domains_errors_4xx,
schemas.MetricOfErrors.DOMAINS_ERRORS_5XX: metrics.get_domains_errors_5xx,
schemas.MetricOfErrors.ERRORS_PER_DOMAINS: metrics.get_errors_per_domains,
schemas.MetricOfErrors.CALLS_ERRORS: metrics.get_calls_errors,
schemas.MetricOfErrors.ERRORS_PER_TYPE: metrics.get_errors_per_type,
schemas.MetricOfErrors.RESOURCES_BY_PARTY: metrics.get_resources_by_party,
schemas.MetricOfPerformance.SPEED_LOCATION: metrics.get_speed_index_location,
schemas.MetricOfPerformance.SLOWEST_DOMAINS: metrics.get_slowest_domains,
schemas.MetricOfPerformance.SESSIONS_PER_BROWSER: metrics.get_sessions_per_browser,
schemas.MetricOfPerformance.TIME_TO_RENDER: metrics.get_time_to_render,
schemas.MetricOfPerformance.IMPACTED_SESSIONS_BY_SLOW_PAGES: metrics.get_impacted_sessions_by_slow_pages,
schemas.MetricOfPerformance.MEMORY_CONSUMPTION: metrics.get_memory_consumption,
schemas.MetricOfPerformance.CPU: metrics.get_avg_cpu,
schemas.MetricOfPerformance.FPS: metrics.get_avg_fps,
schemas.MetricOfPerformance.CRASHES: metrics.get_crashes,
schemas.MetricOfPerformance.RESOURCES_VS_VISUALLY_COMPLETE: metrics.get_resources_vs_visually_complete,
schemas.MetricOfPerformance.PAGES_DOM_BUILDTIME: metrics.get_pages_dom_build_time,
schemas.MetricOfPerformance.PAGES_RESPONSE_TIME: metrics.get_pages_response_time,
schemas.MetricOfPerformance.PAGES_RESPONSE_TIME_DISTRIBUTION: metrics.get_pages_response_time_distribution,
schemas.MetricOfResources.MISSING_RESOURCES: metrics.get_missing_resources_trend,
schemas.MetricOfResources.SLOWEST_RESOURCES: metrics.get_slowest_resources,
schemas.MetricOfResources.RESOURCES_LOADING_TIME: metrics.get_resources_loading_time,
schemas.MetricOfResources.RESOURCE_TYPE_VS_RESPONSE_END: metrics.resource_type_vs_response_end,
schemas.MetricOfResources.RESOURCES_COUNT_BY_TYPE: metrics.get_resources_count_by_type,
schemas.MetricOfWebVitals.COUNT_USERS: metrics.get_unique_users, }
return supported.get(key, lambda *args: None)(project_id=project_id, **data)

View file

@ -420,18 +420,18 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n
if chart:
ch_sub_query += [f"timestamp >= generated_timestamp",
f"timestamp < generated_timestamp + %({step_size_name})s"]
if platform == schemas.PlatformType.mobile:
if platform == schemas.PlatformType.MOBILE:
ch_sub_query.append("user_device_type = 'mobile'")
elif platform == schemas.PlatformType.desktop:
elif platform == schemas.PlatformType.DESKTOP:
ch_sub_query.append("user_device_type = 'desktop'")
return ch_sub_query
def __get_sort_key(key):
return {
schemas.ErrorSort.occurrence: "max_datetime",
schemas.ErrorSort.users_count: "users",
schemas.ErrorSort.sessions_count: "sessions"
schemas.ErrorSort.OCCURRENCE: "max_datetime",
schemas.ErrorSort.USERS_COUNT: "users",
schemas.ErrorSort.SESSIONS_COUNT: "sessions"
}.get(key, 'max_datetime')
@ -443,7 +443,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
platform = None
for f in data.filters:
if f.type == schemas.FilterType.platform and len(f.value) > 0:
if f.type == schemas.FilterType.PLATFORM and len(f.value) > 0:
platform = f.value[0]
pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id")
pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
@ -472,7 +472,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
sort = __get_sort_key('datetime')
if data.sort is not None:
sort = __get_sort_key(data.sort)
order = schemas.SortOrderType.desc
order = schemas.SortOrderType.DESC
if data.order is not None:
order = data.order
extra_join = ""
@ -483,7 +483,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
"project_id": project_id,
"userId": user_id,
"step_size": step_size}
if data.status != schemas.ErrorStatus.all:
if data.status != schemas.ErrorStatus.ALL:
pg_sub_query.append("status = %(error_status)s")
params["error_status"] = data.status
if data.limit is not None and data.page is not None:
@ -502,7 +502,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
if data.query is not None and len(data.query) > 0:
pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
params["error_query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
op=schemas.SearchEventOperator.CONTAINS)
main_pg_query = f"""SELECT full_count,
error_id,

View file

@ -55,7 +55,7 @@ def __get_grouped_clickrage(rows, session_id, project_id):
def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
with pg_client.PostgresClient() as cur:
rows = []
if event_type is None or event_type == schemas.EventType.click:
if event_type is None or event_type == schemas.EventType.CLICK:
cur.execute(cur.mogrify("""\
SELECT
c.*,
@ -69,7 +69,7 @@ def get_by_session_id(session_id, project_id, group_clickrage=False, event_type:
rows += cur.fetchall()
if group_clickrage:
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
if event_type is None or event_type == schemas.EventType.input:
if event_type is None or event_type == schemas.EventType.INPUT:
cur.execute(cur.mogrify("""
SELECT
i.*,
@ -81,7 +81,7 @@ def get_by_session_id(session_id, project_id, group_clickrage=False, event_type:
{"project_id": project_id, "session_id": session_id})
)
rows += cur.fetchall()
if event_type is None or event_type == schemas.EventType.location:
if event_type is None or event_type == schemas.EventType.LOCATION:
cur.execute(cur.mogrify("""\
SELECT
l.*,
@ -115,26 +115,26 @@ def _search_tags(project_id, value, key=None, source=None):
class EventType:
CLICK = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label")
INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label")
LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", column="path")
CUSTOM = Event(ui_type=schemas.EventType.custom, table="events_common.customs", column="name")
REQUEST = Event(ui_type=schemas.EventType.request, table="events_common.requests", column="path")
GRAPHQL = Event(ui_type=schemas.EventType.graphql, table="events.graphql", column="name")
STATEACTION = Event(ui_type=schemas.EventType.state_action, table="events.state_actions", column="name")
TAG = Event(ui_type=schemas.EventType.tag, table="events.tags", column="tag_id")
ERROR = Event(ui_type=schemas.EventType.error, table="events.errors",
CLICK = Event(ui_type=schemas.EventType.CLICK, table="events.clicks", column="label")
INPUT = Event(ui_type=schemas.EventType.INPUT, table="events.inputs", column="label")
LOCATION = Event(ui_type=schemas.EventType.LOCATION, table="events.pages", column="path")
CUSTOM = Event(ui_type=schemas.EventType.CUSTOM, table="events_common.customs", column="name")
REQUEST = Event(ui_type=schemas.EventType.REQUEST, table="events_common.requests", column="path")
GRAPHQL = Event(ui_type=schemas.EventType.GRAPHQL, table="events.graphql", column="name")
STATEACTION = Event(ui_type=schemas.EventType.STATE_ACTION, table="events.state_actions", column="name")
TAG = Event(ui_type=schemas.EventType.TAG, table="events.tags", column="tag_id")
ERROR = Event(ui_type=schemas.EventType.ERROR, table="events.errors",
column=None) # column=None because errors are searched by name or message
METADATA = Event(ui_type=schemas.FilterType.metadata, table="public.sessions", column=None)
METADATA = Event(ui_type=schemas.FilterType.METADATA, table="public.sessions", column=None)
# MOBILE
CLICK_MOBILE = Event(ui_type=schemas.EventType.click_mobile, table="events_ios.taps", column="label")
INPUT_MOBILE = Event(ui_type=schemas.EventType.input_mobile, table="events_ios.inputs", column="label")
VIEW_MOBILE = Event(ui_type=schemas.EventType.view_mobile, table="events_ios.views", column="name")
SWIPE_MOBILE = Event(ui_type=schemas.EventType.swipe_mobile, table="events_ios.swipes", column="label")
CUSTOM_MOBILE = Event(ui_type=schemas.EventType.custom_mobile, table="events_common.customs", column="name")
REQUEST_MOBILE = Event(ui_type=schemas.EventType.request_mobile, table="events_common.requests", column="path")
CRASH_MOBILE = Event(ui_type=schemas.EventType.error_mobile, table="events_common.crashes",
column=None) # column=None because errors are searched by name or message
CLICK_MOBILE = Event(ui_type=schemas.EventType.CLICK_MOBILE, table="events_ios.taps", column="label")
INPUT_MOBILE = Event(ui_type=schemas.EventType.INPUT_MOBILE, table="events_ios.inputs", column="label")
VIEW_MOBILE = Event(ui_type=schemas.EventType.VIEW_MOBILE, table="events_ios.views", column="name")
SWIPE_MOBILE = Event(ui_type=schemas.EventType.SWIPE_MOBILE, table="events_ios.swipes", column="label")
CUSTOM_MOBILE = Event(ui_type=schemas.EventType.CUSTOM_MOBILE, table="events_common.customs", column="name")
REQUEST_MOBILE = Event(ui_type=schemas.EventType.REQUEST_MOBILE, table="events_common.requests", column="path")
CRASH_MOBILE = Event(ui_type=schemas.EventType.ERROR_MOBILE, table="events_common.crashes",
column=None) # column=None because errors are searched by name or message
SUPPORTED_TYPES = {

View file

@ -106,12 +106,12 @@ def prepare_constraints_params_to_search(data, project_id, user_id):
if data.query is not None and len(data.query) > 0:
constraints.append("flag_key ILIKE %(query)s")
params["query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
op=schemas.SearchEventOperator.CONTAINS)
return constraints, params
def create_feature_flag(project_id: int, user_id: int, feature_flag_data: schemas.FeatureFlagSchema) -> Optional[int]:
if feature_flag_data.flag_type == schemas.FeatureFlagType.multi_variant and len(feature_flag_data.variants) == 0:
if feature_flag_data.flag_type == schemas.FeatureFlagType.MULTI_VARIANT and len(feature_flag_data.variants) == 0:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
detail="Variants are required for multi variant flag")
@ -193,7 +193,7 @@ def validate_unique_flag_key(feature_flag_data, project_id, exclude_id=None):
def validate_multi_variant_flag(feature_flag_data):
if feature_flag_data.flag_type == schemas.FeatureFlagType.multi_variant:
if feature_flag_data.flag_type == schemas.FeatureFlagType.MULTI_VARIANT:
if sum([v.rollout_percentage for v in feature_flag_data.variants]) > 100:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Sum of rollout percentage for variants cannot be greater than 100.")

View file

@ -7,10 +7,10 @@ from chalicelib.utils import sql_helper as sh
def filter_stages(stages: List[schemas.SessionSearchEventSchema2]):
ALLOW_TYPES = [schemas.EventType.click, schemas.EventType.input,
schemas.EventType.location, schemas.EventType.custom,
schemas.EventType.click_mobile, schemas.EventType.input_mobile,
schemas.EventType.view_mobile, schemas.EventType.custom_mobile, ]
ALLOW_TYPES = [schemas.EventType.CLICK, schemas.EventType.INPUT,
schemas.EventType.LOCATION, schemas.EventType.CUSTOM,
schemas.EventType.CLICK_MOBILE, schemas.EventType.INPUT_MOBILE,
schemas.EventType.VIEW_MOBILE, schemas.EventType.CUSTOM_MOBILE, ]
return [s for s in stages if s.type in ALLOW_TYPES and s.value is not None]
@ -24,7 +24,7 @@ def __fix_stages(f_events: List[schemas.SessionSearchEventSchema2]):
events = []
for e in f_events:
if e.operator is None:
e.operator = schemas.SearchEventOperator._is
e.operator = schemas.SearchEventOperator.IS
if not isinstance(e.value, list):
e.value = [e.value]
@ -47,10 +47,10 @@ def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilte
metric_of=metric_of)
insights = helper.list_to_camel_case(insights)
if len(insights) > 0:
if metric_of == schemas.MetricOfFunnels.session_count and total_drop_due_to_issues > (
if metric_of == schemas.MetricOfFunnels.SESSION_COUNT and total_drop_due_to_issues > (
insights[0]["sessionsCount"] - insights[-1]["sessionsCount"]):
total_drop_due_to_issues = insights[0]["sessionsCount"] - insights[-1]["sessionsCount"]
elif metric_of == schemas.MetricOfFunnels.user_count and total_drop_due_to_issues > (
elif metric_of == schemas.MetricOfFunnels.USER_COUNT and total_drop_due_to_issues > (
insights[0]["usersCount"] - insights[-1]["usersCount"]):
total_drop_due_to_issues = insights[0]["usersCount"] - insights[-1]["usersCount"]
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues

View file

@ -149,27 +149,27 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i
no_platform = True
no_location = True
for f in data.filters:
if f.type == schemas.FilterType.platform:
if f.type == schemas.FilterType.PLATFORM:
no_platform = False
break
for f in data.events:
if f.type == schemas.EventType.location:
if f.type == schemas.EventType.LOCATION:
no_location = False
if len(f.value) == 0:
f.operator = schemas.SearchEventOperator._is_any
f.operator = schemas.SearchEventOperator.IS_ANY
break
if no_platform:
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.platform,
value=[schemas.PlatformType.desktop],
operator=schemas.SearchEventOperator._is))
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.PLATFORM,
value=[schemas.PlatformType.DESKTOP],
operator=schemas.SearchEventOperator.IS))
if no_location:
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.location,
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
value=[],
operator=schemas.SearchEventOperator._is_any))
operator=schemas.SearchEventOperator.IS_ANY))
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.events_count,
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
value=[0],
operator=schemas.MathOperator._greater))
operator=schemas.MathOperator.GREATER))
full_args, query_part = sessions.search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=data.bookmarked, issue=None,
@ -178,7 +178,7 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i
if len(exclude_sessions) > 0:
query_part += "\n AND session_id NOT IN %(exclude_sessions)s"
with pg_client.PostgresClient() as cur:
data.order = schemas.SortOrderType.desc
data.order = schemas.SortOrderType.DESC
data.sort = 'duration'
main_query = cur.mogrify(f"""SELECT *
FROM (SELECT {SESSION_PROJECTION_COLS}

View file

@ -3,7 +3,7 @@ from chalicelib.core import integration_base
from chalicelib.core.integration_github_issue import GithubIntegrationIssue
from chalicelib.utils import pg_client, helper
PROVIDER = schemas.IntegrationType.github
PROVIDER = schemas.IntegrationType.GITHUB
class GitHubIntegration(integration_base.BaseIntegration):

View file

@ -3,7 +3,7 @@ from chalicelib.core import integration_base
from chalicelib.core.integration_jira_cloud_issue import JIRACloudIntegrationIssue
from chalicelib.utils import pg_client, helper
PROVIDER = schemas.IntegrationType.jira
PROVIDER = schemas.IntegrationType.JIRA
def obfuscate_string(string):

View file

@ -9,52 +9,52 @@ def get_global_integrations_status(tenant_id, user_id, project_id):
SELECT EXISTS((SELECT 1
FROM public.oauth_authentication
WHERE user_id = %(user_id)s
AND provider = 'github')) AS {schemas.IntegrationType.github.value},
AND provider = 'github')) AS {schemas.IntegrationType.GITHUB.value},
EXISTS((SELECT 1
FROM public.jira_cloud
WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.jira.value},
WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.JIRA.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='bugsnag')) AS {schemas.IntegrationType.bugsnag.value},
AND provider='bugsnag')) AS {schemas.IntegrationType.BUGSNAG.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='cloudwatch')) AS {schemas.IntegrationType.cloudwatch.value},
AND provider='cloudwatch')) AS {schemas.IntegrationType.CLOUDWATCH.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='datadog')) AS {schemas.IntegrationType.datadog.value},
AND provider='datadog')) AS {schemas.IntegrationType.DATADOG.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='newrelic')) AS {schemas.IntegrationType.newrelic.value},
AND provider='newrelic')) AS {schemas.IntegrationType.NEWRELIC.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='rollbar')) AS {schemas.IntegrationType.rollbar.value},
AND provider='rollbar')) AS {schemas.IntegrationType.ROLLBAR.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='sentry')) AS {schemas.IntegrationType.sentry.value},
AND provider='sentry')) AS {schemas.IntegrationType.SENTRY.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='stackdriver')) AS {schemas.IntegrationType.stackdriver.value},
AND provider='stackdriver')) AS {schemas.IntegrationType.STACKDRIVER.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='sumologic')) AS {schemas.IntegrationType.sumologic.value},
AND provider='sumologic')) AS {schemas.IntegrationType.SUMOLOGIC.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='elasticsearch')) AS {schemas.IntegrationType.elasticsearch.value},
AND provider='elasticsearch')) AS {schemas.IntegrationType.ELASTICSEARCH.value},
EXISTS((SELECT 1
FROM public.webhooks
WHERE type='slack' AND deleted_at ISNULL)) AS {schemas.IntegrationType.slack.value},
WHERE type='slack' AND deleted_at ISNULL)) AS {schemas.IntegrationType.SLACK.value},
EXISTS((SELECT 1
FROM public.webhooks
WHERE type='msteams' AND deleted_at ISNULL)) AS {schemas.IntegrationType.ms_teams.value};""",
WHERE type='msteams' AND deleted_at ISNULL)) AS {schemas.IntegrationType.MS_TEAMS.value};""",
{"user_id": user_id, "tenant_id": tenant_id, "project_id": project_id})
)
current_integrations = cur.fetchone()

View file

@ -94,25 +94,25 @@ def __get_meta_constraint(project_id, data):
else:
filter_type = f["key"].upper()
filter_type = [filter_type, "USER" + filter_type, filter_type[4:]]
if any(item in [schemas.FilterType.user_browser] \
if any(item in [schemas.FilterType.USER_BROWSER] \
for item in filter_type):
constraints.append(f"sessions.user_browser = %({f['key']}_{i})s")
elif any(item in [schemas.FilterType.user_os, schemas.FilterType.user_os_mobile] \
elif any(item in [schemas.FilterType.USER_OS, schemas.FilterType.USER_OS_MOBILE] \
for item in filter_type):
constraints.append(f"sessions.user_os = %({f['key']}_{i})s")
elif any(item in [schemas.FilterType.user_device, schemas.FilterType.user_device_mobile] \
elif any(item in [schemas.FilterType.USER_DEVICE, schemas.FilterType.USER_DEVICE_MOBILE] \
for item in filter_type):
constraints.append(f"sessions.user_device = %({f['key']}_{i})s")
elif any(item in [schemas.FilterType.user_country, schemas.FilterType.user_country_mobile] \
elif any(item in [schemas.FilterType.USER_COUNTRY, schemas.FilterType.USER_COUNTRY_MOBILE] \
for item in filter_type):
constraints.append(f"sessions.user_country = %({f['key']}_{i})s")
elif any(item in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile] \
elif any(item in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE] \
for item in filter_type):
constraints.append(f"sessions.user_id = %({f['key']}_{i})s")
elif any(item in [schemas.FilterType.user_anonymous_id, schemas.FilterType.user_anonymous_id_mobile] \
elif any(item in [schemas.FilterType.USER_ANONYMOUS_ID, schemas.FilterType.USER_ANONYMOUS_ID_MOBILE] \
for item in filter_type):
constraints.append(f"sessions.user_anonymous_id = %({f['key']}_{i})s")
elif any(item in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_mobile] \
elif any(item in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE] \
for item in filter_type):
constraints.append(f"sessions.rev_id = %({f['key']}_{i})s")
return constraints
@ -167,7 +167,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
count = cur.fetchone()["count"]
results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.count
results["unit"] = schemas.TemplatePredefinedUnits.COUNT
return results
@ -1087,7 +1087,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-
avg = cur.fetchone()["avg"]
else:
avg = 0
return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.millisecond}
return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.MILLISECOND}
def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1180,7 +1180,7 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(
} for i, v in enumerate(quantiles_keys)
],
"extremeValues": [{"count": 0}],
"unit": schemas.TemplatePredefinedUnits.millisecond
"unit": schemas.TemplatePredefinedUnits.MILLISECOND
}
rows = helper.list_to_camel_case(rows)
_99 = result["percentiles"][-1]["responseTime"]
@ -1422,7 +1422,7 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
WHERE {" AND ".join(pg_sub_query)};"""
cur.execute(cur.mogrify(pg_query, params))
avg = cur.fetchone()["avg"]
return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.memory}
return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.MEMORY}
def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1455,7 +1455,7 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
cur.execute(cur.mogrify(pg_query, params))
avg = cur.fetchone()["avg"]
return {"value": avg, "chart": helper.list_to_camel_case(rows),
"unit": schemas.TemplatePredefinedUnits.percentage}
"unit": schemas.TemplatePredefinedUnits.PERCENTAGE}
def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1488,7 +1488,7 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
WHERE {" AND ".join(pg_sub_query)};"""
cur.execute(cur.mogrify(pg_query, params))
avg = cur.fetchone()["avg"]
return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.frame}
return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.FRAME}
def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1559,7 +1559,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
versions.append({v["version"]: v["count"] / (r["total"] / 100)})
r["versions"] = versions
return {"chart": rows, "browsers": browsers, "unit": schemas.TemplatePredefinedUnits.count}
return {"chart": rows, "browsers": browsers, "unit": schemas.TemplatePredefinedUnits.COUNT}
def __get_neutral(rows, add_All_if_empty=True):
@ -1713,7 +1713,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
avg = cur.fetchone()["avg"]
else:
avg = 0
return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond}
return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.MILLISECOND}
def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -2551,7 +2551,7 @@ def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(d
previous = helper.dict_to_camel_case(row)
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.count
results["unit"] = schemas.TemplatePredefinedUnits.COUNT
return results
@ -2911,7 +2911,7 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_
cur.execute(cur.mogrify(pg_query, {**params, **__get_constraint_values(args)}))
rows = cur.fetchall()
row["chart"] = rows
row["unit"] = schemas.TemplatePredefinedUnits.count
row["unit"] = schemas.TemplatePredefinedUnits.COUNT
return helper.dict_to_camel_case(row)
@ -2960,5 +2960,5 @@ def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
count = cur.fetchone()["count"]
results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.count
results["unit"] = schemas.TemplatePredefinedUnits.COUNT
return results

View file

@ -3,13 +3,13 @@ import schemas
def get_col(perf: schemas.PerformanceEventType):
return {
schemas.PerformanceEventType.location_dom_complete: {"column": "dom_building_time", "extraJoin": None},
schemas.PerformanceEventType.location_ttfb: {"column": "ttfb", "extraJoin": None},
schemas.PerformanceEventType.location_avg_cpu_load: {"column": "avg_cpu", "extraJoin": "events.performance"},
schemas.PerformanceEventType.location_avg_memory_usage: {"column": "avg_used_js_heap_size",
schemas.PerformanceEventType.LOCATION_DOM_COMPLETE: {"column": "dom_building_time", "extraJoin": None},
schemas.PerformanceEventType.LOCATION_TTFB: {"column": "ttfb", "extraJoin": None},
schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD: {"column": "avg_cpu", "extraJoin": "events.performance"},
schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE: {"column": "avg_used_js_heap_size",
"extraJoin": "events.performance"},
schemas.PerformanceEventType.fetch_failed: {"column": "success", "extraJoin": None},
schemas.PerformanceEventType.FETCH_FAILED: {"column": "success", "extraJoin": None},
# schemas.PerformanceEventType.fetch_duration: {"column": "duration", "extraJoin": None},
schemas.PerformanceEventType.location_largest_contentful_paint_time: {"column": "first_contentful_paint_time",
schemas.PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME: {"column": "first_contentful_paint_time",
"extraJoin": None}
}.get(perf)

View file

@ -70,10 +70,10 @@ def __transform_journey(rows, reverse_path=False):
JOURNEY_TYPES = {
schemas.ProductAnalyticsSelectedEventType.location: {"table": "events.pages", "column": "path"},
schemas.ProductAnalyticsSelectedEventType.click: {"table": "events.clicks", "column": "label"},
schemas.ProductAnalyticsSelectedEventType.input: {"table": "events.inputs", "column": "label"},
schemas.ProductAnalyticsSelectedEventType.custom_event: {"table": "events_common.customs", "column": "name"}
schemas.ProductAnalyticsSelectedEventType.LOCATION: {"table": "events.pages", "column": "path"},
schemas.ProductAnalyticsSelectedEventType.CLICK: {"table": "events.clicks", "column": "label"},
schemas.ProductAnalyticsSelectedEventType.INPUT: {"table": "events.inputs", "column": "label"},
schemas.ProductAnalyticsSelectedEventType.CUSTOM_EVENT: {"table": "events_common.customs", "column": "name"}
}
@ -92,10 +92,10 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sessions_conditions = ["start_ts>=%(startTimestamp)s", "start_ts<%(endTimestamp)s",
"project_id=%(project_id)s", "events_count > 1", "duration>0"]
if len(data.metric_value) == 0:
data.metric_value.append(schemas.ProductAnalyticsSelectedEventType.location)
sub_events.append({"table": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["table"],
"column": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["column"],
"eventType": schemas.ProductAnalyticsSelectedEventType.location.value})
data.metric_value.append(schemas.ProductAnalyticsSelectedEventType.LOCATION)
sub_events.append({"table": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.LOCATION]["table"],
"column": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.LOCATION]["column"],
"eventType": schemas.ProductAnalyticsSelectedEventType.LOCATION.value})
else:
for v in data.metric_value:
if JOURNEY_TYPES.get(v):
@ -149,49 +149,49 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
continue
# ---- meta-filters
if f.type == schemas.FilterType.user_browser:
if f.type == schemas.FilterType.USER_BROWSER:
if is_any:
sessions_conditions.append('user_browser IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.user_os]:
elif f.type in [schemas.FilterType.USER_OS]:
if is_any:
sessions_conditions.append('user_os IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.user_device]:
elif f.type in [schemas.FilterType.USER_DEVICE]:
if is_any:
sessions_conditions.append('user_device IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.user_country]:
elif f.type in [schemas.FilterType.USER_COUNTRY]:
if is_any:
sessions_conditions.append('user_country IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type == schemas.FilterType.user_city:
elif f.type == schemas.FilterType.USER_CITY:
if is_any:
sessions_conditions.append('user_city IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type == schemas.FilterType.user_state:
elif f.type == schemas.FilterType.USER_STATE:
if is_any:
sessions_conditions.append('user_state IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.utm_source]:
elif f.type in [schemas.FilterType.UTM_SOURCE]:
if is_any:
sessions_conditions.append('utm_source IS NOT NULL')
elif is_undefined:
@ -201,7 +201,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sh.multi_conditions(f'utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif f.type in [schemas.FilterType.utm_medium]:
elif f.type in [schemas.FilterType.UTM_MEDIUM]:
if is_any:
sessions_conditions.append('utm_medium IS NOT NULL')
elif is_undefined:
@ -211,7 +211,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sh.multi_conditions(f'utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif f.type in [schemas.FilterType.utm_campaign]:
elif f.type in [schemas.FilterType.UTM_CAMPAIGN]:
if is_any:
sessions_conditions.append('utm_campaign IS NOT NULL')
elif is_undefined:
@ -221,14 +221,14 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sh.multi_conditions(f'utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif f.type == schemas.FilterType.duration:
elif f.type == schemas.FilterType.DURATION:
if len(f.value) > 0 and f.value[0] is not None:
sessions_conditions.append("duration >= %(minDuration)s")
extra_values["minDuration"] = f.value[0]
if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0:
sessions_conditions.append("duration <= %(maxDuration)s")
extra_values["maxDuration"] = f.value[1]
elif f.type == schemas.FilterType.referrer:
elif f.type == schemas.FilterType.REFERRER:
# extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"
if is_any:
sessions_conditions.append('base_referrer IS NOT NULL')
@ -236,7 +236,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sessions_conditions.append(
sh.multi_conditions(f"base_referrer {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif f.type == schemas.FilterType.metadata:
elif f.type == schemas.FilterType.METADATA:
# get metadata list only if you need it
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
@ -252,7 +252,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
f"{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text",
f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]:
elif f.type in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE]:
if is_any:
sessions_conditions.append('user_id IS NOT NULL')
elif is_undefined:
@ -262,8 +262,8 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sh.multi_conditions(f"user_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif f.type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_mobile]:
elif f.type in [schemas.FilterType.USER_ANONYMOUS_ID,
schemas.FilterType.USER_ANONYMOUS_ID_MOBILE]:
if is_any:
sessions_conditions.append('user_anonymous_id IS NOT NULL')
elif is_undefined:
@ -273,7 +273,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sh.multi_conditions(f"user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif f.type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_mobile]:
elif f.type in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE]:
if is_any:
sessions_conditions.append('rev_id IS NOT NULL')
elif is_undefined:
@ -282,13 +282,13 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sessions_conditions.append(
sh.multi_conditions(f"rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
elif f.type == schemas.FilterType.platform:
elif f.type == schemas.FilterType.PLATFORM:
# op = __ sh.get_sql_operator(f.operator)
sessions_conditions.append(
sh.multi_conditions(f"user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif f.type == schemas.FilterType.issue:
elif f.type == schemas.FilterType.ISSUE:
if is_any:
sessions_conditions.append("array_length(issue_types, 1) > 0")
else:
@ -296,7 +296,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sh.multi_conditions(f"%({f_k})s {op} ANY (issue_types)", f.value, is_not=is_not,
value_key=f_k))
elif f.type == schemas.FilterType.events_count:
elif f.type == schemas.FilterType.EVENTS_COUNT:
sessions_conditions.append(
sh.multi_conditions(f"events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))

View file

@ -40,7 +40,7 @@ COALESCE((SELECT TRUE
# This function executes the query and return result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False,
error_status=schemas.ErrorStatus.ALL, count_only=False, issue=None, ids_only=False,
platform="web"):
if data.bookmarked:
data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project_id, user_id)
@ -74,12 +74,12 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
elif data.group_by_user:
g_sort = "count(full_sessions)"
if data.order is None:
data.order = schemas.SortOrderType.desc.value
data.order = schemas.SortOrderType.DESC.value
else:
data.order = data.order
if data.sort is not None and data.sort != 'sessionsCount':
sort = helper.key_to_snake_case(data.sort)
g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})"
g_sort = f"{'MIN' if data.order == schemas.SortOrderType.DESC else 'MAX'}({sort})"
else:
sort = 'start_ts'
@ -109,7 +109,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
full_args)
else:
if data.order is None:
data.order = schemas.SortOrderType.desc.value
data.order = schemas.SortOrderType.DESC.value
else:
data.order = data.order
sort = 'session_id'
@ -176,20 +176,20 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
density=density, factor=1, decimal=True))
extra_event = None
if metric_of == schemas.MetricOfTable.visited_url:
if metric_of == schemas.MetricOfTable.VISITED_URL:
extra_event = "events.pages"
elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is))
elif metric_of == schemas.MetricOfTable.ISSUES and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.ISSUE,
operator=schemas.SearchEventOperator.IS))
full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None, extra_event=extra_event)
full_args["step_size"] = step_size
sessions = []
with pg_client.PostgresClient() as cur:
if metric_type == schemas.MetricType.timeseries:
if view_type == schemas.MetricTimeseriesViewType.line_chart:
if metric_of == schemas.MetricOfTimeseries.session_count:
if metric_type == schemas.MetricType.TIMESERIES:
if view_type == schemas.MetricTimeseriesViewType.LINE_CHART:
if metric_of == schemas.MetricOfTimeseries.SESSION_COUNT:
# main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts
main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT s.session_id, s.start_ts
{query_part})
@ -202,7 +202,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
AND start_ts <= generated_timestamp + %(step_size)s) AS sessions ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;""", full_args)
elif metric_of == schemas.MetricOfTimeseries.user_count:
elif metric_of == schemas.MetricOfTimeseries.USER_COUNT:
main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT s.user_id, s.start_ts
{query_part}
AND s.user_id IS NOT NULL
@ -234,24 +234,24 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
logging.warning(data.model_dump_json())
logging.warning("--------------------")
raise err
if view_type == schemas.MetricTimeseriesViewType.line_chart:
if view_type == schemas.MetricTimeseriesViewType.LINE_CHART:
sessions = cur.fetchall()
else:
sessions = cur.fetchone()["count"]
elif metric_type == schemas.MetricType.table:
elif metric_type == schemas.MetricType.TABLE:
if isinstance(metric_of, schemas.MetricOfTable):
main_col = "user_id"
extra_col = ""
extra_where = ""
pre_query = ""
distinct_on = "s.session_id"
if metric_of == schemas.MetricOfTable.user_country:
if metric_of == schemas.MetricOfTable.USER_COUNTRY:
main_col = "user_country"
elif metric_of == schemas.MetricOfTable.user_device:
elif metric_of == schemas.MetricOfTable.USER_DEVICE:
main_col = "user_device"
elif metric_of == schemas.MetricOfTable.user_browser:
elif metric_of == schemas.MetricOfTable.USER_BROWSER:
main_col = "user_browser"
elif metric_of == schemas.MetricOfTable.issues:
elif metric_of == schemas.MetricOfTable.ISSUES:
main_col = "issue"
extra_col = f", UNNEST(s.issue_types) AS {main_col}"
if len(metric_value) > 0:
@ -261,7 +261,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
extra_where.append(f"{main_col} = %({arg_name})s")
full_args[arg_name] = metric_value[i]
extra_where = f"WHERE ({' OR '.join(extra_where)})"
elif metric_of == schemas.MetricOfTable.visited_url:
elif metric_of == schemas.MetricOfTable.VISITED_URL:
main_col = "path"
extra_col = ", path"
distinct_on += ",path"
@ -302,11 +302,11 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
density=density, factor=1, decimal=True))
extra_event = None
extra_conditions = None
if metric_of == schemas.MetricOfTable.visited_url:
if metric_of == schemas.MetricOfTable.VISITED_URL:
extra_event = "events.pages"
extra_conditions = {}
for e in data.events:
if e.type == schemas.EventType.location:
if e.type == schemas.EventType.LOCATION:
if e.operator not in extra_conditions:
extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
"type": e.type,
@ -320,9 +320,9 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
extra_conditions[e.operator].value.append(v)
extra_conditions = list(extra_conditions.values())
elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is))
elif metric_of == schemas.MetricOfTable.ISSUES and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.ISSUE,
operator=schemas.SearchEventOperator.IS))
full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None, extra_event=extra_event, extra_conditions=extra_conditions)
@ -337,13 +337,13 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
extra_col = ""
extra_where = ""
distinct_on = "s.session_id"
if metric_of == schemas.MetricOfTable.user_country:
if metric_of == schemas.MetricOfTable.USER_COUNTRY:
main_col = "user_country"
elif metric_of == schemas.MetricOfTable.user_device:
elif metric_of == schemas.MetricOfTable.USER_DEVICE:
main_col = "user_device"
elif metric_of == schemas.MetricOfTable.user_browser:
elif metric_of == schemas.MetricOfTable.USER_BROWSER:
main_col = "user_browser"
elif metric_of == schemas.MetricOfTable.issues:
elif metric_of == schemas.MetricOfTable.ISSUES:
main_col = "issue"
extra_col = f", UNNEST(s.issue_types) AS {main_col}"
if len(metric_value) > 0:
@ -353,11 +353,11 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
extra_where.append(f"{main_col} = %({arg_name})s")
full_args[arg_name] = metric_value[i]
extra_where = f"WHERE ({' OR '.join(extra_where)})"
elif metric_of == schemas.MetricOfTable.visited_url:
elif metric_of == schemas.MetricOfTable.VISITED_URL:
main_col = "path"
extra_col = ", path"
distinct_on += ",path"
if metric_format == schemas.MetricExtendedFormatType.session_count:
if metric_format == schemas.MetricExtendedFormatType.SESSION_COUNT:
main_query = f"""SELECT COUNT(*) AS count,
COALESCE(SUM(users_sessions.session_count),0) AS count,
COALESCE(JSONB_AGG(users_sessions)
@ -448,15 +448,15 @@ def search_table_of_individual_issues(data: schemas.SessionsSearchPayloadSchema,
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.request_details,
schemas.EventType.graphql] \
or event.type in [schemas.PerformanceEventType.location_dom_complete,
schemas.PerformanceEventType.location_largest_contentful_paint_time,
schemas.PerformanceEventType.location_ttfb,
schemas.PerformanceEventType.location_avg_cpu_load,
schemas.PerformanceEventType.location_avg_memory_usage
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
schemas.EventType.GRAPHQL] \
or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,
schemas.PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME,
schemas.PerformanceEventType.LOCATION_TTFB,
schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD,
schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE
] and (event.source is None or len(event.source) == 0) \
or event.type in [schemas.EventType.request_details, schemas.EventType.graphql] and (
or event.type in [schemas.EventType.REQUEST_DETAILS, schemas.EventType.GRAPHQL] and (
event.filters is None or len(event.filters) == 0))
@ -483,7 +483,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
f_k = f"f_value{i}"
full_args = {**full_args, **sh.multi_values(f.value, value_key=f_k)}
op = sh.get_sql_operator(f.operator) \
if filter_type not in [schemas.FilterType.events_count] else f.operator.value
if filter_type not in [schemas.FilterType.EVENTS_COUNT] else f.operator.value
is_any = sh.isAny_opreator(f.operator)
is_undefined = sh.isUndefined_operator(f.operator)
if not is_any and not is_undefined and len(f.value) == 0:
@ -491,7 +491,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
is_not = False
if sh.is_negation_operator(f.operator):
is_not = True
if filter_type == schemas.FilterType.user_browser:
if filter_type == schemas.FilterType.USER_BROWSER:
if is_any:
extra_constraints.append('s.user_browser IS NOT NULL')
ss_constraints.append('ms.user_browser IS NOT NULL')
@ -502,7 +502,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
sh.multi_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_mobile]:
elif filter_type in [schemas.FilterType.USER_OS, schemas.FilterType.USER_OS_MOBILE]:
if is_any:
extra_constraints.append('s.user_os IS NOT NULL')
ss_constraints.append('ms.user_os IS NOT NULL')
@ -512,7 +512,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
ss_constraints.append(
sh.multi_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_mobile]:
elif filter_type in [schemas.FilterType.USER_DEVICE, schemas.FilterType.USER_DEVICE_MOBILE]:
if is_any:
extra_constraints.append('s.user_device IS NOT NULL')
ss_constraints.append('ms.user_device IS NOT NULL')
@ -522,7 +522,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
ss_constraints.append(
sh.multi_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_mobile]:
elif filter_type in [schemas.FilterType.USER_COUNTRY, schemas.FilterType.USER_COUNTRY_MOBILE]:
if is_any:
extra_constraints.append('s.user_country IS NOT NULL')
ss_constraints.append('ms.user_country IS NOT NULL')
@ -533,7 +533,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
sh.multi_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.user_city:
elif filter_type == schemas.FilterType.USER_CITY:
if is_any:
extra_constraints.append('s.user_city IS NOT NULL')
ss_constraints.append('ms.user_city IS NOT NULL')
@ -544,7 +544,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
sh.multi_conditions(f'ms.user_city {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.user_state:
elif filter_type == schemas.FilterType.USER_STATE:
if is_any:
extra_constraints.append('s.user_state IS NOT NULL')
ss_constraints.append('ms.user_state IS NOT NULL')
@ -555,7 +555,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
sh.multi_conditions(f'ms.user_state {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_source]:
elif filter_type in [schemas.FilterType.UTM_SOURCE]:
if is_any:
extra_constraints.append('s.utm_source IS NOT NULL')
ss_constraints.append('ms.utm_source IS NOT NULL')
@ -569,7 +569,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
ss_constraints.append(
sh.multi_conditions(f'ms.utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_medium]:
elif filter_type in [schemas.FilterType.UTM_MEDIUM]:
if is_any:
extra_constraints.append('s.utm_medium IS NOT NULL')
ss_constraints.append('ms.utm_medium IS NOT NULL')
@ -583,7 +583,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
ss_constraints.append(
sh.multi_conditions(f'ms.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_campaign]:
elif filter_type in [schemas.FilterType.UTM_CAMPAIGN]:
if is_any:
extra_constraints.append('s.utm_campaign IS NOT NULL')
ss_constraints.append('ms.utm_campaign IS NOT NULL')
@ -598,7 +598,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
sh.multi_conditions(f'ms.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.duration:
elif filter_type == schemas.FilterType.DURATION:
if len(f.value) > 0 and f.value[0] is not None:
extra_constraints.append("s.duration >= %(minDuration)s")
ss_constraints.append("ms.duration >= %(minDuration)s")
@ -607,7 +607,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
extra_constraints.append("s.duration <= %(maxDuration)s")
ss_constraints.append("ms.duration <= %(maxDuration)s")
full_args["maxDuration"] = f.value[1]
elif filter_type == schemas.FilterType.referrer:
elif filter_type == schemas.FilterType.REFERRER:
# extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"
if is_any:
extra_constraints.append('s.base_referrer IS NOT NULL')
@ -636,7 +636,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
sh.multi_conditions(
f"ms.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text",
f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]:
elif filter_type in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE]:
if is_any:
extra_constraints.append('s.user_id IS NOT NULL')
ss_constraints.append('ms.user_id IS NOT NULL')
@ -650,8 +650,8 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
ss_constraints.append(
sh.multi_conditions(f"ms.user_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_mobile]:
elif filter_type in [schemas.FilterType.USER_ANONYMOUS_ID,
schemas.FilterType.USER_ANONYMOUS_ID_MOBILE]:
if is_any:
extra_constraints.append('s.user_anonymous_id IS NOT NULL')
ss_constraints.append('ms.user_anonymous_id IS NOT NULL')
@ -665,7 +665,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
ss_constraints.append(
sh.multi_conditions(f"ms.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_mobile]:
elif filter_type in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE]:
if is_any:
extra_constraints.append('s.rev_id IS NOT NULL')
ss_constraints.append('ms.rev_id IS NOT NULL')
@ -678,7 +678,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
ss_constraints.append(
sh.multi_conditions(f"ms.rev_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.platform:
elif filter_type == schemas.FilterType.PLATFORM:
# op = __ sh.get_sql_operator(f.operator)
extra_constraints.append(
sh.multi_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
@ -686,7 +686,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
ss_constraints.append(
sh.multi_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.issue:
elif filter_type == schemas.FilterType.ISSUE:
if is_any:
extra_constraints.append("array_length(s.issue_types, 1) > 0")
ss_constraints.append("array_length(ms.issue_types, 1) > 0")
@ -701,7 +701,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
sh.multi_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.events_count:
elif filter_type == schemas.FilterType.EVENTS_COUNT:
extra_constraints.append(
sh.multi_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
@ -719,7 +719,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
valid_events_count += 1
events_query_from = []
event_index = 0
or_events = data.events_order == schemas.SearchEventOrder._or
or_events = data.events_order == schemas.SearchEventOrder.OR
# events_joiner = " FULL JOIN " if or_events else " INNER JOIN LATERAL "
events_joiner = " UNION " if or_events else " INNER JOIN LATERAL "
for i, event in enumerate(data.events):
@ -746,7 +746,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
event_from = "%s"
event_where = ["main.timestamp >= %(startDate)s", "main.timestamp <= %(endDate)s",
"main.session_id=event_0.session_id"]
if data.events_order == schemas.SearchEventOrder._then:
if data.events_order == schemas.SearchEventOrder.THEN:
event_where.append(f"event_{event_index - 1}.timestamp <= main.timestamp")
e_k = f"e_value{i}"
s_k = e_k + "_source"
@ -760,9 +760,9 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
if platform == "web":
event_from = event_from % f"{events.EventType.CLICK.table} AS main "
if not is_any:
if event.operator == schemas.ClickEventExtraOperator._on_selector:
if schemas.ClickEventExtraOperator.has_value(event.operator):
event_where.append(
sh.multi_conditions(f"main.selector = %({e_k})s", event.value, value_key=e_k))
sh.multi_conditions(f"main.selector {op} %({e_k})s", event.value, value_key=e_k))
else:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.CLICK.column} {op} %({e_k})s", event.value,
@ -898,7 +898,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
sh.multi_conditions(f"main.{events.EventType.SWIPE_MOBILE.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == schemas.PerformanceEventType.fetch_failed:
elif event_type == schemas.PerformanceEventType.FETCH_FAILED:
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
if not is_any:
event_where.append(
@ -921,11 +921,11 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
# event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " +
# _multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
# event.source, value_key=e_k))
elif event_type in [schemas.PerformanceEventType.location_dom_complete,
schemas.PerformanceEventType.location_largest_contentful_paint_time,
schemas.PerformanceEventType.location_ttfb,
schemas.PerformanceEventType.location_avg_cpu_load,
schemas.PerformanceEventType.location_avg_memory_usage
elif event_type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,
schemas.PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME,
schemas.PerformanceEventType.LOCATION_TTFB,
schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD,
schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE
]:
event_from = event_from % f"{events.EventType.LOCATION.table} AS main "
col = performance_event.get_col(event_type)
@ -947,7 +947,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
elif event_type == schemas.EventType.request_details:
elif event_type == schemas.EventType.REQUEST_DETAILS:
event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
apply = False
for j, f in enumerate(event.filters):
@ -958,31 +958,31 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
op = sh.get_sql_operator(f.operator)
e_k_f = e_k + f"_fetch{j}"
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.FetchFilterType._url:
if f.type == schemas.FetchFilterType.FETCH_URL:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k_f})s::text",
f.value, value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._status_code:
elif f.type == schemas.FetchFilterType.FETCH_STATUS_CODE:
event_where.append(
sh.multi_conditions(f"main.status_code {f.operator} %({e_k_f})s::integer", f.value,
value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._method:
elif f.type == schemas.FetchFilterType.FETCH_METHOD:
event_where.append(
sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._duration:
elif f.type == schemas.FetchFilterType.FETCH_DURATION:
event_where.append(
sh.multi_conditions(f"main.duration {f.operator} %({e_k_f})s::integer", f.value,
value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._request_body:
elif f.type == schemas.FetchFilterType.FETCH_REQUEST_BODY:
event_where.append(
sh.multi_conditions(f"main.request_body {op} %({e_k_f})s::text", f.value,
value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._response_body:
elif f.type == schemas.FetchFilterType.FETCH_RESPONSE_BODY:
event_where.append(
sh.multi_conditions(f"main.response_body {op} %({e_k_f})s::text", f.value,
value_key=e_k_f))
@ -991,7 +991,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
logging.warning(f"undefined FETCH filter: {f.type}")
if not apply:
continue
elif event_type == schemas.EventType.graphql:
elif event_type == schemas.EventType.GRAPHQL:
event_from = event_from % f"{events.EventType.GRAPHQL.table} AS main "
for j, f in enumerate(event.filters):
is_any = sh.isAny_opreator(f.operator)
@ -1001,17 +1001,17 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
op = sh.get_sql_operator(f.operator)
e_k_f = e_k + f"_graphql{j}"
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.GraphqlFilterType._name:
if f.type == schemas.GraphqlFilterType.GRAPHQL_NAME:
event_where.append(
sh.multi_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value,
value_key=e_k_f))
elif f.type == schemas.GraphqlFilterType._method:
elif f.type == schemas.GraphqlFilterType.GRAPHQL_METHOD:
event_where.append(
sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
elif f.type == schemas.GraphqlFilterType._request_body:
elif f.type == schemas.GraphqlFilterType.GRAPHQL_REQUEST_BODY:
event_where.append(
sh.multi_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
elif f.type == schemas.GraphqlFilterType._response_body:
elif f.type == schemas.GraphqlFilterType.GRAPHQL_RESPONSE_BODY:
event_where.append(
sh.multi_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
else:
@ -1191,8 +1191,8 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
available_keys = metadata.get_keys_by_projects(project_ids)
for i in available_keys:
available_keys[i]["user_id"] = schemas.FilterType.user_id
available_keys[i]["user_anonymous_id"] = schemas.FilterType.user_anonymous_id
available_keys[i]["user_id"] = schemas.FilterType.USER_ID
available_keys[i]["user_anonymous_id"] = schemas.FilterType.USER_ANONYMOUS_ID
results = {}
for i in project_ids:
if m_key not in available_keys[i].values():

View file

@ -3,65 +3,65 @@ from chalicelib.core import autocomplete
from chalicelib.utils.event_filter_definition import SupportedFilter
SUPPORTED_TYPES = {
schemas.FilterType.user_os: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os)),
schemas.FilterType.user_browser: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_browser),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_browser)),
schemas.FilterType.user_device: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device)),
schemas.FilterType.user_country: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country)),
schemas.FilterType.user_city: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_city),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_city)),
schemas.FilterType.user_state: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_state),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_state)),
schemas.FilterType.user_id: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id)),
schemas.FilterType.user_anonymous_id: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id)),
schemas.FilterType.rev_id: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id)),
schemas.FilterType.referrer: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.referrer),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.referrer)),
schemas.FilterType.utm_campaign: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_campaign),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_campaign)),
schemas.FilterType.utm_medium: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_medium),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_medium)),
schemas.FilterType.utm_source: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_source),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_source)),
schemas.FilterType.USER_OS: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS)),
schemas.FilterType.USER_BROWSER: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_BROWSER),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_BROWSER)),
schemas.FilterType.USER_DEVICE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_DEVICE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_DEVICE)),
schemas.FilterType.USER_COUNTRY: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY)),
schemas.FilterType.USER_CITY: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_CITY),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_CITY)),
schemas.FilterType.USER_STATE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_STATE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_STATE)),
schemas.FilterType.USER_ID: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID)),
schemas.FilterType.USER_ANONYMOUS_ID: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID)),
schemas.FilterType.REV_ID: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID)),
schemas.FilterType.REFERRER: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REFERRER),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REFERRER)),
schemas.FilterType.UTM_CAMPAIGN: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_CAMPAIGN),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_CAMPAIGN)),
schemas.FilterType.UTM_MEDIUM: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_MEDIUM),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_MEDIUM)),
schemas.FilterType.UTM_SOURCE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_SOURCE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_SOURCE)),
# IOS
schemas.FilterType.user_os_mobile: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os_mobile),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os_mobile)),
schemas.FilterType.user_device_mobile: SupportedFilter(
schemas.FilterType.USER_OS_MOBILE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS_MOBILE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS_MOBILE)),
schemas.FilterType.USER_DEVICE_MOBILE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(
typename=schemas.FilterType.user_device_mobile),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device_mobile)),
schemas.FilterType.user_country_mobile: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country_mobile),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country_mobile)),
schemas.FilterType.user_id_mobile: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id_mobile),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id_mobile)),
schemas.FilterType.user_anonymous_id_mobile: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id_mobile),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id_mobile)),
schemas.FilterType.rev_id_mobile: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id_mobile),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id_mobile)),
typename=schemas.FilterType.USER_DEVICE_MOBILE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_DEVICE_MOBILE)),
schemas.FilterType.USER_COUNTRY_MOBILE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY_MOBILE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY_MOBILE)),
schemas.FilterType.USER_ID_MOBILE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID_MOBILE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID_MOBILE)),
schemas.FilterType.USER_ANONYMOUS_ID_MOBILE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID_MOBILE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID_MOBILE)),
schemas.FilterType.REV_ID_MOBILE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID_MOBILE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID_MOBILE)),
}

View file

@ -57,29 +57,29 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
is_not = False
if sh.is_negation_operator(f.operator):
is_not = True
if filter_type == schemas.FilterType.user_browser:
if filter_type == schemas.FilterType.USER_BROWSER:
first_stage_extra_constraints.append(
sh.multi_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_mobile]:
elif filter_type in [schemas.FilterType.USER_OS, schemas.FilterType.USER_OS_MOBILE]:
first_stage_extra_constraints.append(
sh.multi_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_mobile]:
elif filter_type in [schemas.FilterType.USER_DEVICE, schemas.FilterType.USER_DEVICE_MOBILE]:
first_stage_extra_constraints.append(
sh.multi_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_mobile]:
elif filter_type in [schemas.FilterType.USER_COUNTRY, schemas.FilterType.USER_COUNTRY_MOBILE]:
first_stage_extra_constraints.append(
sh.multi_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type == schemas.FilterType.duration:
elif filter_type == schemas.FilterType.DURATION:
if len(f.value) > 0 and f.value[0] is not None:
first_stage_extra_constraints.append(f's.duration >= %(minDuration)s')
values["minDuration"] = f.value[0]
if len(f["value"]) > 1 and f.value[1] is not None and int(f.value[1]) > 0:
first_stage_extra_constraints.append('s.duration <= %(maxDuration)s')
values["maxDuration"] = f.value[1]
elif filter_type == schemas.FilterType.referrer:
elif filter_type == schemas.FilterType.REFERRER:
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"]
first_stage_extra_constraints.append(
@ -94,16 +94,16 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
f's.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s', f.value,
is_not=is_not, value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]:
elif filter_type in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE]:
first_stage_extra_constraints.append(
sh.multi_conditions(f's.user_id {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_mobile]:
elif filter_type in [schemas.FilterType.USER_ANONYMOUS_ID,
schemas.FilterType.USER_ANONYMOUS_ID_MOBILE]:
first_stage_extra_constraints.append(
sh.multi_conditions(f's.user_anonymous_id {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_mobile]:
elif filter_type in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE]:
first_stage_extra_constraints.append(
sh.multi_conditions(f's.rev_id {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
@ -111,7 +111,7 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
for s in stages:
if s.operator is None:
s.operator = schemas.SearchEventOperator._is
s.operator = schemas.SearchEventOperator.IS
if not isinstance(s.value, list):
s.value = [s.value]
@ -431,7 +431,7 @@ def count_users(rows, n_stages, user_key="user_uuid"):
return users_count
def get_stages(stages, rows, metric_of=schemas.MetricOfFunnels.session_count):
def get_stages(stages, rows, metric_of=schemas.MetricOfFunnels.SESSION_COUNT):
n_stages = len(stages)
if metric_of == "sessionCount":
base_counts = count_sessions(rows, n_stages)

View file

@ -107,7 +107,7 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="",
return w
def exists_by_name(name: str, exclude_id: Optional[int], webhook_type: str = schemas.WebhookType.webhook,
def exists_by_name(name: str, exclude_id: Optional[int], webhook_type: str = schemas.WebhookType.WEBHOOK,
tenant_id: Optional[int] = None) -> bool:
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT EXISTS(SELECT 1

View file

@ -143,15 +143,18 @@ def string_to_sql_like_with_op(value, op):
return _value.replace("%", "%%")
likable_operators = [schemas.SearchEventOperator._starts_with, schemas.SearchEventOperator._ends_with,
schemas.SearchEventOperator._contains, schemas.SearchEventOperator._not_contains]
likable_operators = [schemas.SearchEventOperator.STARTS_WITH, schemas.SearchEventOperator.ENDS_WITH,
schemas.SearchEventOperator.CONTAINS, schemas.SearchEventOperator.NOT_CONTAINS,
schemas.ClickEventExtraOperator.STARTS_WITH, schemas.ClickEventExtraOperator.ENDS_WITH,
schemas.ClickEventExtraOperator.CONTAINS, schemas.ClickEventExtraOperator.NOT_CONTAINS]
def is_likable(op: schemas.SearchEventOperator):
def is_likable(op: Union[schemas.SearchEventOperator, schemas.ClickEventExtraOperator]):
return op in likable_operators
def values_for_operator(value: Union[str, list], op: schemas.SearchEventOperator):
def values_for_operator(value: Union[str, list],
op: Union[schemas.SearchEventOperator, schemas.ClickEventExtraOperator]):
if not is_likable(op):
return value
if isinstance(value, list):
@ -162,11 +165,12 @@ def values_for_operator(value: Union[str, list], op: schemas.SearchEventOperator
else:
if value is None:
return value
if op == schemas.SearchEventOperator._starts_with:
if op in (schemas.SearchEventOperator.STARTS_WITH, schemas.ClickEventExtraOperator.STARTS_WITH):
return f"{value}%"
elif op == schemas.SearchEventOperator._ends_with:
elif op in (schemas.SearchEventOperator.ENDS_WITH, schemas.ClickEventExtraOperator.ENDS_WITH):
return f"%{value}"
elif op == schemas.SearchEventOperator._contains or op == schemas.SearchEventOperator._not_contains:
elif op in (schemas.SearchEventOperator.CONTAINS, schemas.SearchEventOperator.NOT_CONTAINS,
schemas.ClickEventExtraOperator.CONTAINS, schemas.ClickEventExtraOperator.NOT_CONTAINS):
return f"%{value}%"
return value
@ -278,22 +282,22 @@ def old_search_payload_to_flat(values):
def custom_alert_to_front(values):
# to support frontend format for payload
if values.get("seriesId") is not None and values["query"]["left"] == schemas.AlertColumn.custom:
if values.get("seriesId") is not None and values["query"]["left"] == schemas.AlertColumn.CUSTOM:
values["query"]["left"] = values["seriesId"]
values["seriesId"] = None
return values
def __time_value(row):
row["unit"] = schemas.TemplatePredefinedUnits.millisecond
row["unit"] = schemas.TemplatePredefinedUnits.MILLISECOND
factor = 1
if row["value"] > TimeUTC.MS_MINUTE:
row["value"] = row["value"] / TimeUTC.MS_MINUTE
row["unit"] = schemas.TemplatePredefinedUnits.minute
row["unit"] = schemas.TemplatePredefinedUnits.MINUTE
factor = TimeUTC.MS_MINUTE
elif row["value"] > 1 * 1000:
row["value"] = row["value"] / 1000
row["unit"] = schemas.TemplatePredefinedUnits.second
row["unit"] = schemas.TemplatePredefinedUnits.SECOND
factor = 1000
if "chart" in row and factor > 1:

View file

@ -5,23 +5,31 @@ import schemas
def get_sql_operator(op: Union[schemas.SearchEventOperator, schemas.ClickEventExtraOperator]):
return {
schemas.SearchEventOperator._is: "=",
schemas.SearchEventOperator._is_any: "IN",
schemas.SearchEventOperator._on: "=",
schemas.SearchEventOperator._on_any: "IN",
schemas.SearchEventOperator._is_not: "!=",
schemas.SearchEventOperator._not_on: "!=",
schemas.SearchEventOperator._contains: "ILIKE",
schemas.SearchEventOperator._not_contains: "NOT ILIKE",
schemas.SearchEventOperator._starts_with: "ILIKE",
schemas.SearchEventOperator._ends_with: "ILIKE",
schemas.SearchEventOperator.IS: "=",
schemas.SearchEventOperator.ON: "=",
schemas.SearchEventOperator.ON_ANY: "IN",
schemas.SearchEventOperator.IS_NOT: "!=",
schemas.SearchEventOperator.NOT_ON: "!=",
schemas.SearchEventOperator.CONTAINS: "ILIKE",
schemas.SearchEventOperator.NOT_CONTAINS: "NOT ILIKE",
schemas.SearchEventOperator.STARTS_WITH: "ILIKE",
schemas.SearchEventOperator.ENDS_WITH: "ILIKE",
# Selector operators:
schemas.ClickEventExtraOperator.IS: "=",
schemas.ClickEventExtraOperator.IS_NOT: "!=",
schemas.ClickEventExtraOperator.CONTAINS: "ILIKE",
schemas.ClickEventExtraOperator.NOT_CONTAINS: "NOT ILIKE",
schemas.ClickEventExtraOperator.STARTS_WITH: "ILIKE",
schemas.ClickEventExtraOperator.ENDS_WITH: "ILIKE",
}.get(op, "=")
def is_negation_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_not,
schemas.SearchEventOperator._not_on,
schemas.SearchEventOperator._not_contains]
return op in [schemas.SearchEventOperator.IS_NOT,
schemas.SearchEventOperator.NOT_ON,
schemas.SearchEventOperator.NOT_CONTAINS,
schemas.ClickEventExtraOperator.IS_NOT,
schemas.ClickEventExtraOperator.NOT_CONTAINS]
def reverse_sql_operator(op):
@ -46,8 +54,8 @@ def multi_values(values, value_key="value"):
def isAny_opreator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._on_any, schemas.SearchEventOperator._is_any]
return op in [schemas.SearchEventOperator.ON_ANY, schemas.SearchEventOperator.IS_ANY]
def isUndefined_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_undefined]
return op in [schemas.SearchEventOperator.IS_UNDEFINED]

View file

@ -1,45 +0,0 @@
import string
jsonb = "'::jsonb,'"
dash = '", "'
dash_nl = ',\n'
dash_key = ")s, %("
def __filter(s, chars, l):
s = filter(lambda c: c in chars, s)
s = "".join(s)
if len(s) == 0:
return None
return s[0:l]
__keyword_chars = string.ascii_lowercase + string.ascii_uppercase + string.digits + "_"
def keyword(s):
if not isinstance(s, str):
return None
s = s.strip().replace(" ", "_")
return __filter(s, __keyword_chars, 30)
__pattern_chars = string.ascii_lowercase + string.ascii_uppercase + string.digits + "_-/*."
def pattern(s):
if not isinstance(s, str):
return None
return __filter(s, __pattern_chars, 1000)
def join(*args):
return '\x00'.join(args)
def split(s):
return s.split('\x00')
def hexed(n):
return hex(n)[2:]

View file

@ -19,7 +19,7 @@ from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@app.get('/{projectId}/autocomplete', tags=["events"])
@app.get('/{projectId}/autocomplete', tags=["autocomplete"])
@app.get('/{projectId}/events/search', tags=["events"])
def events_search(projectId: int, q: str,
type: Union[schemas.FilterType, schemas.EventType,
@ -27,25 +27,28 @@ def events_search(projectId: int, q: str,
schemas.GraphqlFilterType, str] = None,
key: str = None, source: str = None, live: bool = False,
context: schemas.CurrentContext = Depends(OR_context)):
if len(q) == 0:
if len(q) == 0 and not type:
return {"data": []}
elif type:
# TODO: return to values related to type
pass
if live:
return assist.autocomplete(project_id=projectId, q=q,
key=key if key is not None else type)
if type in [schemas.FetchFilterType._url]:
type = schemas.EventType.request
elif type in [schemas.GraphqlFilterType._name]:
type = schemas.EventType.graphql
if type in [schemas.FetchFilterType.FETCH_URL]:
type = schemas.EventType.REQUEST
elif type in [schemas.GraphqlFilterType.GRAPHQL_NAME]:
type = schemas.EventType.GRAPHQL
elif isinstance(type, schemas.PerformanceEventType):
if type in [schemas.PerformanceEventType.location_dom_complete,
schemas.PerformanceEventType.location_largest_contentful_paint_time,
schemas.PerformanceEventType.location_ttfb,
schemas.PerformanceEventType.location_avg_cpu_load,
schemas.PerformanceEventType.location_avg_memory_usage
if type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,
schemas.PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME,
schemas.PerformanceEventType.LOCATION_TTFB,
schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD,
schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE
]:
type = schemas.EventType.location
elif type in [schemas.PerformanceEventType.fetch_failed]:
type = schemas.EventType.request
type = schemas.EventType.LOCATION
elif type in [schemas.PerformanceEventType.FETCH_FAILED]:
type = schemas.EventType.REQUEST
else:
return {"data": []}
@ -73,12 +76,12 @@ def integration_notify(projectId: int, integration: str, webhookId: int, source:
"user": context.email, "comment": comment, "project_id": projectId,
"integration_id": webhookId,
"project_name": context.project.name}
if integration == schemas.WebhookType.slack:
if integration == schemas.WebhookType.SLACK:
if source == "sessions":
return Slack.share_session(session_id=sourceId, **args)
elif source == "errors":
return Slack.share_error(error_id=sourceId, **args)
elif integration == schemas.WebhookType.msteams:
elif integration == schemas.WebhookType.MSTEAMS:
if source == "sessions":
return MSTeams.share_session(session_id=sourceId, **args)
elif source == "errors":
@ -712,7 +715,7 @@ def get_boarding_state_integrations(context: schemas.CurrentContext = Depends(OR
@app.get('/integrations/slack/channels', tags=["integrations"])
def get_slack_channels(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": webhook.get_by_type(tenant_id=context.tenant_id, webhook_type=schemas.WebhookType.slack)}
return {"data": webhook.get_by_type(tenant_id=context.tenant_id, webhook_type=schemas.WebhookType.SLACK)}
@app.get('/integrations/slack/{integrationId}', tags=["integrations"])
@ -809,7 +812,7 @@ def get_limits(context: schemas.CurrentContext = Depends(OR_context)):
@app.get('/integrations/msteams/channels', tags=["integrations"])
def get_msteams_channels(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": webhook.get_by_type(tenant_id=context.tenant_id, webhook_type=schemas.WebhookType.msteams)}
return {"data": webhook.get_by_type(tenant_id=context.tenant_id, webhook_type=schemas.WebhookType.MSTEAMS)}
@app.post('/integrations/msteams', tags=['integrations'])

File diff suppressed because it is too large Load diff

View file

@ -107,7 +107,7 @@ class TestFeatureFlag:
schemas.SearchFlagsSchema(
limit=15,
user_id=123,
order=schemas.SortOrderType.desc,
order=schemas.SortOrderType.DESC,
query="search term",
is_active=True
)

1
ee/api/.gitignore vendored
View file

@ -250,7 +250,6 @@ Pipfile.lock
/chalicelib/utils/storage/generators.py
/chalicelib/utils/storage/interface.py
/chalicelib/utils/storage/s3.py
/chalicelib/utils/strings.py
/chalicelib/utils/TimeUTC.py
/crons/__init__.py
/crons/core_crons.py

View file

@ -18,60 +18,60 @@ else:
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
LeftToDb = {
schemas.AlertColumn.performance__dom_content_loaded__average: {
schemas.AlertColumn.PERFORMANCE__DOM_CONTENT_LOADED__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "COALESCE(AVG(NULLIF(dom_content_loaded_time ,0)),0)"},
schemas.AlertColumn.performance__first_meaningful_paint__average: {
schemas.AlertColumn.PERFORMANCE__FIRST_MEANINGFUL_PAINT__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)"},
schemas.AlertColumn.performance__page_load_time__average: {
schemas.AlertColumn.PERFORMANCE__PAGE_LOAD_TIME__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(load_time ,0))"},
schemas.AlertColumn.performance__dom_build_time__average: {
schemas.AlertColumn.PERFORMANCE__DOM_BUILD_TIME__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(dom_building_time,0))"},
schemas.AlertColumn.performance__speed_index__average: {
schemas.AlertColumn.PERFORMANCE__SPEED_INDEX__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(speed_index,0))"},
schemas.AlertColumn.performance__page_response_time__average: {
schemas.AlertColumn.PERFORMANCE__PAGE_RESPONSE_TIME__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(response_time,0))"},
schemas.AlertColumn.performance__ttfb__average: {
schemas.AlertColumn.PERFORMANCE__TTFB__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(first_paint_time,0))"},
schemas.AlertColumn.performance__time_to_render__average: {
schemas.AlertColumn.PERFORMANCE__TIME_TO_RENDER__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(visually_complete,0))"},
schemas.AlertColumn.performance__image_load_time__average: {
schemas.AlertColumn.PERFORMANCE__IMAGE_LOAD_TIME__AVERAGE: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='img'"},
schemas.AlertColumn.performance__request_load_time__average: {
schemas.AlertColumn.PERFORMANCE__REQUEST_LOAD_TIME__AVERAGE: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='fetch'"},
schemas.AlertColumn.resources__load_time__average: {
schemas.AlertColumn.RESOURCES__LOAD_TIME__AVERAGE: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(resources.duration,0))"},
schemas.AlertColumn.resources__missing__count: {
schemas.AlertColumn.RESOURCES__MISSING__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(DISTINCT url_hostpath)", "condition": "success= FALSE AND type='img'"},
schemas.AlertColumn.errors__4xx_5xx__count: {
schemas.AlertColumn.ERRORS__4XX_5XX__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "COUNT(session_id)",
"condition": "status/100!=2"},
schemas.AlertColumn.errors__4xx__count: {
schemas.AlertColumn.ERRORS__4XX__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(session_id)", "condition": "status/100=4"},
schemas.AlertColumn.errors__5xx__count: {
schemas.AlertColumn.ERRORS__5XX__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(session_id)", "condition": "status/100=5"},
schemas.AlertColumn.errors__javascript__impacted_sessions__count: {
schemas.AlertColumn.ERRORS__JAVASCRIPT__IMPACTED_SESSIONS__COUNT: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(DISTINCT session_id)", "condition": "success= FALSE AND type='script'"},
schemas.AlertColumn.performance__crashes__count: {
schemas.AlertColumn.PERFORMANCE__CRASHES__COUNT: {
"table": "public.sessions",
"formula": "COUNT(DISTINCT session_id)",
"condition": "errors_count > 0 AND duration>0"},
schemas.AlertColumn.errors__javascript__count: {
schemas.AlertColumn.ERRORS__JAVASCRIPT__COUNT: {
"table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
"formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'", "joinSessions": False},
schemas.AlertColumn.errors__backend__count: {
schemas.AlertColumn.ERRORS__BACKEND__COUNT: {
"table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
"formula": "COUNT(DISTINCT session_id)", "condition": "source!='js_exception'", "joinSessions": False},
}
@ -91,7 +91,7 @@ def can_check(a) -> bool:
now = TimeUTC.now()
repetitionBase = a["options"]["currentPeriod"] \
if a["detectionMethod"] == schemas.AlertDetectionMethod.change \
if a["detectionMethod"] == schemas.AlertDetectionMethod.CHANGE \
and a["options"]["currentPeriod"] > a["options"]["previousPeriod"] \
else a["options"]["previousPeriod"]
@ -114,7 +114,7 @@ def Build(a):
main_table = ""
if a["seriesId"] is not None:
a["filter"]["sort"] = "session_id"
a["filter"]["order"] = schemas.SortOrderType.desc
a["filter"]["order"] = schemas.SortOrderType.DESC
a["filter"]["startDate"] = 0
a["filter"]["endDate"] = TimeUTC.now()
try:
@ -140,7 +140,7 @@ def Build(a):
is_ss = main_table == "public.sessions"
q = f"""SELECT coalesce(value,0) AS value, coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid"""
if a["detectionMethod"] == schemas.AlertDetectionMethod.threshold:
if a["detectionMethod"] == schemas.AlertDetectionMethod.THRESHOLD:
if a["seriesId"] is not None:
q += f""" FROM ({subQ}) AS stat"""
else:
@ -148,7 +148,7 @@ def Build(a):
{"AND start_ts >= %(startDate)s AND start_ts <= %(now)s" if j_s else ""}) AS stat"""
params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000}
else:
if a["change"] == schemas.AlertDetectionType.change:
if a["change"] == schemas.AlertDetectionType.CHANGE:
if a["seriesId"] is not None:
sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
sub1 = f"SELECT (({subQ})-({sub2})) AS value"

View file

@ -13,101 +13,101 @@ from chalicelib.utils.TimeUTC import TimeUTC
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
LeftToDb = {
schemas.AlertColumn.performance__dom_content_loaded__average: {
schemas.AlertColumn.PERFORMANCE__DOM_CONTENT_LOADED__AVERAGE: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages",
"formula": "COALESCE(AVG(NULLIF(dom_content_loaded_event_time ,0)),0)",
"eventType": "LOCATION"
},
schemas.AlertColumn.performance__first_meaningful_paint__average: {
schemas.AlertColumn.PERFORMANCE__FIRST_MEANINGFUL_PAINT__AVERAGE: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages",
"formula": "COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)",
"eventType": "LOCATION"
},
schemas.AlertColumn.performance__page_load_time__average: {
schemas.AlertColumn.PERFORMANCE__PAGE_LOAD_TIME__AVERAGE: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages",
"formula": "AVG(NULLIF(load_event_time ,0))",
"eventType": "LOCATION"
},
schemas.AlertColumn.performance__dom_build_time__average: {
schemas.AlertColumn.PERFORMANCE__DOM_BUILD_TIME__AVERAGE: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages",
"formula": "AVG(NULLIF(dom_building_time,0))",
"eventType": "LOCATION"
},
schemas.AlertColumn.performance__speed_index__average: {
schemas.AlertColumn.PERFORMANCE__SPEED_INDEX__AVERAGE: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages",
"formula": "AVG(NULLIF(speed_index,0))",
"eventType": "LOCATION"
},
schemas.AlertColumn.performance__page_response_time__average: {
schemas.AlertColumn.PERFORMANCE__PAGE_RESPONSE_TIME__AVERAGE: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages",
"formula": "AVG(NULLIF(response_time,0))",
"eventType": "LOCATION"
},
schemas.AlertColumn.performance__ttfb__average: {
schemas.AlertColumn.PERFORMANCE__TTFB__AVERAGE: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages",
"formula": "AVG(NULLIF(first_contentful_paint_time,0))",
"eventType": "LOCATION"
},
schemas.AlertColumn.performance__time_to_render__average: {
schemas.AlertColumn.PERFORMANCE__TIME_TO_RENDER__AVERAGE: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages",
"formula": "AVG(NULLIF(visually_complete,0))",
"eventType": "LOCATION"
},
schemas.AlertColumn.performance__image_load_time__average: {
schemas.AlertColumn.PERFORMANCE__IMAGE_LOAD_TIME__AVERAGE: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_resources_table(timestamp)} AS resources",
"formula": "AVG(NULLIF(resources.duration,0))",
"condition": "type='img'"
},
schemas.AlertColumn.performance__request_load_time__average: {
schemas.AlertColumn.PERFORMANCE__REQUEST_LOAD_TIME__AVERAGE: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_resources_table(timestamp)} AS resources",
"formula": "AVG(NULLIF(resources.duration,0))",
"condition": "type='fetch'"
},
schemas.AlertColumn.resources__load_time__average: {
schemas.AlertColumn.RESOURCES__LOAD_TIME__AVERAGE: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_resources_table(timestamp)} AS resources",
"formula": "AVG(NULLIF(resources.duration,0))"
},
schemas.AlertColumn.resources__missing__count: {
schemas.AlertColumn.RESOURCES__MISSING__COUNT: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_resources_table(timestamp)} AS resources",
"formula": "COUNT(DISTINCT url_hostpath)",
"condition": "success= FALSE AND type='img'"
},
schemas.AlertColumn.errors__4xx_5xx__count: {
schemas.AlertColumn.ERRORS__4XX_5XX__COUNT: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS requests",
"eventType": "REQUEST",
"formula": "COUNT(1)",
"condition": "intDiv(requests.status, 100)!=2"
},
schemas.AlertColumn.errors__4xx__count: {
schemas.AlertColumn.ERRORS__4XX__COUNT: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS requests",
"eventType": "REQUEST",
"formula": "COUNT(1)",
"condition": "intDiv(requests.status, 100)==4"
},
schemas.AlertColumn.errors__5xx__count: {
schemas.AlertColumn.ERRORS__5XX__COUNT: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS requests",
"eventType": "REQUEST",
"formula": "COUNT(1)",
"condition": "intDiv(requests.status, 100)==5"
},
schemas.AlertColumn.errors__javascript__impacted_sessions__count: {
schemas.AlertColumn.ERRORS__JAVASCRIPT__IMPACTED_SESSIONS__COUNT: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS errors",
"eventType": "ERROR",
"formula": "COUNT(DISTINCT session_id)",
"condition": "source='js_exception'"
},
schemas.AlertColumn.performance__crashes__count: {
schemas.AlertColumn.PERFORMANCE__CRASHES__COUNT: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_sessions_table(timestamp)} AS sessions",
"formula": "COUNT(DISTINCT session_id)",
"condition": "duration>0 AND errors_count>0"
},
schemas.AlertColumn.errors__javascript__count: {
schemas.AlertColumn.ERRORS__JAVASCRIPT__COUNT: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS errors",
"eventType": "ERROR",
"formula": "COUNT(DISTINCT session_id)",
"condition": "source='js_exception'"
},
schemas.AlertColumn.errors__backend__count: {
schemas.AlertColumn.ERRORS__BACKEND__COUNT: {
"table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS errors",
"eventType": "ERROR",
"formula": "COUNT(DISTINCT session_id)",
@ -122,7 +122,7 @@ def Build(a):
full_args = {}
if a["seriesId"] is not None:
a["filter"]["sort"] = "session_id"
a["filter"]["order"] = schemas.SortOrderType.desc
a["filter"]["order"] = schemas.SortOrderType.DESC
a["filter"]["startDate"] = 0
a["filter"]["endDate"] = TimeUTC.now()
try:
@ -148,7 +148,7 @@ def Build(a):
q = f"""SELECT coalesce(value,0) AS value, coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid"""
if a["detectionMethod"] == schemas.AlertDetectionMethod.threshold:
if a["detectionMethod"] == schemas.AlertDetectionMethod.THRESHOLD:
if a["seriesId"] is not None:
q += f""" FROM ({subQ}) AS stat"""
else:
@ -157,7 +157,7 @@ def Build(a):
AND datetime<=toDateTime(%(now)s/1000) ) AS stat"""
params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000}
else:
if a["change"] == schemas.AlertDetectionType.change:
if a["change"] == schemas.AlertDetectionType.CHANGE:
if a["seriesId"] is not None:
sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
sub1 = f"SELECT (({subQ})-({sub2})) AS value"

View file

@ -60,7 +60,7 @@ def search_records(project_id: int, data: schemas.AssistRecordSearchPayloadSchem
if data.query is not None and len(data.query) > 0:
conditions.append("(users.name ILIKE %(query)s OR assist_records.name ILIKE %(query)s)")
params["query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
op=schemas.SearchEventOperator.CONTAINS)
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT COUNT(assist_records.record_id) OVER () AS count,
record_id, user_id, session_id, assist_records.created_at,

View file

@ -8,23 +8,23 @@ TABLE = "experimental.autocomplete"
def __get_autocomplete_table(value, project_id):
autocomplete_events = [schemas.FilterType.rev_id,
schemas.EventType.click,
schemas.FilterType.user_device,
schemas.FilterType.user_id,
schemas.FilterType.user_browser,
schemas.FilterType.user_os,
schemas.EventType.custom,
schemas.FilterType.user_country,
schemas.FilterType.user_city,
schemas.FilterType.user_state,
schemas.EventType.location,
schemas.EventType.input]
autocomplete_events = [schemas.FilterType.REV_ID,
schemas.EventType.CLICK,
schemas.FilterType.USER_DEVICE,
schemas.FilterType.USER_ID,
schemas.FilterType.USER_BROWSER,
schemas.FilterType.USER_OS,
schemas.EventType.CUSTOM,
schemas.FilterType.USER_COUNTRY,
schemas.FilterType.USER_CITY,
schemas.FilterType.USER_STATE,
schemas.EventType.LOCATION,
schemas.EventType.INPUT]
autocomplete_events.sort()
sub_queries = []
c_list = []
for e in autocomplete_events:
if e == schemas.FilterType.user_country:
if e == schemas.FilterType.USER_COUNTRY:
c_list = countries.get_country_code_autocomplete(value)
if len(c_list) > 0:
sub_queries.append(f"""(SELECT DISTINCT ON(value) '{e.value}' AS _type, value
@ -73,7 +73,7 @@ def __get_autocomplete_table(value, project_id):
def __generic_query(typename, value_length=None):
if typename == schemas.FilterType.user_country:
if typename == schemas.FilterType.USER_COUNTRY:
return f"""SELECT DISTINCT value, type
FROM {TABLE}
WHERE
@ -128,7 +128,7 @@ def __generic_autocomplete_metas(typename):
params = {"project_id": project_id, "value": helper.string_to_sql_like(text),
"svalue": helper.string_to_sql_like("^" + text)}
if typename == schemas.FilterType.user_country:
if typename == schemas.FilterType.USER_COUNTRY:
params["value"] = tuple(countries.get_country_code_autocomplete(text))
if len(params["value"]) == 0:
return []

View file

@ -159,14 +159,14 @@ def __get_table_of_urls(project_id: int, data: schemas.CardTable, user_id: int =
def __get_table_chart(project_id: int, data: schemas.CardTable, user_id: int):
supported = {
schemas.MetricOfTable.sessions: __get_table_of_sessions,
schemas.MetricOfTable.errors: __get_table_of_errors,
schemas.MetricOfTable.user_id: __get_table_of_user_ids,
schemas.MetricOfTable.issues: __get_table_of_issues,
schemas.MetricOfTable.SESSIONS: __get_table_of_sessions,
schemas.MetricOfTable.ERRORS: __get_table_of_errors,
schemas.MetricOfTable.USER_ID: __get_table_of_user_ids,
schemas.MetricOfTable.ISSUES: __get_table_of_issues,
schemas.MetricOfTable.user_browser: __get_table_of_browsers,
schemas.MetricOfTable.user_device: __get_table_of_devises,
schemas.MetricOfTable.user_country: __get_table_of_countries,
schemas.MetricOfTable.visited_url: __get_table_of_urls,
schemas.MetricOfTable.USER_DEVICE: __get_table_of_devises,
schemas.MetricOfTable.USER_COUNTRY: __get_table_of_countries,
schemas.MetricOfTable.VISITED_URL: __get_table_of_urls,
}
return supported.get(data.metric_of, not_supported)(project_id=project_id, data=data, user_id=user_id)
@ -178,12 +178,12 @@ def get_chart(project_id: int, data: schemas.CardSchema, user_id: int):
data=data.model_dump())
supported = {
schemas.MetricType.timeseries: __get_timeseries_chart,
schemas.MetricType.table: __get_table_chart,
schemas.MetricType.heat_map: __get_heat_map_chart,
schemas.MetricType.funnel: __get_funnel_chart,
schemas.MetricType.insights: __get_insights_chart,
schemas.MetricType.pathAnalysis: __get_path_analysis_chart
schemas.MetricType.TIMESERIES: __get_timeseries_chart,
schemas.MetricType.TABLE: __get_table_chart,
schemas.MetricType.HEAT_MAP: __get_heat_map_chart,
schemas.MetricType.FUNNEL: __get_funnel_chart,
schemas.MetricType.INSIGHTS: __get_insights_chart,
schemas.MetricType.PATH_ANALYSIS: __get_path_analysis_chart
}
return supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id)
@ -293,18 +293,18 @@ def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.Card
filters=filters
)
# ---- To make issues response close to the chart response
search_data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.events_count,
operator=schemas.MathOperator._greater,
search_data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
operator=schemas.MathOperator.GREATER,
value=[1]))
if len(data.start_point) == 0:
search_data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.location,
operator=schemas.SearchEventOperator._is_any,
search_data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
operator=schemas.SearchEventOperator.IS_ANY,
value=[]))
# ---- End
for s in data.excludes:
search_data.events.append(schemas.SessionSearchEventSchema2(type=s.type,
operator=schemas.SearchEventOperator._not_on,
operator=schemas.SearchEventOperator.NOT_ON,
value=s.value))
result = sessions.search_table_of_individual_issues(project_id=project_id, data=search_data)
return result
@ -313,15 +313,15 @@ def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.Card
def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
if data.is_predefined:
return not_supported()
if data.metric_of == schemas.MetricOfTable.issues:
if data.metric_of == schemas.MetricOfTable.ISSUES:
return __get_table_of_issues(project_id=project_id, user_id=user_id, data=data)
supported = {
schemas.MetricType.timeseries: not_supported,
schemas.MetricType.table: not_supported,
schemas.MetricType.heat_map: not_supported,
schemas.MetricType.funnel: __get_funnel_issues,
schemas.MetricType.insights: not_supported,
schemas.MetricType.pathAnalysis: __get_path_analysis_issues,
schemas.MetricType.TIMESERIES: not_supported,
schemas.MetricType.TABLE: not_supported,
schemas.MetricType.HEAT_MAP: not_supported,
schemas.MetricType.FUNNEL: __get_funnel_issues,
schemas.MetricType.INSIGHTS: not_supported,
schemas.MetricType.PATH_ANALYSIS: __get_path_analysis_issues,
}
return supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id)
@ -337,7 +337,7 @@ def __get_path_analysis_card_info(data: schemas.CardPathAnalysis):
def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
with pg_client.PostgresClient() as cur:
session_data = None
if data.metric_type == schemas.MetricType.heat_map:
if data.metric_type == schemas.MetricType.HEAT_MAP:
if data.session_id is not None:
session_data = {"sessionId": data.session_id}
else:
@ -370,7 +370,7 @@ def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
params = {"user_id": user_id, "project_id": project_id, **data.model_dump(), **_data}
params["default_config"] = json.dumps(data.default_config.model_dump())
params["card_info"] = None
if data.metric_type == schemas.MetricType.pathAnalysis:
if data.metric_type == schemas.MetricType.PATH_ANALYSIS:
params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))
query = """INSERT INTO metrics (project_id, user_id, name, is_public,
@ -433,9 +433,9 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
params["d_series_ids"] = tuple(d_series_ids)
params["card_info"] = None
params["session_data"] = json.dumps(metric["data"])
if data.metric_type == schemas.MetricType.pathAnalysis:
if data.metric_type == schemas.MetricType.PATH_ANALYSIS:
params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))
elif data.metric_type == schemas.MetricType.heat_map:
elif data.metric_type == schemas.MetricType.HEAT_MAP:
if data.session_id is not None:
params["session_data"] = json.dumps({"sessionId": data.session_id})
elif metric.get("data") and metric["data"].get("sessionId"):
@ -499,7 +499,7 @@ def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_ser
if data.query is not None and len(data.query) > 0:
constraints.append("(name ILIKE %(query)s OR owner.owner_email ILIKE %(query)s)")
params["query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
op=schemas.SearchEventOperator.CONTAINS)
with pg_client.PostgresClient() as cur:
sub_join = ""
if include_series:
@ -641,7 +641,7 @@ def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data:
for s in row["series"]:
s["filter"] = helper.old_search_payload_to_flat(s["filter"])
row = helper.dict_to_camel_case(row)
if row["metricType"] == schemas.MetricType.pathAnalysis:
if row["metricType"] == schemas.MetricType.PATH_ANALYSIS:
row = __get_path_analysis_attributes(row=row)
return row
@ -740,7 +740,7 @@ def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessi
return custom_metrics_predefined.get_metric(key=metric.metric_of,
project_id=project_id,
data=data.model_dump())
elif metric.metric_type == schemas.MetricType.heat_map:
elif metric.metric_type == schemas.MetricType.HEAT_MAP:
if raw_metric["data"] and raw_metric["data"].get("sessionId"):
return heatmaps.get_selected_session(project_id=project_id,
session_id=raw_metric["data"]["sessionId"])

View file

@ -427,18 +427,18 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n
if chart:
ch_sub_query += [f"timestamp >= generated_timestamp",
f"timestamp < generated_timestamp + %({step_size_name})s"]
if platform == schemas.PlatformType.mobile:
if platform == schemas.PlatformType.MOBILE:
ch_sub_query.append("user_device_type = 'mobile'")
elif platform == schemas.PlatformType.desktop:
elif platform == schemas.PlatformType.DESKTOP:
ch_sub_query.append("user_device_type = 'desktop'")
return ch_sub_query
def __get_sort_key(key):
return {
schemas.ErrorSort.occurrence: "max_datetime",
schemas.ErrorSort.users_count: "users",
schemas.ErrorSort.sessions_count: "sessions"
schemas.ErrorSort.OCCURRENCE: "max_datetime",
schemas.ErrorSort.USERS_COUNT: "users",
schemas.ErrorSort.SESSIONS_COUNT: "sessions"
}.get(key, 'max_datetime')
@ -450,7 +450,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
platform = None
for f in data.filters:
if f.type == schemas.FilterType.platform and len(f.value) > 0:
if f.type == schemas.FilterType.PLATFORM and len(f.value) > 0:
platform = f.value[0]
pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id")
pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
@ -479,7 +479,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
sort = __get_sort_key('datetime')
if data.sort is not None:
sort = __get_sort_key(data.sort)
order = schemas.SortOrderType.desc
order = schemas.SortOrderType.DESC
if data.order is not None:
order = data.order
extra_join = ""
@ -490,7 +490,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
"project_id": project_id,
"userId": user_id,
"step_size": step_size}
if data.status != schemas.ErrorStatus.all:
if data.status != schemas.ErrorStatus.ALL:
pg_sub_query.append("status = %(error_status)s")
params["error_status"] = data.status
if data.limit is not None and data.page is not None:
@ -509,7 +509,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
if data.query is not None and len(data.query) > 0:
pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
params["error_query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
op=schemas.SearchEventOperator.CONTAINS)
main_pg_query = f"""SELECT full_count,
error_id,

View file

@ -20,31 +20,31 @@ def _multiple_values(values, value_key="value"):
def __get_sql_operator(op: schemas.SearchEventOperator):
return {
schemas.SearchEventOperator._is: "=",
schemas.SearchEventOperator._is_any: "IN",
schemas.SearchEventOperator._on: "=",
schemas.SearchEventOperator._on_any: "IN",
schemas.SearchEventOperator._is_not: "!=",
schemas.SearchEventOperator._not_on: "!=",
schemas.SearchEventOperator._contains: "ILIKE",
schemas.SearchEventOperator._not_contains: "NOT ILIKE",
schemas.SearchEventOperator._starts_with: "ILIKE",
schemas.SearchEventOperator._ends_with: "ILIKE",
schemas.SearchEventOperator.IS: "=",
schemas.SearchEventOperator.IS_ANY: "IN",
schemas.SearchEventOperator.ON: "=",
schemas.SearchEventOperator.ON_ANY: "IN",
schemas.SearchEventOperator.IS_NOT: "!=",
schemas.SearchEventOperator.NOT_ON: "!=",
schemas.SearchEventOperator.CONTAINS: "ILIKE",
schemas.SearchEventOperator.NOT_CONTAINS: "NOT ILIKE",
schemas.SearchEventOperator.STARTS_WITH: "ILIKE",
schemas.SearchEventOperator.ENDS_WITH: "ILIKE",
}.get(op, "=")
def _isAny_opreator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._on_any, schemas.SearchEventOperator._is_any]
return op in [schemas.SearchEventOperator.ON_ANY, schemas.SearchEventOperator.IS_ANY]
def _isUndefined_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_undefined]
return op in [schemas.SearchEventOperator.IS_UNDEFINED]
def __is_negation_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_not,
schemas.SearchEventOperator._not_on,
schemas.SearchEventOperator._not_contains]
return op in [schemas.SearchEventOperator.IS_NOT,
schemas.SearchEventOperator.NOT_ON,
schemas.SearchEventOperator.NOT_CONTAINS]
def _multiple_conditions(condition, values, value_key="value", is_not=False):
@ -501,9 +501,9 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n
if time_constraint:
ch_sub_query += [f"{table_name}datetime >= toDateTime(%({startTime_arg_name})s/1000)",
f"{table_name}datetime < toDateTime(%({endTime_arg_name})s/1000)"]
if platform == schemas.PlatformType.mobile:
if platform == schemas.PlatformType.MOBILE:
ch_sub_query.append("user_device_type = 'mobile'")
elif platform == schemas.PlatformType.desktop:
elif platform == schemas.PlatformType.DESKTOP:
ch_sub_query.append("user_device_type = 'desktop'")
return ch_sub_query
@ -515,9 +515,9 @@ def __get_step_size(startTimestamp, endTimestamp, density):
def __get_sort_key(key):
return {
schemas.ErrorSort.occurrence: "max_datetime",
schemas.ErrorSort.users_count: "users",
schemas.ErrorSort.sessions_count: "sessions"
schemas.ErrorSort.OCCURRENCE: "max_datetime",
schemas.ErrorSort.USERS_COUNT: "users",
schemas.ErrorSort.SESSIONS_COUNT: "sessions"
}.get(key, 'max_datetime')
@ -534,9 +534,9 @@ def __get_basic_constraints_pg(platform=None, time_constraint=True, startTime_ar
if chart:
ch_sub_query += [f"timestamp >= generated_timestamp",
f"timestamp < generated_timestamp + %({step_size_name})s"]
if platform == schemas.PlatformType.mobile:
if platform == schemas.PlatformType.MOBILE:
ch_sub_query.append("user_device_type = 'mobile'")
elif platform == schemas.PlatformType.desktop:
elif platform == schemas.PlatformType.DESKTOP:
ch_sub_query.append("user_device_type = 'desktop'")
return ch_sub_query
@ -547,7 +547,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
platform = None
for f in data.filters:
if f.type == schemas.FilterType.platform and len(f.value) > 0:
if f.type == schemas.FilterType.PLATFORM and len(f.value) > 0:
platform = f.value[0]
ch_sessions_sub_query = __get_basic_constraints(platform, type_condition=False)
# ignore platform for errors table
@ -567,7 +567,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
if len(data.events) > 0:
errors_condition_count = 0
for i, e in enumerate(data.events):
if e.type == schemas.EventType.error:
if e.type == schemas.EventType.ERROR:
errors_condition_count += 1
is_any = _isAny_opreator(e.operator)
op = __get_sql_operator(e.operator)
@ -596,7 +596,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
f_k = f"f_value{i}"
params = {**params, f_k: f.value, **_multiple_values(f.value, value_key=f_k)}
op = __get_sql_operator(f.operator) \
if filter_type not in [schemas.FilterType.events_count] else f.operator
if filter_type not in [schemas.FilterType.EVENTS_COUNT] else f.operator
is_any = _isAny_opreator(f.operator)
is_undefined = _isUndefined_operator(f.operator)
if not is_any and not is_undefined and len(f.value) == 0:
@ -604,7 +604,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
is_not = False
if __is_negation_operator(f.operator):
is_not = True
if filter_type == schemas.FilterType.user_browser:
if filter_type == schemas.FilterType.USER_BROWSER:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.user_browser)')
else:
@ -612,14 +612,14 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
_multiple_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_mobile]:
elif filter_type in [schemas.FilterType.USER_OS, schemas.FilterType.USER_OS_MOBILE]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.user_os)')
else:
ch_sessions_sub_query.append(
_multiple_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_mobile]:
elif filter_type in [schemas.FilterType.USER_DEVICE, schemas.FilterType.USER_DEVICE_MOBILE]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.user_device)')
else:
@ -627,7 +627,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
_multiple_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_mobile]:
elif filter_type in [schemas.FilterType.USER_COUNTRY, schemas.FilterType.USER_COUNTRY_MOBILE]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.user_country)')
else:
@ -636,7 +636,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_source]:
elif filter_type in [schemas.FilterType.UTM_SOURCE]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.utm_source)')
elif is_undefined:
@ -646,7 +646,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
_multiple_conditions(f's.utm_source {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_medium]:
elif filter_type in [schemas.FilterType.UTM_MEDIUM]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.utm_medium)')
elif is_undefined:
@ -655,7 +655,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
ch_sessions_sub_query.append(
_multiple_conditions(f's.utm_medium {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_campaign]:
elif filter_type in [schemas.FilterType.UTM_CAMPAIGN]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.utm_campaign)')
elif is_undefined:
@ -665,7 +665,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
_multiple_conditions(f's.utm_campaign {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.duration:
elif filter_type == schemas.FilterType.DURATION:
if len(f.value) > 0 and f.value[0] is not None:
ch_sessions_sub_query.append("s.duration >= %(minDuration)s")
params["minDuration"] = f.value[0]
@ -673,14 +673,14 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
ch_sessions_sub_query.append("s.duration <= %(maxDuration)s")
params["maxDuration"] = f.value[1]
elif filter_type == schemas.FilterType.referrer:
elif filter_type == schemas.FilterType.REFERRER:
# extra_from += f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"
if is_any:
referrer_constraint = 'isNotNull(s.base_referrer)'
else:
referrer_constraint = _multiple_conditions(f"s.base_referrer {op} %({f_k})s", f.value,
is_not=is_not, value_key=f_k)
elif filter_type == schemas.FilterType.metadata:
elif filter_type == schemas.FilterType.METADATA:
# get metadata list only if you need it
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
@ -696,7 +696,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
f"s.{metadata.index_to_colname(meta_keys[f.source])} {op} toString(%({f_k})s)",
f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]:
elif filter_type in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.user_id)')
elif is_undefined:
@ -705,8 +705,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
ch_sessions_sub_query.append(
_multiple_conditions(f"s.user_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_mobile]:
elif filter_type in [schemas.FilterType.USER_ANONYMOUS_ID,
schemas.FilterType.USER_ANONYMOUS_ID_MOBILE]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.user_anonymous_id)')
elif is_undefined:
@ -717,7 +717,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_mobile]:
elif filter_type in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE]:
if is_any:
ch_sessions_sub_query.append('isNotNull(s.rev_id)')
elif is_undefined:
@ -727,7 +727,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
_multiple_conditions(f"s.rev_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.platform:
elif filter_type == schemas.FilterType.PLATFORM:
# op = __get_sql_operator(f.operator)
ch_sessions_sub_query.append(
_multiple_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
@ -743,7 +743,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
# if is_not:
# extra_constraints[-1] = f"not({extra_constraints[-1]})"
# ss_constraints[-1] = f"not({ss_constraints[-1]})"
elif filter_type == schemas.FilterType.events_count:
elif filter_type == schemas.FilterType.EVENTS_COUNT:
ch_sessions_sub_query.append(
_multiple_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))

View file

@ -61,7 +61,7 @@ def __get_grouped_clickrage(rows, session_id, project_id):
def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
with pg_client.PostgresClient() as cur:
rows = []
if event_type is None or event_type == schemas.EventType.click:
if event_type is None or event_type == schemas.EventType.CLICK:
cur.execute(cur.mogrify("""\
SELECT
c.*,
@ -75,7 +75,7 @@ def get_by_session_id(session_id, project_id, group_clickrage=False, event_type:
rows += cur.fetchall()
if group_clickrage:
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
if event_type is None or event_type == schemas.EventType.input:
if event_type is None or event_type == schemas.EventType.INPUT:
cur.execute(cur.mogrify("""
SELECT
i.*,
@ -87,7 +87,7 @@ def get_by_session_id(session_id, project_id, group_clickrage=False, event_type:
{"project_id": project_id, "session_id": session_id})
)
rows += cur.fetchall()
if event_type is None or event_type == schemas.EventType.location:
if event_type is None or event_type == schemas.EventType.LOCATION:
cur.execute(cur.mogrify("""\
SELECT
l.*,
@ -121,26 +121,26 @@ def _search_tags(project_id, value, key=None, source=None):
class EventType:
CLICK = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label")
INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label")
LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", column="path")
CUSTOM = Event(ui_type=schemas.EventType.custom, table="events_common.customs", column="name")
REQUEST = Event(ui_type=schemas.EventType.request, table="events_common.requests", column="path")
GRAPHQL = Event(ui_type=schemas.EventType.graphql, table="events.graphql", column="name")
STATEACTION = Event(ui_type=schemas.EventType.state_action, table="events.state_actions", column="name")
TAG = Event(ui_type=schemas.EventType.tag, table="events.tags", column="tag_id")
ERROR = Event(ui_type=schemas.EventType.error, table="events.errors",
CLICK = Event(ui_type=schemas.EventType.CLICK, table="events.clicks", column="label")
INPUT = Event(ui_type=schemas.EventType.INPUT, table="events.inputs", column="label")
LOCATION = Event(ui_type=schemas.EventType.LOCATION, table="events.pages", column="path")
CUSTOM = Event(ui_type=schemas.EventType.CUSTOM, table="events_common.customs", column="name")
REQUEST = Event(ui_type=schemas.EventType.REQUEST, table="events_common.requests", column="path")
GRAPHQL = Event(ui_type=schemas.EventType.GRAPHQL, table="events.graphql", column="name")
STATEACTION = Event(ui_type=schemas.EventType.STATE_ACTION, table="events.state_actions", column="name")
TAG = Event(ui_type=schemas.EventType.TAG, table="events.tags", column="tag_id")
ERROR = Event(ui_type=schemas.EventType.ERROR, table="events.errors",
column=None) # column=None because errors are searched by name or message
METADATA = Event(ui_type=schemas.FilterType.metadata, table="public.sessions", column=None)
METADATA = Event(ui_type=schemas.FilterType.METADATA, table="public.sessions", column=None)
# MOBILE
CLICK_MOBILE = Event(ui_type=schemas.EventType.click_mobile, table="events_ios.taps", column="label")
INPUT_MOBILE = Event(ui_type=schemas.EventType.input_mobile, table="events_ios.inputs", column="label")
VIEW_MOBILE = Event(ui_type=schemas.EventType.view_mobile, table="events_ios.views", column="name")
SWIPE_MOBILE = Event(ui_type=schemas.EventType.swipe_mobile, table="events_ios.swipes", column="label")
CUSTOM_MOBILE = Event(ui_type=schemas.EventType.custom_mobile, table="events_common.customs", column="name")
REQUEST_MOBILE = Event(ui_type=schemas.EventType.request_mobile, table="events_common.requests", column="path")
CRASH_MOBILE = Event(ui_type=schemas.EventType.error_mobile, table="events_common.crashes",
column=None) # column=None because errors are searched by name or message
CLICK_MOBILE = Event(ui_type=schemas.EventType.CLICK_MOBILE, table="events_ios.taps", column="label")
INPUT_MOBILE = Event(ui_type=schemas.EventType.INPUT_MOBILE, table="events_ios.inputs", column="label")
VIEW_MOBILE = Event(ui_type=schemas.EventType.VIEW_MOBILE, table="events_ios.views", column="name")
SWIPE_MOBILE = Event(ui_type=schemas.EventType.SWIPE_MOBILE, table="events_ios.swipes", column="label")
CUSTOM_MOBILE = Event(ui_type=schemas.EventType.CUSTOM_MOBILE, table="events_common.customs", column="name")
REQUEST_MOBILE = Event(ui_type=schemas.EventType.REQUEST_MOBILE, table="events_common.requests", column="path")
CRASH_MOBILE = Event(ui_type=schemas.EventType.ERROR_MOBILE, table="events_common.crashes",
column=None) # column=None because errors are searched by name or message
SUPPORTED_TYPES = {

View file

@ -165,27 +165,27 @@ if not config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
no_platform = True
no_location = True
for f in data.filters:
if f.type == schemas.FilterType.platform:
if f.type == schemas.FilterType.PLATFORM:
no_platform = False
break
for f in data.events:
if f.type == schemas.EventType.location:
if f.type == schemas.EventType.LOCATION:
no_location = False
if len(f.value) == 0:
f.operator = schemas.SearchEventOperator._is_any
f.operator = schemas.SearchEventOperator.IS_ANY
break
if no_platform:
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.platform,
value=[schemas.PlatformType.desktop],
operator=schemas.SearchEventOperator._is))
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.PLATFORM,
value=[schemas.PlatformType.DESKTOP],
operator=schemas.SearchEventOperator.IS))
if no_location:
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.location,
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
value=[],
operator=schemas.SearchEventOperator._is_any))
operator=schemas.SearchEventOperator.IS_ANY))
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.events_count,
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
value=[0],
operator=schemas.MathOperator._greater))
operator=schemas.MathOperator.GREATER))
full_args, query_part = sessions.search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=data.bookmarked, issue=None,
@ -194,7 +194,7 @@ if not config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
if len(exclude_sessions) > 0:
query_part += "\n AND session_id NOT IN %(exclude_sessions)s"
with pg_client.PostgresClient() as cur:
data.order = schemas.SortOrderType.desc
data.order = schemas.SortOrderType.DESC
data.sort = 'duration'
main_query = cur.mogrify(f"""SELECT *
FROM (SELECT {SESSION_PROJECTION_COLS}
@ -295,27 +295,27 @@ else:
no_platform = True
no_location = True
for f in data.filters:
if f.type == schemas.FilterType.platform:
if f.type == schemas.FilterType.PLATFORM:
no_platform = False
break
for f in data.events:
if f.type == schemas.EventType.location:
if f.type == schemas.EventType.LOCATION:
no_location = False
if len(f.value) == 0:
f.operator = schemas.SearchEventOperator._is_any
f.operator = schemas.SearchEventOperator.IS_ANY
break
if no_platform:
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.platform,
value=[schemas.PlatformType.desktop],
operator=schemas.SearchEventOperator._is))
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.PLATFORM,
value=[schemas.PlatformType.DESKTOP],
operator=schemas.SearchEventOperator.IS))
if no_location:
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.location,
data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
value=[],
operator=schemas.SearchEventOperator._is_any))
operator=schemas.SearchEventOperator.IS_ANY))
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.events_count,
data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
value=[0],
operator=schemas.MathOperator._greater))
operator=schemas.MathOperator.GREATER))
full_args, query_part = sessions.search_query_parts_ch(data=data, error_status=None, errors_only=False,
favorite_only=data.bookmarked, issue=None,
@ -324,7 +324,7 @@ else:
if len(exclude_sessions) > 0:
query_part += "\n AND session_id NOT IN (%(exclude_sessions)s)"
with ch_client.ClickHouseClient() as cur:
data.order = schemas.SortOrderType.desc
data.order = schemas.SortOrderType.DESC
data.sort = 'duration'
main_query = cur.format(f"""SELECT *
FROM (SELECT {SESSION_PROJECTION_COLS}
@ -360,7 +360,7 @@ else:
logger.info("couldn't find an existing replay after 3 iterations for heatmap")
session['events'] = events.get_by_session_id(project_id=project_id, session_id=session["session_id"],
event_type=schemas.EventType.location)
event_type=schemas.EventType.LOCATION)
else:
return None

View file

@ -9,52 +9,52 @@ def get_global_integrations_status(tenant_id, user_id, project_id):
SELECT EXISTS((SELECT 1
FROM public.oauth_authentication
WHERE user_id = %(user_id)s
AND provider = 'github')) AS {schemas.IntegrationType.github.value},
AND provider = 'github')) AS {schemas.IntegrationType.GITHUB.value},
EXISTS((SELECT 1
FROM public.jira_cloud
WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.jira.value},
WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.JIRA.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='bugsnag')) AS {schemas.IntegrationType.bugsnag.value},
AND provider='bugsnag')) AS {schemas.IntegrationType.BUGSNAG.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='cloudwatch')) AS {schemas.IntegrationType.cloudwatch.value},
AND provider='cloudwatch')) AS {schemas.IntegrationType.CLOUDWATCH.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='datadog')) AS {schemas.IntegrationType.datadog.value},
AND provider='datadog')) AS {schemas.IntegrationType.DATADOG.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='newrelic')) AS {schemas.IntegrationType.newrelic.value},
AND provider='newrelic')) AS {schemas.IntegrationType.NEWRELIC.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='rollbar')) AS {schemas.IntegrationType.rollbar.value},
AND provider='rollbar')) AS {schemas.IntegrationType.ROLLBAR.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='sentry')) AS {schemas.IntegrationType.sentry.value},
AND provider='sentry')) AS {schemas.IntegrationType.SENTRY.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='stackdriver')) AS {schemas.IntegrationType.stackdriver.value},
AND provider='stackdriver')) AS {schemas.IntegrationType.STACKDRIVER.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='sumologic')) AS {schemas.IntegrationType.sumologic.value},
AND provider='sumologic')) AS {schemas.IntegrationType.SUMOLOGIC.value},
EXISTS((SELECT 1
FROM public.integrations
WHERE project_id=%(project_id)s
AND provider='elasticsearch')) AS {schemas.IntegrationType.elasticsearch.value},
AND provider='elasticsearch')) AS {schemas.IntegrationType.ELASTICSEARCH.value},
EXISTS((SELECT 1
FROM public.webhooks
WHERE type='slack' AND tenant_id=%(tenant_id)s AND deleted_at ISNULL)) AS {schemas.IntegrationType.slack.value},
WHERE type='slack' AND tenant_id=%(tenant_id)s AND deleted_at ISNULL)) AS {schemas.IntegrationType.SLACK.value},
EXISTS((SELECT 1
FROM public.webhooks
WHERE type='msteams' AND tenant_id=%(tenant_id)s AND deleted_at ISNULL)) AS {schemas.IntegrationType.ms_teams.value};""",
WHERE type='msteams' AND tenant_id=%(tenant_id)s AND deleted_at ISNULL)) AS {schemas.IntegrationType.MS_TEAMS.value};""",
{"user_id": user_id, "tenant_id": tenant_id, "project_id": project_id})
)
current_integrations = cur.fetchone()

View file

@ -201,7 +201,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
count = count[0]["count"]
results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.count
results["unit"] = schemas.TemplatePredefinedUnits.COUNT
return results
@ -1067,7 +1067,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-
FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages
WHERE {" AND ".join(ch_sub_query)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.millisecond}
return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.MILLISECOND}
def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1147,7 +1147,7 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(
quantiles[0]["values"][i]) else 0)} for i, v in enumerate(quantiles_keys)
],
"extremeValues": [{"count": 0}],
"unit": schemas.TemplatePredefinedUnits.millisecond
"unit": schemas.TemplatePredefinedUnits.MILLISECOND
}
if len(rows) > 0:
rows = helper.list_to_camel_case(rows)
@ -1376,7 +1376,7 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
end_time=endTimestamp,
density=density,
neutral={"value": 0})),
"unit": schemas.TemplatePredefinedUnits.memory}
"unit": schemas.TemplatePredefinedUnits.MEMORY}
def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1409,7 +1409,7 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
end_time=endTimestamp,
density=density,
neutral={"value": 0})),
"unit": schemas.TemplatePredefinedUnits.percentage}
"unit": schemas.TemplatePredefinedUnits.PERCENTAGE}
def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1442,7 +1442,7 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
end_time=endTimestamp,
density=density,
neutral={"value": 0})),
"unit": schemas.TemplatePredefinedUnits.frame}
"unit": schemas.TemplatePredefinedUnits.FRAME}
def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -1511,7 +1511,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
density=density,
neutral={"value": 0}),
"browsers": browsers,
"unit": schemas.TemplatePredefinedUnits.count}
"unit": schemas.TemplatePredefinedUnits.COUNT}
return result
@ -1652,7 +1652,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
WHERE {" AND ".join(ch_sub_query)};"""
avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0
return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond}
return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.MILLISECOND}
def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@ -2432,7 +2432,7 @@ def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(d
if len(rows) > 0:
previous = helper.dict_to_camel_case(rows[0])
results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.count
results["unit"] = schemas.TemplatePredefinedUnits.COUNT
return results
@ -2623,7 +2623,7 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_
end_time=endTimestamp,
density=density, neutral={"value": 0})
result["chart"] = rows
result["unit"] = schemas.TemplatePredefinedUnits.count
result["unit"] = schemas.TemplatePredefinedUnits.COUNT
return helper.dict_to_camel_case(result)
@ -2847,5 +2847,5 @@ def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
count = count[0]["count"]
results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
results["unit"] = schemas.TemplatePredefinedUnits.count
results["unit"] = schemas.TemplatePredefinedUnits.COUNT
return results

View file

@ -72,10 +72,10 @@ def __transform_journey(rows, reverse_path=False):
JOURNEY_TYPES = {
schemas.ProductAnalyticsSelectedEventType.location: {"eventType": "LOCATION", "column": "url_path"},
schemas.ProductAnalyticsSelectedEventType.click: {"eventType": "CLICK", "column": "label"},
schemas.ProductAnalyticsSelectedEventType.input: {"eventType": "INPUT", "column": "label"},
schemas.ProductAnalyticsSelectedEventType.custom_event: {"eventType": "CUSTOM", "column": "name"}
schemas.ProductAnalyticsSelectedEventType.LOCATION: {"eventType": "LOCATION", "column": "url_path"},
schemas.ProductAnalyticsSelectedEventType.CLICK: {"eventType": "CLICK", "column": "label"},
schemas.ProductAnalyticsSelectedEventType.INPUT: {"eventType": "INPUT", "column": "label"},
schemas.ProductAnalyticsSelectedEventType.CUSTOM_EVENT: {"eventType": "CUSTOM", "column": "name"}
}
@ -92,9 +92,9 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
start_points_conditions = []
step_0_conditions = []
if len(data.metric_value) == 0:
data.metric_value.append(schemas.ProductAnalyticsSelectedEventType.location)
sub_events.append({"column": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["column"],
"eventType": schemas.ProductAnalyticsSelectedEventType.location.value})
data.metric_value.append(schemas.ProductAnalyticsSelectedEventType.LOCATION)
sub_events.append({"column": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.LOCATION]["column"],
"eventType": schemas.ProductAnalyticsSelectedEventType.LOCATION.value})
else:
for v in data.metric_value:
if JOURNEY_TYPES.get(v):
@ -161,49 +161,49 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
continue
# ---- meta-filters
if f.type == schemas.FilterType.user_browser:
if f.type == schemas.FilterType.USER_BROWSER:
if is_any:
sessions_conditions.append('isNotNull(user_browser)')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.user_os]:
elif f.type in [schemas.FilterType.USER_OS]:
if is_any:
sessions_conditions.append('isNotNull(user_os)')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.user_device]:
elif f.type in [schemas.FilterType.USER_DEVICE]:
if is_any:
sessions_conditions.append('isNotNull(user_device)')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.user_country]:
elif f.type in [schemas.FilterType.USER_COUNTRY]:
if is_any:
sessions_conditions.append('isNotNull(user_country)')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type == schemas.FilterType.user_city:
elif f.type == schemas.FilterType.USER_CITY:
if is_any:
sessions_conditions.append('isNotNull(user_city)')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type == schemas.FilterType.user_state:
elif f.type == schemas.FilterType.USER_STATE:
if is_any:
sessions_conditions.append('isNotNull(user_state)')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.utm_source]:
elif f.type in [schemas.FilterType.UTM_SOURCE]:
if is_any:
sessions_conditions.append('isNotNull(utm_source)')
elif is_undefined:
@ -213,7 +213,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sh.multi_conditions(f'utm_source {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif f.type in [schemas.FilterType.utm_medium]:
elif f.type in [schemas.FilterType.UTM_MEDIUM]:
if is_any:
sessions_conditions.append('isNotNull(utm_medium)')
elif is_undefined:
@ -223,7 +223,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sh.multi_conditions(f'utm_medium {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif f.type in [schemas.FilterType.utm_campaign]:
elif f.type in [schemas.FilterType.UTM_CAMPAIGN]:
if is_any:
sessions_conditions.append('isNotNull(utm_campaign)')
elif is_undefined:
@ -233,14 +233,14 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sh.multi_conditions(f'utm_campaign {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif f.type == schemas.FilterType.duration:
elif f.type == schemas.FilterType.DURATION:
if len(f.value) > 0 and f.value[0] is not None:
sessions_conditions.append("duration >= %(minDuration)s")
extra_values["minDuration"] = f.value[0]
if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0:
sessions_conditions.append("duration <= %(maxDuration)s")
extra_values["maxDuration"] = f.value[1]
elif f.type == schemas.FilterType.referrer:
elif f.type == schemas.FilterType.REFERRER:
# extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"
if is_any:
sessions_conditions.append('isNotNull(base_referrer)')
@ -248,7 +248,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sessions_conditions.append(
sh.multi_conditions(f"base_referrer {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif f.type == schemas.FilterType.metadata:
elif f.type == schemas.FilterType.METADATA:
# get metadata list only if you need it
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
@ -264,7 +264,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
f"{metadata.index_to_colname(meta_keys[f.source])} {op} toString(%({f_k})s)",
f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]:
elif f.type in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE]:
if is_any:
sessions_conditions.append('isNotNull(user_id)')
elif is_undefined:
@ -274,8 +274,8 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sh.multi_conditions(f"user_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif f.type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_mobile]:
elif f.type in [schemas.FilterType.USER_ANONYMOUS_ID,
schemas.FilterType.USER_ANONYMOUS_ID_MOBILE]:
if is_any:
sessions_conditions.append('isNotNull(user_anonymous_id)')
elif is_undefined:
@ -285,7 +285,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sh.multi_conditions(f"user_anonymous_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif f.type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_mobile]:
elif f.type in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE]:
if is_any:
sessions_conditions.append('isNotNull(rev_id)')
elif is_undefined:
@ -294,13 +294,13 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sessions_conditions.append(
sh.multi_conditions(f"rev_id {op} toString(%({f_k})s)", f.value, is_not=is_not, value_key=f_k))
elif f.type == schemas.FilterType.platform:
elif f.type == schemas.FilterType.PLATFORM:
# op = __ sh.get_sql_operator(f.operator)
sessions_conditions.append(
sh.multi_conditions(f"user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif f.type == schemas.FilterType.issue:
elif f.type == schemas.FilterType.ISSUE:
if is_any:
sessions_conditions.append("array_length(issue_types, 1) > 0")
else:
@ -308,7 +308,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sh.multi_conditions(f"has(issue_types,%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif f.type == schemas.FilterType.events_count:
elif f.type == schemas.FilterType.EVENTS_COUNT:
sessions_conditions.append(
sh.multi_conditions(f"events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))

View file

@ -5,7 +5,7 @@ import schemas
from chalicelib.core import permissions
from chalicelib.utils.storage import StorageClient
SCOPES = SecurityScopes([schemas.Permissions.dev_tools])
SCOPES = SecurityScopes([schemas.Permissions.DEV_TOOLS])
def __get_devtools_keys(project_id, session_id):

View file

@ -58,23 +58,23 @@ SESSION_PROJECTION_COLS_CH_MAP = """\
def __get_sql_operator(op: schemas.SearchEventOperator):
return {
schemas.SearchEventOperator._is: "=",
schemas.SearchEventOperator._is_any: "IN",
schemas.SearchEventOperator._on: "=",
schemas.SearchEventOperator._on_any: "IN",
schemas.SearchEventOperator._is_not: "!=",
schemas.SearchEventOperator._not_on: "!=",
schemas.SearchEventOperator._contains: "ILIKE",
schemas.SearchEventOperator._not_contains: "NOT ILIKE",
schemas.SearchEventOperator._starts_with: "ILIKE",
schemas.SearchEventOperator._ends_with: "ILIKE",
schemas.SearchEventOperator.IS: "=",
schemas.SearchEventOperator.IS_ANY: "IN",
schemas.SearchEventOperator.ON: "=",
schemas.SearchEventOperator.ON_ANY: "IN",
schemas.SearchEventOperator.IS_NOT: "!=",
schemas.SearchEventOperator.NOT_ON: "!=",
schemas.SearchEventOperator.CONTAINS: "ILIKE",
schemas.SearchEventOperator.NOT_CONTAINS: "NOT ILIKE",
schemas.SearchEventOperator.STARTS_WITH: "ILIKE",
schemas.SearchEventOperator.ENDS_WITH: "ILIKE",
}.get(op, "=")
def __is_negation_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_not,
schemas.SearchEventOperator._not_on,
schemas.SearchEventOperator._not_contains]
return op in [schemas.SearchEventOperator.IS_NOT,
schemas.SearchEventOperator.NOT_ON,
schemas.SearchEventOperator.NOT_CONTAINS]
def __reverse_sql_operator(op):
@ -99,16 +99,16 @@ def _multiple_values(values, value_key="value"):
def _isAny_opreator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._on_any, schemas.SearchEventOperator._is_any]
return op in [schemas.SearchEventOperator.ON_ANY, schemas.SearchEventOperator.IS_ANY]
def _isUndefined_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._is_undefined]
return op in [schemas.SearchEventOperator.IS_UNDEFINED]
# This function executes the query and return result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False,
error_status=schemas.ErrorStatus.ALL, count_only=False, issue=None, ids_only=False,
platform="web"):
if data.bookmarked:
data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project_id, user_id)
@ -143,12 +143,12 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
elif data.group_by_user:
g_sort = "count(full_sessions)"
if data.order is None:
data.order = schemas.SortOrderType.desc.value
data.order = schemas.SortOrderType.DESC.value
else:
data.order = data.order
if data.sort is not None and data.sort != 'sessionsCount':
sort = helper.key_to_snake_case(data.sort)
g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})"
g_sort = f"{'MIN' if data.order == schemas.SortOrderType.DESC else 'MAX'}({sort})"
else:
sort = 'start_ts'
@ -179,7 +179,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
full_args)
else:
if data.order is None:
data.order = schemas.SortOrderType.desc.value
data.order = schemas.SortOrderType.DESC.value
else:
data.order = data.order
sort = 'session_id'
@ -251,25 +251,25 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
density=density))
extra_event = None
if metric_of == schemas.MetricOfTable.visited_url:
if metric_of == schemas.MetricOfTable.VISITED_URL:
extra_event = f"""SELECT DISTINCT ev.session_id, ev.url_path
FROM {exp_ch_helper.get_main_events_table(data.startTimestamp)} AS ev
WHERE ev.datetime >= toDateTime(%(startDate)s / 1000)
AND ev.datetime <= toDateTime(%(endDate)s / 1000)
AND ev.project_id = %(project_id)s
AND ev.event_type = 'LOCATION'"""
elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is))
elif metric_of == schemas.MetricOfTable.ISSUES and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.ISSUE,
operator=schemas.SearchEventOperator.IS))
full_args, query_part = search_query_parts_ch(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None, extra_event=extra_event)
full_args["step_size"] = step_size
sessions = []
with ch_client.ClickHouseClient() as cur:
if metric_type == schemas.MetricType.timeseries:
if view_type == schemas.MetricTimeseriesViewType.line_chart:
if metric_of == schemas.MetricOfTimeseries.session_count:
if metric_type == schemas.MetricType.TIMESERIES:
if view_type == schemas.MetricTimeseriesViewType.LINE_CHART:
if metric_of == schemas.MetricOfTimeseries.SESSION_COUNT:
query = f"""SELECT toUnixTimestamp(
toStartOfInterval(processed_sessions.datetime, INTERVAL %(step_size)s second)
) * 1000 AS timestamp,
@ -279,7 +279,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
{query_part}) AS processed_sessions
GROUP BY timestamp
ORDER BY timestamp;"""
elif metric_of == schemas.MetricOfTimeseries.user_count:
elif metric_of == schemas.MetricOfTimeseries.USER_COUNT:
query = f"""SELECT toUnixTimestamp(
toStartOfInterval(processed_sessions.datetime, INTERVAL %(step_size)s second)
) * 1000 AS timestamp,
@ -302,12 +302,12 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
logging.debug(main_query)
logging.debug("--------------------")
sessions = cur.execute(main_query)
if view_type == schemas.MetricTimeseriesViewType.line_chart:
if view_type == schemas.MetricTimeseriesViewType.LINE_CHART:
sessions = metrics.__complete_missing_steps(start_time=data.startTimestamp, end_time=data.endTimestamp,
density=density, neutral={"count": 0}, rows=sessions)
else:
sessions = sessions[0]["count"] if len(sessions) > 0 else 0
elif metric_type == schemas.MetricType.table:
elif metric_type == schemas.MetricType.TABLE:
full_args["limit_s"] = 0
full_args["limit_e"] = 200
if isinstance(metric_of, schemas.MetricOfTable):
@ -315,16 +315,16 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
extra_col = "s.user_id"
extra_where = ""
pre_query = ""
if metric_of == schemas.MetricOfTable.user_country:
if metric_of == schemas.MetricOfTable.USER_COUNTRY:
main_col = "user_country"
extra_col = "s.user_country"
elif metric_of == schemas.MetricOfTable.user_device:
elif metric_of == schemas.MetricOfTable.USER_DEVICE:
main_col = "user_device"
extra_col = "s.user_device"
elif metric_of == schemas.MetricOfTable.user_browser:
main_col = "user_browser"
extra_col = "s.user_browser"
elif metric_of == schemas.MetricOfTable.issues:
elif metric_of == schemas.MetricOfTable.ISSUES:
main_col = "issue"
extra_col = f"arrayJoin(s.issue_types) AS {main_col}"
if len(metric_value) > 0:
@ -334,7 +334,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
extra_where.append(f"{main_col} = %({arg_name})s")
full_args[arg_name] = metric_value[i]
extra_where = f"WHERE ({' OR '.join(extra_where)})"
elif metric_of == schemas.MetricOfTable.visited_url:
elif metric_of == schemas.MetricOfTable.VISITED_URL:
main_col = "url_path"
extra_col = "s.url_path"
main_query = cur.format(f"""{pre_query}
@ -373,7 +373,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
extra_event = None
extra_deduplication = []
extra_conditions = None
if metric_of == schemas.MetricOfTable.visited_url:
if metric_of == schemas.MetricOfTable.VISITED_URL:
extra_event = f"""SELECT DISTINCT ev.session_id, ev.url_path
FROM {exp_ch_helper.get_main_events_table(data.startTimestamp)} AS ev
WHERE ev.datetime >= toDateTime(%(startDate)s / 1000)
@ -383,7 +383,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
extra_deduplication.append("url_path")
extra_conditions = {}
for e in data.events:
if e.type == schemas.EventType.location:
if e.type == schemas.EventType.LOCATION:
if e.operator not in extra_conditions:
extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({
"type": e.type,
@ -397,9 +397,9 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
extra_conditions[e.operator].value.append(v)
extra_conditions = list(extra_conditions.values())
elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is))
elif metric_of == schemas.MetricOfTable.ISSUES and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.ISSUE,
operator=schemas.SearchEventOperator.IS))
full_args, query_part = search_query_parts_ch(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None, extra_event=extra_event,
@ -416,16 +416,16 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
main_col = "user_id"
extra_col = "s.user_id"
extra_where = ""
if metric_of == schemas.MetricOfTable.user_country:
if metric_of == schemas.MetricOfTable.USER_COUNTRY:
main_col = "user_country"
extra_col = "s.user_country"
elif metric_of == schemas.MetricOfTable.user_device:
elif metric_of == schemas.MetricOfTable.USER_DEVICE:
main_col = "user_device"
extra_col = "s.user_device"
elif metric_of == schemas.MetricOfTable.user_browser:
main_col = "user_browser"
extra_col = "s.user_browser"
elif metric_of == schemas.MetricOfTable.issues:
elif metric_of == schemas.MetricOfTable.ISSUES:
main_col = "issue"
extra_col = f"arrayJoin(s.issue_types) AS {main_col}"
if len(metric_value) > 0:
@ -435,11 +435,11 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
extra_where.append(f"{main_col} = %({arg_name})s")
full_args[arg_name] = metric_value[i]
extra_where = f"WHERE ({' OR '.join(extra_where)})"
elif metric_of == schemas.MetricOfTable.visited_url:
elif metric_of == schemas.MetricOfTable.VISITED_URL:
main_col = "url_path"
extra_col = "s.url_path"
if metric_format == schemas.MetricExtendedFormatType.session_count:
if metric_format == schemas.MetricExtendedFormatType.SESSION_COUNT:
main_query = f"""SELECT COUNT(DISTINCT {main_col}) OVER () AS main_count,
{main_col} AS name,
count(DISTINCT session_id) AS session_count,
@ -528,44 +528,44 @@ def search_table_of_individual_issues(data: schemas.SessionsSearchPayloadSchema,
def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.request_details,
schemas.EventType.graphql] \
or event.type in [schemas.PerformanceEventType.location_dom_complete,
schemas.PerformanceEventType.location_largest_contentful_paint_time,
schemas.PerformanceEventType.location_ttfb,
schemas.PerformanceEventType.location_avg_cpu_load,
schemas.PerformanceEventType.location_avg_memory_usage
return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS,
schemas.EventType.GRAPHQL] \
or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,
schemas.PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME,
schemas.PerformanceEventType.LOCATION_TTFB,
schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD,
schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE
] and (event.source is None or len(event.source) == 0) \
or event.type in [schemas.EventType.request_details, schemas.EventType.graphql] and (
or event.type in [schemas.EventType.REQUEST_DETAILS, schemas.EventType.GRAPHQL] and (
event.filters is None or len(event.filters) == 0))
def __get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEventType], platform="web"):
defs = {
schemas.EventType.click: "CLICK",
schemas.EventType.input: "INPUT",
schemas.EventType.location: "LOCATION",
schemas.PerformanceEventType.location_dom_complete: "LOCATION",
schemas.PerformanceEventType.location_largest_contentful_paint_time: "LOCATION",
schemas.PerformanceEventType.location_ttfb: "LOCATION",
schemas.EventType.custom: "CUSTOM",
schemas.EventType.request: "REQUEST",
schemas.EventType.request_details: "REQUEST",
schemas.PerformanceEventType.fetch_failed: "REQUEST",
schemas.EventType.state_action: "STATEACTION",
schemas.EventType.error: "ERROR",
schemas.PerformanceEventType.location_avg_cpu_load: 'PERFORMANCE',
schemas.PerformanceEventType.location_avg_memory_usage: 'PERFORMANCE'
schemas.EventType.CLICK: "CLICK",
schemas.EventType.INPUT: "INPUT",
schemas.EventType.LOCATION: "LOCATION",
schemas.PerformanceEventType.LOCATION_DOM_COMPLETE: "LOCATION",
schemas.PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME: "LOCATION",
schemas.PerformanceEventType.LOCATION_TTFB: "LOCATION",
schemas.EventType.CUSTOM: "CUSTOM",
schemas.EventType.REQUEST: "REQUEST",
schemas.EventType.REQUEST_DETAILS: "REQUEST",
schemas.PerformanceEventType.FETCH_FAILED: "REQUEST",
schemas.EventType.STATE_ACTION: "STATEACTION",
schemas.EventType.ERROR: "ERROR",
schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD: 'PERFORMANCE',
schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE: 'PERFORMANCE'
}
defs_mobile = {
schemas.EventType.click: "TAP",
schemas.EventType.input: "INPUT",
schemas.EventType.location: "VIEW",
schemas.EventType.custom: "CUSTOM",
schemas.EventType.request: "REQUEST",
schemas.EventType.request_details: "REQUEST",
schemas.PerformanceEventType.fetch_failed: "REQUEST",
schemas.EventType.error: "CRASH",
schemas.EventType.CLICK: "TAP",
schemas.EventType.INPUT: "INPUT",
schemas.EventType.LOCATION: "VIEW",
schemas.EventType.CUSTOM: "CUSTOM",
schemas.EventType.REQUEST: "REQUEST",
schemas.EventType.REQUEST_DETAILS: "REQUEST",
schemas.PerformanceEventType.FETCH_FAILED: "REQUEST",
schemas.EventType.ERROR: "CRASH",
}
if platform != "web" and event_type in defs_mobile:
return defs_mobile.get(event_type)
@ -581,7 +581,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
if issue:
data.filters.append(
schemas.SessionSearchFilterSchema(value=[issue['type']],
type=schemas.FilterType.issue.value,
type=schemas.FilterType.ISSUE.value,
operator='is')
)
ss_constraints = []
@ -622,7 +622,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
f_k = f"f_value{i}"
full_args = {**full_args, f_k: f.value, **_multiple_values(f.value, value_key=f_k)}
op = __get_sql_operator(f.operator) \
if filter_type not in [schemas.FilterType.events_count] else f.operator.value
if filter_type not in [schemas.FilterType.EVENTS_COUNT] else f.operator.value
is_any = _isAny_opreator(f.operator)
is_undefined = _isUndefined_operator(f.operator)
if not is_any and not is_undefined and len(f.value) == 0:
@ -630,7 +630,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
is_not = False
if __is_negation_operator(f.operator):
is_not = True
if filter_type == schemas.FilterType.user_browser:
if filter_type == schemas.FilterType.USER_BROWSER:
if is_any:
extra_constraints.append('isNotNull(s.user_browser)')
ss_constraints.append('isNotNull(ms.user_browser)')
@ -640,7 +640,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append(
_multiple_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_mobile]:
elif filter_type in [schemas.FilterType.USER_OS, schemas.FilterType.USER_OS_MOBILE]:
if is_any:
extra_constraints.append('isNotNull(s.user_os)')
ss_constraints.append('isNotNull(ms.user_os)')
@ -650,7 +650,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append(
_multiple_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_mobile]:
elif filter_type in [schemas.FilterType.USER_DEVICE, schemas.FilterType.USER_DEVICE_MOBILE]:
if is_any:
extra_constraints.append('isNotNull(s.user_device)')
ss_constraints.append('isNotNull(ms.user_device)')
@ -660,7 +660,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append(
_multiple_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_mobile]:
elif filter_type in [schemas.FilterType.USER_COUNTRY, schemas.FilterType.USER_COUNTRY_MOBILE]:
if is_any:
extra_constraints.append('isNotNull(s.user_country)')
ss_constraints.append('isNotNull(ms.user_country)')
@ -670,7 +670,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append(
_multiple_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in schemas.FilterType.user_city:
elif filter_type in schemas.FilterType.USER_CITY:
if is_any:
extra_constraints.append('isNotNull(s.user_city)')
ss_constraints.append('isNotNull(ms.user_city)')
@ -680,7 +680,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append(
_multiple_conditions(f'ms.user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in schemas.FilterType.user_state:
elif filter_type in schemas.FilterType.USER_STATE:
if is_any:
extra_constraints.append('isNotNull(s.user_state)')
ss_constraints.append('isNotNull(ms.user_state)')
@ -690,7 +690,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append(
_multiple_conditions(f'ms.user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.utm_source]:
elif filter_type in [schemas.FilterType.UTM_SOURCE]:
if is_any:
extra_constraints.append('isNotNull(s.utm_source)')
ss_constraints.append('isNotNull(ms.utm_source)')
@ -704,7 +704,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append(
_multiple_conditions(f'ms.utm_source {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_medium]:
elif filter_type in [schemas.FilterType.UTM_MEDIUM]:
if is_any:
extra_constraints.append('isNotNull(s.utm_medium)')
ss_constraints.append('isNotNull(ms.utm_medium)')
@ -718,7 +718,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append(
_multiple_conditions(f'ms.utm_medium {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_campaign]:
elif filter_type in [schemas.FilterType.UTM_CAMPAIGN]:
if is_any:
extra_constraints.append('isNotNull(s.utm_campaign)')
ss_constraints.append('isNotNull(ms.utm_campaign)')
@ -733,7 +733,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
_multiple_conditions(f'ms.utm_campaign {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.duration:
elif filter_type == schemas.FilterType.DURATION:
if len(f.value) > 0 and f.value[0] is not None:
extra_constraints.append("s.duration >= %(minDuration)s")
ss_constraints.append("ms.duration >= %(minDuration)s")
@ -742,7 +742,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
extra_constraints.append("s.duration <= %(maxDuration)s")
ss_constraints.append("ms.duration <= %(maxDuration)s")
full_args["maxDuration"] = f.value[1]
elif filter_type == schemas.FilterType.referrer:
elif filter_type == schemas.FilterType.REFERRER:
if is_any:
extra_constraints.append('isNotNull(s.base_referrer)')
ss_constraints.append('isNotNull(ms.base_referrer)')
@ -774,7 +774,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
_multiple_conditions(
f"ms.{metadata.index_to_colname(meta_keys[f.source])} {op} toString(%({f_k})s)",
f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]:
elif filter_type in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE]:
if is_any:
extra_constraints.append('isNotNull(s.user_id)')
ss_constraints.append('isNotNull(ms.user_id)')
@ -788,8 +788,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append(
_multiple_conditions(f"ms.user_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_mobile]:
elif filter_type in [schemas.FilterType.USER_ANONYMOUS_ID,
schemas.FilterType.USER_ANONYMOUS_ID_MOBILE]:
if is_any:
extra_constraints.append('isNotNull(s.user_anonymous_id)')
ss_constraints.append('isNotNull(ms.user_anonymous_id)')
@ -803,7 +803,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append(
_multiple_conditions(f"ms.user_anonymous_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_mobile]:
elif filter_type in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE]:
if is_any:
extra_constraints.append('isNotNull(s.rev_id)')
ss_constraints.append('isNotNull(ms.rev_id)')
@ -817,7 +817,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append(
_multiple_conditions(f"ms.rev_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.platform:
elif filter_type == schemas.FilterType.PLATFORM:
# op = __get_sql_operator(f.operator)
extra_constraints.append(
_multiple_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
@ -825,7 +825,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append(
_multiple_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.issue:
elif filter_type == schemas.FilterType.ISSUE:
if is_any:
extra_constraints.append("notEmpty(s.issue_types)")
ss_constraints.append("notEmpty(ms.issue_types)")
@ -842,7 +842,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
if is_not:
extra_constraints[-1] = f"not({extra_constraints[-1]})"
ss_constraints[-1] = f"not({ss_constraints[-1]})"
elif filter_type == schemas.FilterType.events_count:
elif filter_type == schemas.FilterType.EVENTS_COUNT:
extra_constraints.append(
_multiple_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
@ -871,7 +871,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions = []
events_conditions_not = []
event_index = 0
or_events = data.events_order == schemas.SearchEventOrder._or
or_events = data.events_order == schemas.SearchEventOrder.OR
for i, event in enumerate(data.events):
event_type = event.type
is_any = _isAny_opreator(event.operator)
@ -915,7 +915,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event_where.append(f"main.event_type='{__get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
if not is_any:
if event.operator == schemas.ClickEventExtraOperator._on_selector:
if event.operator == schemas.ClickEventExtraOperator.SELECTOR_IS:
event_where.append(
_multiple_conditions(f"main.selector = %({e_k})s", event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
@ -1201,7 +1201,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == schemas.PerformanceEventType.fetch_failed:
elif event_type == schemas.PerformanceEventType.FETCH_FAILED:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
_column = 'url_path'
event_where.append(f"main.event_type='{__get_event_type(event_type, platform=platform)}'")
@ -1239,9 +1239,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
# _multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
# event.source, value_key=e_k))
# TODO: isNot for PerformanceEvent
elif event_type in [schemas.PerformanceEventType.location_dom_complete,
schemas.PerformanceEventType.location_largest_contentful_paint_time,
schemas.PerformanceEventType.location_ttfb]:
elif event_type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE,
schemas.PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME,
schemas.PerformanceEventType.LOCATION_TTFB]:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
event_where.append(f"main.event_type='{__get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -1263,8 +1263,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions[-1]["condition"].append(event_where[-1])
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
# TODO: isNot for PerformanceEvent
elif event_type in [schemas.PerformanceEventType.location_avg_cpu_load,
schemas.PerformanceEventType.location_avg_memory_usage]:
elif event_type in [schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD,
schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE]:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
event_where.append(f"main.event_type='{__get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -1337,7 +1337,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
# event.source, value_key=e_k)
# event_index += 1
# TODO: no isNot for RequestDetails
elif event_type == schemas.EventType.request_details:
elif event_type == schemas.EventType.REQUEST_DETAILS:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
event_where.append(f"main.event_type='{__get_event_type(event_type, platform=platform)}'")
events_conditions.append({"type": event_where[-1]})
@ -1351,35 +1351,35 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
op = __get_sql_operator(f.operator)
e_k_f = e_k + f"_fetch{j}"
full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
if f.type == schemas.FetchFilterType._url:
if f.type == schemas.FetchFilterType.FETCH_URL:
event_where.append(
_multiple_conditions(f"main.url_path {op} %({e_k_f})s", f.value,
value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
elif f.type == schemas.FetchFilterType._status_code:
elif f.type == schemas.FetchFilterType.FETCH_STATUS_CODE:
event_where.append(
_multiple_conditions(f"main.status {f.operator} %({e_k_f})s", f.value,
value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
elif f.type == schemas.FetchFilterType._method:
elif f.type == schemas.FetchFilterType.FETCH_METHOD:
event_where.append(
_multiple_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
elif f.type == schemas.FetchFilterType._duration:
elif f.type == schemas.FetchFilterType.FETCH_DURATION:
event_where.append(
_multiple_conditions(f"main.duration {f.operator} %({e_k_f})s", f.value,
value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
elif f.type == schemas.FetchFilterType._request_body:
elif f.type == schemas.FetchFilterType.FETCH_REQUEST_BODY:
event_where.append(
_multiple_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
elif f.type == schemas.FetchFilterType._response_body:
elif f.type == schemas.FetchFilterType.FETCH_RESPONSE_BODY:
event_where.append(
_multiple_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
@ -1391,7 +1391,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
else:
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
# TODO: no isNot for GraphQL
elif event_type == schemas.EventType.graphql:
elif event_type == schemas.EventType.GRAPHQL:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
event_where.append(f"main.event_type='GRAPHQL'")
events_conditions.append({"type": event_where[-1]})
@ -1404,20 +1404,20 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
op = __get_sql_operator(f.operator)
e_k_f = e_k + f"_graphql{j}"
full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
if f.type == schemas.GraphqlFilterType._name:
if f.type == schemas.GraphqlFilterType.GRAPHQL_NAME:
event_where.append(
_multiple_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value,
value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
elif f.type == schemas.GraphqlFilterType._method:
elif f.type == schemas.GraphqlFilterType.GRAPHQL_METHOD:
event_where.append(
_multiple_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
elif f.type == schemas.GraphqlFilterType._request_body:
elif f.type == schemas.GraphqlFilterType.GRAPHQL_REQUEST_BODY:
event_where.append(
_multiple_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
elif f.type == schemas.GraphqlFilterType._response_body:
elif f.type == schemas.GraphqlFilterType.GRAPHQL_RESPONSE_BODY:
event_where.append(
_multiple_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
@ -1454,7 +1454,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
) AS event_{event_index} {"ON(TRUE)" if event_index > 0 else ""}\
""")
else:
if data.events_order == schemas.SearchEventOrder._then:
if data.events_order == schemas.SearchEventOrder.THEN:
pass
else:
events_query_from.append(f"""\
@ -1466,10 +1466,10 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
""")
event_index += 1
# limit THEN-events to 7 in CH because sequenceMatch cannot take more arguments
if event_index == 7 and data.events_order == schemas.SearchEventOrder._then:
if event_index == 7 and data.events_order == schemas.SearchEventOrder.THEN:
break
if event_index < 2:
data.events_order = schemas.SearchEventOrder._or
data.events_order = schemas.SearchEventOrder.OR
if len(events_extra_join) > 0:
if event_index < 2:
events_extra_join = f"INNER JOIN ({events_extra_join}) AS main1 USING(error_id)"
@ -1480,7 +1480,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
FROM {exp_ch_helper.get_user_favorite_sessions_table()} AS user_favorite_sessions
WHERE user_id = %(userId)s)""")
if data.events_order in [schemas.SearchEventOrder._then, schemas.SearchEventOrder._and]:
if data.events_order in [schemas.SearchEventOrder.THEN, schemas.SearchEventOrder.AND]:
sequence_pattern = [f'(?{i + 1}){c.get("time", "")}' for i, c in enumerate(events_conditions)]
sub_join = ""
type_conditions = []
@ -1525,7 +1525,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
del _value_conditions_not
del value_conditions_not
if data.events_order == schemas.SearchEventOrder._then:
if data.events_order == schemas.SearchEventOrder.THEN:
having = f"""HAVING sequenceMatch('{''.join(sequence_pattern)}')(main.datetime,{','.join(sequence_conditions)})"""
else:
having = f"""HAVING {" AND ".join([f"countIf({c})>0" for c in list(set(sequence_conditions))])}"""
@ -1706,8 +1706,8 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
available_keys = metadata.get_keys_by_projects(project_ids)
for i in available_keys:
available_keys[i]["user_id"] = schemas.FilterType.user_id
available_keys[i]["user_anonymous_id"] = schemas.FilterType.user_anonymous_id
available_keys[i]["user_id"] = schemas.FilterType.USER_ID
available_keys[i]["user_anonymous_id"] = schemas.FilterType.USER_ANONYMOUS_ID
results = {}
for i in project_ids:
if m_key not in available_keys[i].values():

View file

@ -158,7 +158,7 @@ def query_requests_by_period(project_id, start_time, end_time, filters: Optional
for n in names_:
if n is None:
continue
data_ = {'category': schemas.InsightCategories.network, 'name': n,
data_ = {'category': schemas.InsightCategories.NETWORK, 'name': n,
'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True}
for n_, v in ratio:
if n == n_:
@ -258,7 +258,7 @@ def query_most_errors_by_period(project_id, start_time, end_time,
for n in names_:
if n is None:
continue
data_ = {'category': schemas.InsightCategories.errors, 'name': n,
data_ = {'category': schemas.InsightCategories.ERRORS, 'name': n,
'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True}
for n_, v in ratio:
if n == n_:
@ -338,7 +338,7 @@ def query_cpu_memory_by_period(project_id, start_time, end_time,
output = list()
if cpu_oldvalue is not None or cpu_newvalue is not None:
output.append({'category': schemas.InsightCategories.resources,
output.append({'category': schemas.InsightCategories.RESOURCES,
'name': 'cpu',
'value': cpu_newvalue,
'oldValue': cpu_oldvalue,
@ -346,7 +346,7 @@ def query_cpu_memory_by_period(project_id, start_time, end_time,
cpu_newvalue - cpu_oldvalue) / cpu_oldvalue if cpu_ratio is not None else cpu_ratio,
'isNew': True if cpu_newvalue is not None and cpu_oldvalue is None else False})
if mem_oldvalue is not None or mem_newvalue is not None:
output.append({'category': schemas.InsightCategories.resources,
output.append({'category': schemas.InsightCategories.RESOURCES,
'name': 'memory',
'value': mem_newvalue,
'oldValue': mem_oldvalue,
@ -423,7 +423,7 @@ def query_click_rage_by_period(project_id, start_time, end_time,
for n in names_:
if n is None:
continue
data_ = {'category': schemas.InsightCategories.rage, 'name': n,
data_ = {'category': schemas.InsightCategories.RAGE, 'name': n,
'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True}
for n_, v in ratio:
if n == n_:
@ -452,16 +452,16 @@ def fetch_selected(project_id, data: schemas.GetInsightsSchema):
if len(data.series) > 0:
filters = data.series[0].filter
if schemas.InsightCategories.errors in data.metricValue:
if schemas.InsightCategories.ERRORS in data.metricValue:
output += query_most_errors_by_period(project_id=project_id, start_time=data.startTimestamp,
end_time=data.endTimestamp, filters=filters)
if schemas.InsightCategories.network in data.metricValue:
if schemas.InsightCategories.NETWORK in data.metricValue:
output += query_requests_by_period(project_id=project_id, start_time=data.startTimestamp,
end_time=data.endTimestamp, filters=filters)
if schemas.InsightCategories.rage in data.metricValue:
if schemas.InsightCategories.RAGE in data.metricValue:
output += query_click_rage_by_period(project_id=project_id, start_time=data.startTimestamp,
end_time=data.endTimestamp, filters=filters)
if schemas.InsightCategories.resources in data.metricValue:
if schemas.InsightCategories.RESOURCES in data.metricValue:
output += query_cpu_memory_by_period(project_id=project_id, start_time=data.startTimestamp,
end_time=data.endTimestamp, filters=filters)
return output

View file

@ -8,65 +8,65 @@ else:
from . import autocomplete as autocomplete
SUPPORTED_TYPES = {
schemas.FilterType.user_os: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os)),
schemas.FilterType.user_browser: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_browser),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_browser)),
schemas.FilterType.user_device: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device)),
schemas.FilterType.user_country: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country)),
schemas.FilterType.user_city: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_city),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_city)),
schemas.FilterType.user_state: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_state),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_state)),
schemas.FilterType.user_id: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id)),
schemas.FilterType.user_anonymous_id: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id)),
schemas.FilterType.rev_id: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id)),
schemas.FilterType.referrer: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.referrer),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.referrer)),
schemas.FilterType.utm_campaign: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_campaign),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_campaign)),
schemas.FilterType.utm_medium: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_medium),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_medium)),
schemas.FilterType.utm_source: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_source),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_source)),
schemas.FilterType.USER_OS: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS)),
schemas.FilterType.USER_BROWSER: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_BROWSER),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_BROWSER)),
schemas.FilterType.USER_DEVICE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_DEVICE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_DEVICE)),
schemas.FilterType.USER_COUNTRY: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY)),
schemas.FilterType.USER_CITY: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_CITY),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_CITY)),
schemas.FilterType.USER_STATE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_STATE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_STATE)),
schemas.FilterType.USER_ID: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID)),
schemas.FilterType.USER_ANONYMOUS_ID: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID)),
schemas.FilterType.REV_ID: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID)),
schemas.FilterType.REFERRER: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REFERRER),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REFERRER)),
schemas.FilterType.UTM_CAMPAIGN: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_CAMPAIGN),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_CAMPAIGN)),
schemas.FilterType.UTM_MEDIUM: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_MEDIUM),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_MEDIUM)),
schemas.FilterType.UTM_SOURCE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_SOURCE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_SOURCE)),
# MOBILE
schemas.FilterType.user_os_mobile: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os_mobile),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os_mobile)),
schemas.FilterType.user_device_mobile: SupportedFilter(
schemas.FilterType.USER_OS_MOBILE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS_MOBILE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS_MOBILE)),
schemas.FilterType.USER_DEVICE_MOBILE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(
typename=schemas.FilterType.user_device_mobile),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device_mobile)),
schemas.FilterType.user_country_mobile: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country_mobile),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country_mobile)),
schemas.FilterType.user_id_mobile: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id_mobile),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id_mobile)),
schemas.FilterType.user_anonymous_id_mobile: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id_mobile),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id_mobile)),
schemas.FilterType.rev_id_mobile: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id_mobile),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id_mobile)),
typename=schemas.FilterType.USER_DEVICE_MOBILE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_DEVICE_MOBILE)),
schemas.FilterType.USER_COUNTRY_MOBILE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY_MOBILE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY_MOBILE)),
schemas.FilterType.USER_ID_MOBILE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID_MOBILE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID_MOBILE)),
schemas.FilterType.USER_ANONYMOUS_ID_MOBILE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID_MOBILE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID_MOBILE)),
schemas.FilterType.REV_ID_MOBILE: SupportedFilter(
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID_MOBILE),
query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID_MOBILE)),
}

View file

@ -177,7 +177,7 @@ def get_all(tenant_id, data: schemas.TrailSearchPayloadSchema):
conditions.append("users.name ILIKE %(query)s")
conditions.append("users.tenant_id = %(tenant_id)s")
params["query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
op=schemas.SearchEventOperator.CONTAINS)
cur.execute(
cur.mogrify(
f"""SELECT COUNT(*) AS count,

View file

@ -114,7 +114,7 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="",
def exists_by_name(tenant_id: int, name: str, exclude_id: Optional[int],
webhook_type: str = schemas.WebhookType.webhook) -> bool:
webhook_type: str = schemas.WebhookType.WEBHOOK) -> bool:
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT EXISTS(SELECT 1
FROM public.webhooks

View file

@ -73,7 +73,6 @@ rm -rf ./chalicelib/utils/sql_helper.py
rm -rf ./chalicelib/utils/storage/generators.py
rm -rf ./chalicelib/utils/storage/interface.py
rm -rf ./chalicelib/utils/storage/s3.py
rm -rf ./chalicelib/utils/strings.py
rm -rf ./chalicelib/utils/TimeUTC.py
rm -rf ./crons/__init__.py
rm -rf ./crons/core_crons.py

View file

@ -235,7 +235,7 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
# for backward compatibility
@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions", "replay"],
dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY, ServicePermissions.SESSION_REPLAY)])
def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)):
if not sessionId.isnumeric():
@ -255,7 +255,7 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba
@app.post('/{projectId}/sessions/search', tags=["sessions"],
dependencies=[OR_scope(Permissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def sessions_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id,
@ -264,7 +264,7 @@ def sessions_search(projectId: int, data: schemas.SessionsSearchPayloadSchema =
@app.post('/{projectId}/sessions/search/ids', tags=["sessions"],
dependencies=[OR_scope(Permissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True,
@ -273,7 +273,7 @@ def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema
@app.get('/{projectId}/sessions/{sessionId}/first-mob', tags=["sessions", "replay"],
dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY, ServicePermissions.SESSION_REPLAY)])
def get_first_mob_file(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)):
if not sessionId.isnumeric():
@ -289,7 +289,7 @@ def get_first_mob_file(projectId: int, sessionId: Union[int, str], background_ta
@app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"],
dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY, ServicePermissions.SESSION_REPLAY)])
def get_session_events(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)):
if not sessionId.isnumeric():
@ -309,7 +309,7 @@ def get_session_events(projectId: int, sessionId: Union[int, str], background_ta
@app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"],
dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY, ServicePermissions.SESSION_REPLAY)])
def get_session_events(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)):
if not sessionId.isnumeric():
@ -326,7 +326,7 @@ def get_session_events(projectId: int, sessionId: Union[int, str],
@app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"],
dependencies=[OR_scope(Permissions.dev_tools)])
dependencies=[OR_scope(Permissions.DEV_TOOLS)])
def get_error_trace(projectId: int, sessionId: int, errorId: str,
context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_trace(project_id=projectId, error_id=errorId)
@ -337,7 +337,7 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str,
}
@app.get('/{projectId}/errors/{errorId}', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)])
@app.get('/{projectId}/errors/{errorId}', tags=['errors'], dependencies=[OR_scope(Permissions.DEV_TOOLS)])
def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24,
density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId,
@ -348,7 +348,7 @@ def errors_get_details(projectId: int, errorId: str, background_tasks: Backgroun
return data
@app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)])
@app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors'], dependencies=[OR_scope(Permissions.DEV_TOOLS)])
def errors_get_details_sourcemaps(projectId: int, errorId: str,
context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_trace(project_id=projectId, error_id=errorId)
@ -359,7 +359,7 @@ def errors_get_details_sourcemaps(projectId: int, errorId: str,
}
@app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"], dependencies=[OR_scope(Permissions.dev_tools)])
@app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"], dependencies=[OR_scope(Permissions.DEV_TOOLS)])
def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7),
endDate: int = TimeUTC.now(),
context: schemas.CurrentContext = Depends(OR_context)):
@ -378,7 +378,7 @@ def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDa
@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"],
dependencies=[OR_scope(Permissions.assist_live, ServicePermissions.assist_live)])
dependencies=[OR_scope(Permissions.ASSIST_LIVE, ServicePermissions.ASSIST_LIVE)])
def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId)
@ -394,8 +394,8 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun
@app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"],
dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay,
ServicePermissions.assist_live, ServicePermissions.session_replay)])
dependencies=[OR_scope(Permissions.ASSIST_LIVE, Permissions.SESSION_REPLAY,
ServicePermissions.ASSIST_LIVE, ServicePermissions.SESSION_REPLAY)])
def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)):
not_found = {"errors": ["Replay file not found"]}
@ -417,9 +417,9 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
@app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"],
dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools,
ServicePermissions.assist_live, ServicePermissions.session_replay,
ServicePermissions.dev_tools)])
dependencies=[OR_scope(Permissions.ASSIST_LIVE, Permissions.SESSION_REPLAY, Permissions.DEV_TOOLS,
ServicePermissions.ASSIST_LIVE, ServicePermissions.SESSION_REPLAY,
ServicePermissions.DEV_TOOLS)])
def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)):
not_found = {"errors": ["Devtools file not found"]}
@ -440,14 +440,14 @@ def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
return FileResponse(path=path, media_type="application/octet-stream")
@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"], dependencies=[OR_scope(Permissions.session_replay)])
@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"], dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatMapPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": heatmaps.get_by_url(project_id=projectId, data=data)}
@app.post('/{projectId}/sessions/{sessionId}/heatmaps', tags=["heatmaps"],
dependencies=[OR_scope(Permissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def get_heatmaps_by_session_id_url(projectId: int, sessionId: int,
data: schemas.GetHeatMapPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
@ -455,7 +455,7 @@ def get_heatmaps_by_session_id_url(projectId: int, sessionId: int,
@app.post('/{projectId}/sessions/{sessionId}/clickmaps', tags=["heatmaps"],
dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY, ServicePermissions.SESSION_REPLAY)])
def get_clickmaps_by_session_id_url(projectId: int, sessionId: int,
data: schemas.GetClickMapPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
@ -463,14 +463,14 @@ def get_clickmaps_by_session_id_url(projectId: int, sessionId: int,
@app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"],
dependencies=[OR_scope(Permissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def add_remove_favorite_session2(projectId: int, sessionId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId)
@app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"],
dependencies=[OR_scope(Permissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId,
tenant_id=context.tenant_id,
@ -483,7 +483,7 @@ def assign_session(projectId: int, sessionId, context: schemas.CurrentContext =
@app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"],
dependencies=[OR_scope(Permissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def assign_session(projectId: int, sessionId: int, issueId: str,
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId,
@ -496,7 +496,7 @@ def assign_session(projectId: int, sessionId: int, issueId: str,
@app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"],
dependencies=[OR_scope(Permissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def comment_assignment(projectId: int, sessionId: int, issueId: str,
data: schemas.CommentAssignmentSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
@ -511,7 +511,7 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str,
@app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"],
dependencies=[OR_scope(Permissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if not sessions.session_exists(project_id=projectId, session_id=sessionId):
@ -526,7 +526,7 @@ def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema
@app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"],
dependencies=[OR_scope(Permissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, user_id=context.user_id)
@ -538,7 +538,7 @@ def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentCo
@app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"],
dependencies=[OR_scope(Permissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
@ -551,14 +551,14 @@ def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema
@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"],
dependencies=[OR_scope(Permissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def delete_note(projectId: int, noteId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.delete(project_id=projectId, note_id=noteId)
return data
@app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"],
dependencies=[OR_scope(Permissions.session_replay)])
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def share_note_to_slack(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
@ -572,7 +572,7 @@ def share_note_to_msteams(projectId: int, noteId: int, webhookId: int,
note_id=noteId, webhook_id=webhookId)
@app.post('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)])
@app.post('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
@ -583,7 +583,7 @@ def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
@app.post('/{project_id}/feature-flags/search', tags=["feature flags"],
dependencies=[OR_scope(Permissions.feature_flags)])
dependencies=[OR_scope(Permissions.FEATURE_FLAGS)])
def search_feature_flags(project_id: int,
data: schemas.SearchFlagsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
@ -591,19 +591,19 @@ def search_feature_flags(project_id: int,
@app.get('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"],
dependencies=[OR_scope(Permissions.feature_flags)])
dependencies=[OR_scope(Permissions.FEATURE_FLAGS)])
def get_feature_flag(project_id: int, feature_flag_id: int):
return feature_flags.get_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id)
@app.post('/{project_id}/feature-flags', tags=["feature flags"], dependencies=[OR_scope(Permissions.feature_flags)])
@app.post('/{project_id}/feature-flags', tags=["feature flags"], dependencies=[OR_scope(Permissions.FEATURE_FLAGS)])
def add_feature_flag(project_id: int, data: schemas.FeatureFlagSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return feature_flags.create_feature_flag(project_id=project_id, user_id=context.user_id, feature_flag_data=data)
@app.put('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"],
dependencies=[OR_scope(Permissions.feature_flags)])
dependencies=[OR_scope(Permissions.FEATURE_FLAGS)])
def update_feature_flag(project_id: int, feature_flag_id: int, data: schemas.FeatureFlagSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return feature_flags.update_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id,
@ -611,13 +611,13 @@ def update_feature_flag(project_id: int, feature_flag_id: int, data: schemas.Fea
@app.delete('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"],
dependencies=[OR_scope(Permissions.feature_flags)])
dependencies=[OR_scope(Permissions.FEATURE_FLAGS)])
def delete_feature_flag(project_id: int, feature_flag_id: int, _=Body(None)):
return {"data": feature_flags.delete_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id)}
@app.post('/{project_id}/feature-flags/{feature_flag_id}/status', tags=["feature flags"],
dependencies=[OR_scope(Permissions.feature_flags)])
dependencies=[OR_scope(Permissions.FEATURE_FLAGS)])
def update_feature_flag_status(project_id: int, feature_flag_id: int,
data: schemas.FeatureFlagStatus = Body(...)):
return {"data": feature_flags.update_feature_flag_status(project_id=project_id, feature_flag_id=feature_flag_id,

View file

@ -5,7 +5,7 @@ from chalicelib.core import product_analytics
from or_dependencies import OR_scope
from routers.base import get_routers
public_app, app, app_apikey = get_routers([OR_scope(schemas.Permissions.metrics)])
public_app, app, app_apikey = get_routers([OR_scope(schemas.Permissions.METRICS)])
@app.post('/{projectId}/insights/journey', tags=["insights"])

View file

@ -7,7 +7,7 @@ from chalicelib.core import dashboards, custom_metrics, funnels
from or_dependencies import OR_context, OR_scope
from routers.base import get_routers
public_app, app, app_apikey = get_routers([OR_scope(schemas.Permissions.metrics)])
public_app, app, app_apikey = get_routers([OR_scope(schemas.Permissions.METRICS)])
@app.post('/{projectId}/dashboards', tags=["dashboard"])

View file

@ -9,20 +9,20 @@ from .transformers_validators import remove_whitespace, remove_duplicate_values
class Permissions(str, Enum):
session_replay = "SESSION_REPLAY"
dev_tools = "DEV_TOOLS"
SESSION_REPLAY = "SESSION_REPLAY"
DEV_TOOLS = "DEV_TOOLS"
# errors = "ERRORS"
metrics = "METRICS"
assist_live = "ASSIST_LIVE"
assist_call = "ASSIST_CALL"
feature_flags = "FEATURE_FLAGS"
METRICS = "METRICS"
ASSIST_LIVE = "ASSIST_LIVE"
ASSIST_CALL = "ASSIST_CALL"
FEATURE_FLAGS = "FEATURE_FLAGS"
class ServicePermissions(str, Enum):
session_replay = "SERVICE_SESSION_REPLAY"
dev_tools = "SERVICE_DEV_TOOLS"
assist_live = "SERVICE_ASSIST_LIVE"
assist_call = "SERVICE_ASSIST_CALL"
SESSION_REPLAY = "SERVICE_SESSION_REPLAY"
DEV_TOOLS = "SERVICE_DEV_TOOLS"
ASSIST_LIVE = "SERVICE_ASSIST_LIVE"
ASSIST_CALL = "SERVICE_ASSIST_CALL"
class CurrentContext(schemas.CurrentContext):
@ -58,10 +58,10 @@ class SignalsSchema(BaseModel):
class InsightCategories(str, Enum):
errors = "errors"
network = "network"
rage = "rage"
resources = "resources"
ERRORS = "errors"
NETWORK = "network"
RAGE = "rage"
RESOURCES = "resources"
class GetInsightsSchema(schemas._TimedSchema):
@ -89,12 +89,12 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema):
user_id: Optional[int] = Field(default=None)
query: Optional[str] = Field(default=None)
action: Optional[str] = Field(default=None)
order: schemas.SortOrderType = Field(default=schemas.SortOrderType.desc)
order: schemas.SortOrderType = Field(default=schemas.SortOrderType.DESC)
@model_validator(mode="before")
def transform_order(cls, values):
if values.get("order") is None:
values["order"] = schemas.SortOrderType.desc
values["order"] = schemas.SortOrderType.DESC
else:
values["order"] = values["order"].upper()
return values
@ -146,7 +146,6 @@ class AssistRecordSearchPayloadSchema(schemas._PaginatedSchema, schemas._TimedSc
query: Optional[str] = Field(default=None)
order: Literal["asc", "desc"] = Field(default="desc")
# TODO: move these to schema when Insights is supported on PG
class CardInsights(schemas.CardInsights):
metric_value: List[InsightCategories] = Field(default=[])

View file

@ -0,0 +1,80 @@
-- ClickHouse (EE) DB delta: rebuild the rolling last-7-days events view so it
-- carries the click heatmap coordinate columns.
-- NOTE(review): this stamps 'v1.19.0-ee' while the sibling PostgreSQL scripts
-- in the same change upgrade to v1.20.0 — confirm the tag is intentional.
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.19.0-ee';
-- Drop the old view first; it is recreated (and backfilled via POPULATE) below.
DROP TABLE IF EXISTS experimental.events_l7d_mv;
-- Make the normalized click coordinates nullable floats on the base table
-- (rows for non-click event types carry no coordinates).
ALTER TABLE experimental.events
MODIFY COLUMN IF EXISTS normalized_x Nullable(Float32),
MODIFY COLUMN IF EXISTS normalized_y Nullable(Float32);
-- Recreate the 7-day materialized view:
--   * ReplacingMergeTree(_timestamp): rows with the same ORDER BY key are
--     deduplicated, keeping the one with the latest _timestamp.
--   * partitioned per day, expired by TTL 7 days after the event datetime.
--   * POPULATE backfills it from the existing rows of the last 7 days.
-- The SELECT column list below is frozen into the view's schema — do not
-- reorder it.
CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.events_l7d_mv
ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, event_type, session_id, message_id)
TTL datetime + INTERVAL 7 DAY
POPULATE
AS
SELECT session_id,
project_id,
event_type,
datetime,
label,
hesitation_time,
name,
payload,
level,
source,
message,
error_id,
duration,
context,
url,
url_host,
url_path,
url_hostpath,
request_start,
response_start,
response_end,
dom_content_loaded_event_start,
dom_content_loaded_event_end,
load_event_start,
load_event_end,
first_paint,
first_contentful_paint_time,
speed_index,
visually_complete,
time_to_interactive,
ttfb,
ttlb,
response_time,
dom_building_time,
dom_content_loaded_event_time,
load_event_time,
min_fps,
avg_fps,
max_fps,
min_cpu,
avg_cpu,
max_cpu,
min_total_js_heap_size,
avg_total_js_heap_size,
max_total_js_heap_size,
min_used_js_heap_size,
avg_used_js_heap_size,
max_used_js_heap_size,
method,
status,
success,
request_body,
response_body,
issue_type,
issue_id,
error_tags_keys,
error_tags_values,
transfer_size,
selector,
-- heatmap click coordinates added by this delta
normalized_x,
normalized_y,
message_id,
_timestamp
FROM experimental.events
WHERE datetime >= now() - INTERVAL 7 DAY;

View file

@ -0,0 +1,31 @@
-- PostgreSQL (EE) upgrade script: v1.19.0-ee -> v1.20.0-ee.
-- Guarded so it only applies on top of the expected previous version.
\set previous_version 'v1.19.0-ee'
\set next_version 'v1.20.0-ee'
-- Read the installed version and derive the guard flags; \gset stores the
-- SELECT's output columns as psql variables (current_version, valid_previous,
-- is_next).
SELECT openreplay_version() AS current_version,
openreplay_version() = :'previous_version' AS valid_previous,
openreplay_version() = :'next_version' AS is_next
\gset
\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB upgrade to :'next_version'
BEGIN;
-- Stamp the new version: format() renders the CREATE FUNCTION statement with
-- the next_version literal spliced into the dollar-quoted body, and \gexec
-- executes the generated SQL.
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec
--
-- Convert the normalized click heatmap coordinates to decimal (numeric).
ALTER TABLE IF EXISTS events.clicks
ALTER COLUMN normalized_x SET DATA TYPE decimal,
ALTER COLUMN normalized_y SET DATA TYPE decimal;
COMMIT;
\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB upgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif

View file

@ -0,0 +1,31 @@
-- PostgreSQL (community) upgrade script: v1.19.0 -> v1.20.0.
-- Guarded so it only applies on top of the expected previous version.
\set previous_version 'v1.19.0'
\set next_version 'v1.20.0'
-- Read the installed version and derive the guard flags; \gset stores the
-- SELECT's output columns as psql variables (current_version, valid_previous,
-- is_next).
SELECT openreplay_version() AS current_version,
openreplay_version() = :'previous_version' AS valid_previous,
openreplay_version() = :'next_version' AS is_next
\gset
\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB upgrade to :'next_version'
BEGIN;
-- Stamp the new version: format() renders the CREATE FUNCTION statement with
-- the next_version literal spliced into the dollar-quoted body, and \gexec
-- executes the generated SQL.
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec
--
-- Convert the normalized click heatmap coordinates to decimal (numeric).
ALTER TABLE IF EXISTS events.clicks
ALTER COLUMN normalized_x SET DATA TYPE decimal,
ALTER COLUMN normalized_y SET DATA TYPE decimal;
COMMIT;
\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB upgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif