* feat(chalice): autocomplete return top 10 with stats

* fix(chalice): fixed autocomplete top 10 meta-filters

* fix(chalice): multiple migration fixes
refactor(chalice): refactored ch-sessions code
Kraiem Taha Yassine 2024-12-19 18:21:30 +01:00 committed by GitHub
parent c004bc8932
commit f7ddf82591
12 changed files with 270 additions and 272 deletions


@ -1,3 +1,4 @@
import logging
from os import access, R_OK
from os.path import exists as path_exists, getsize
@ -10,6 +11,8 @@ import schemas
from chalicelib.core import projects
from chalicelib.utils.TimeUTC import TimeUTC
logger = logging.getLogger(__name__)
ASSIST_KEY = config("ASSIST_KEY")
ASSIST_URL = config("ASSIST_URL") % ASSIST_KEY
@ -52,21 +55,21 @@ def __get_live_sessions_ws(project_id, data):
results = requests.post(ASSIST_URL + config("assist") + f"/{project_key}",
json=data, timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for __get_live_sessions_ws")
print(results.text)
logger.error(f"!! issue with the peer-server code:{results.status_code} for __get_live_sessions_ws")
logger.error(results.text)
return {"total": 0, "sessions": []}
live_peers = results.json().get("data", [])
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
logger.error("!! Timeout getting Assist response")
live_peers = {"total": 0, "sessions": []}
except Exception as e:
print("!! Issue getting Live-Assist response")
print(str(e))
print("expected JSON, received:")
logger.error("!! Issue getting Live-Assist response")
logger.exception(e)
logger.error("expected JSON, received:")
try:
print(results.text)
logger.error(results.text)
except:
print("couldn't get response")
logger.error("couldn't get response")
live_peers = {"total": 0, "sessions": []}
_live_peers = live_peers
if "sessions" in live_peers:
@ -102,8 +105,8 @@ def get_live_session_by_id(project_id, session_id):
results = requests.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for get_live_session_by_id")
print(results.text)
logger.error(f"!! issue with the peer-server code:{results.status_code} for get_live_session_by_id")
logger.error(results.text)
return None
results = results.json().get("data")
if results is None:
@ -111,16 +114,16 @@ def get_live_session_by_id(project_id, session_id):
results["live"] = True
results["agentToken"] = __get_agent_token(project_id=project_id, project_key=project_key, session_id=session_id)
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
logger.error("!! Timeout getting Assist response")
return None
except Exception as e:
print("!! Issue getting Assist response")
print(str(e))
print("expected JSON, received:")
logger.error("!! Issue getting Assist response")
logger.exception(e)
logger.error("expected JSON, received:")
try:
print(results.text)
logger.error(results.text)
except:
print("couldn't get response")
logger.error("couldn't get response")
return None
return results
@ -132,21 +135,21 @@ def is_live(project_id, session_id, project_key=None):
results = requests.get(ASSIST_URL + config("assistList") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for is_live")
print(results.text)
logger.error(f"!! issue with the peer-server code:{results.status_code} for is_live")
logger.error(results.text)
return False
results = results.json().get("data")
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
logger.error("!! Timeout getting Assist response")
return False
except Exception as e:
print("!! Issue getting Assist response")
print(str(e))
print("expected JSON, received:")
logger.error("!! Issue getting Assist response")
logger.exception(e)
logger.error("expected JSON, received:")
try:
print(results.text)
logger.error(results.text)
except:
print("couldn't get response")
logger.error("couldn't get response")
return False
return str(session_id) == results
@ -161,21 +164,21 @@ def autocomplete(project_id, q: str, key: str = None):
ASSIST_URL + config("assistList") + f"/{project_key}/autocomplete",
params=params, timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for autocomplete")
print(results.text)
logger.error(f"!! issue with the peer-server code:{results.status_code} for autocomplete")
logger.error(results.text)
return {"errors": [f"Something went wrong wile calling assist:{results.text}"]}
results = results.json().get("data", [])
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
logger.error("!! Timeout getting Assist response")
return {"errors": ["Assist request timeout"]}
except Exception as e:
print("!! Issue getting Assist response")
print(str(e))
print("expected JSON, received:")
logger.error("!! Issue getting Assist response")
logger.exception(e)
logger.error("expected JSON, received:")
try:
print(results.text)
logger.error(results.text)
except:
print("couldn't get response")
logger.error("couldn't get response")
return {"errors": ["Something went wrong wile calling assist"]}
for r in results:
r["type"] = __change_keys(r["type"])
@ -239,24 +242,24 @@ def session_exists(project_id, session_id):
results = requests.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code} for session_exists")
print(results.text)
logger.error(f"!! issue with the peer-server code:{results.status_code} for session_exists")
logger.error(results.text)
return None
results = results.json().get("data")
if results is None:
return False
return True
except requests.exceptions.Timeout:
print("!! Timeout getting Assist response")
logger.error("!! Timeout getting Assist response")
return False
except Exception as e:
print("!! Issue getting Assist response")
print(str(e))
print("expected JSON, received:")
logger.error("!! Issue getting Assist response")
logger.exception(e)
logger.error("expected JSON, received:")
try:
print(results.text)
logger.error(results.text)
except:
print("couldn't get response")
logger.error("couldn't get response")
return False
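
The change running through this whole file is the swap from print() to the standard logging module: a module-level logger via logging.getLogger(__name__), logger.error for non-200 responses, and logger.exception inside except blocks so the traceback is preserved. A minimal, self-contained sketch of that pattern (the URL and payload below are placeholders, not the real Assist endpoints):

import logging

import requests

logger = logging.getLogger(__name__)

def fetch_live_sessions(url: str) -> dict:
    empty = {"total": 0, "sessions": []}
    try:
        results = requests.get(url, timeout=5)
        if results.status_code != 200:
            # logger.error replaces the old print() calls for non-200 responses
            logger.error("peer-server returned code:%s", results.status_code)
            logger.error(results.text)
            return empty
        return results.json().get("data", empty)
    except requests.exceptions.Timeout:
        logger.error("Timeout getting Assist response")
    except Exception:
        # logger.exception logs at ERROR level and appends the active traceback,
        # which is what the diff substitutes for the old print(str(e)) calls
        logger.exception("Issue getting Assist response")
    return empty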


@ -59,10 +59,10 @@ def __get_autocomplete_table(value, project_id):
"c_list": tuple(c_list)}
results = []
try:
results = cur.execute(query=query, params=params)
results = cur.execute(query=query, parameters=params)
except Exception as err:
logger.exception("--------- CH AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------")
logger.exception(cur.format(query=query, params=params))
logger.exception(cur.format(query=query, parameters=params))
logger.exception("--------- PARAMS -----------")
logger.exception(params)
logger.exception("--------- VALUE -----------")
@ -119,7 +119,7 @@ def __generic_autocomplete(event: Event):
query = __generic_query(event.ui_type, value_length=len(value))
params = {"project_id": project_id, "value": helper.string_to_sql_like(value),
"svalue": helper.string_to_sql_like("^" + value)}
results = cur.execute(query=query, params=params)
results = cur.execute(query=query, parameters=params)
return helper.list_to_camel_case(results)
return f
@ -137,7 +137,7 @@ def __generic_autocomplete_metas(typename):
return []
query = __generic_query(typename, value_length=len(text))
rows = cur.execute(query=query, params=params)
rows = cur.execute(query=query, parameters=params)
return rows
return f
@ -335,5 +335,5 @@ def get_top_values(project_id, event_type, event_key=None):
SELECT c_value AS value, row_count, truncate(row_count * 100 / total_count,2) AS row_percentage
FROM raw;"""
params = {"project_id": project_id}
results = cur.execute(query=query, params=params)
results = cur.execute(query=query, parameters=params)
return helper.list_to_camel_case(results)
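
The recurring edit in this file (and in the errors module further down) is the keyword rename from params= to parameters= on the ClickHouse cursor's execute and format calls. A rough sketch of the call shape after the change, using a hypothetical stand-in for the project's ch_client cursor (the class below is illustrative, not the real wrapper):

from typing import Optional

class CHCursor:
    # Hypothetical stand-in: the real cursor forwards the query and parameters
    # to the underlying ClickHouse driver.
    def execute(self, query: str, parameters: Optional[dict] = None) -> list:
        print("would execute:", query, parameters)
        return []

cur = CHCursor()
query = "SELECT c_value AS value FROM autocomplete WHERE project_id = %(project_id)s LIMIT 10"
params = {"project_id": 1}
rows = cur.execute(query=query, parameters=params)  # keyword is now parameters, not params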


@ -1,12 +1,11 @@
from decouple import config
import schemas
from . import errors as errors_legacy
from chalicelib.core import metrics, metadata
from chalicelib.core import metadata
from chalicelib.core import sessions
from chalicelib.core.metrics import metrics
from chalicelib.utils import ch_client, exp_ch_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from . import errors as errors_legacy
def _multiple_values(values, value_key="value"):
@ -290,7 +289,7 @@ def get_details(project_id, error_id, user_id, **data):
# print("--------------------")
# print(ch.format(main_ch_query, params))
# print("--------------------")
row = ch.execute(query=main_ch_query, params=params)
row = ch.execute(query=main_ch_query, parameters=params)
if len(row) == 0:
return {"errors": ["error not found"]}
row = row[0]
@ -309,7 +308,7 @@ def get_details(project_id, error_id, user_id, **data):
# print("--------------------")
# print(ch.format(query, params))
# print("--------------------")
status = ch.execute(query=query, params=params)
status = ch.execute(query=query, parameters=params)
if status is not None:
status = status[0]
@ -650,7 +649,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
# print(ch.format(main_ch_query, params))
# print("------------")
rows = ch.execute(query=main_ch_query, params=params)
rows = ch.execute(query=main_ch_query, parameters=params)
total = rows[0]["total"] if len(rows) > 0 else 0
for r in rows:


@ -1,6 +1,10 @@
import logging
from chalicelib.core.sessions import sessions_mobs, sessions_devtool
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.core.sessions import sessions_mobs, sessions_devtool
logger = logging.getLogger(__name__)
class Actions:
@ -150,23 +154,23 @@ def get_scheduled_jobs():
def execute_jobs():
jobs = get_scheduled_jobs()
for job in jobs:
print(f"Executing jobId:{job['jobId']}")
logger.info(f"Executing jobId:{job['jobId']}")
try:
if job["action"] == Actions.DELETE_USER_DATA:
session_ids = __get_session_ids_by_user_ids(project_id=job["projectId"],
user_ids=[job["referenceId"]])
if len(session_ids) > 0:
print(f"Deleting {len(session_ids)} sessions")
logger.info(f"Deleting {len(session_ids)} sessions")
__delete_sessions_by_session_ids(session_ids=session_ids)
__delete_session_mobs_by_session_ids(session_ids=session_ids, project_id=job["projectId"])
else:
raise Exception(f"The action '{job['action']}' not supported.")
job["status"] = JobStatus.COMPLETED
print(f"Job completed {job['jobId']}")
logger.info(f"Job completed {job['jobId']}")
except Exception as e:
job["status"] = JobStatus.FAILED
job["errors"] = str(e)
print(f"Job failed {job['jobId']}")
logger.error(f"Job failed {job['jobId']}")
update(job["jobId"], job)


@ -4,7 +4,8 @@ import logging
from fastapi import HTTPException, status
import schemas
from chalicelib.core import errors, issues
from chalicelib.core import issues
from chalicelib.core.errors import errors
from chalicelib.core.metrics import heatmaps, product_analytics, funnels, custom_metrics_predefined
from chalicelib.core.sessions import sessions
from chalicelib.utils import helper, pg_client


@ -1,6 +1,7 @@
import json
from typing import Optional, List
import logging
from collections import Counter
from typing import Optional, List
from fastapi import HTTPException, status
@ -9,6 +10,8 @@ from chalicelib.core import users
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
logger = logging.getLogger(__name__)
def __exists_by_name(name: str, exclude_id: Optional[int]) -> bool:
with pg_client.PostgresClient() as cur:
@ -410,7 +413,7 @@ def update_project_conditions(project_id, conditions):
create_project_conditions(project_id, to_be_created)
if to_be_updated:
print(to_be_updated)
logger.debug(to_be_updated)
update_project_condition(project_id, to_be_updated)
return get_conditions(project_id)


@ -468,8 +468,6 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
if len(data.filters) > 0:
meta_keys = None
for i, f in enumerate(data.filters):
if not isinstance(f.value, list):
f.value = [f.value]
filter_type = f.type
f.value = helper.values_for_operator(value=f.value, op=f.operator)
f_k = f"f_value{i}"


@ -59,31 +59,6 @@ SESSION_PROJECTION_COLS_CH_MAP = """\
"""
def _multiple_conditions(condition, values, value_key="value", is_not=False):
query = []
for i in range(len(values)):
k = f"{value_key}_{i}"
query.append(condition.replace(value_key, k))
return "(" + (" AND " if is_not else " OR ").join(query) + ")"
def _multiple_values(values, value_key="value"):
query_values = {}
if values is not None and isinstance(values, list):
for i in range(len(values)):
k = f"{value_key}_{i}"
query_values[k] = values[i]
return query_values
def _isAny_opreator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator.ON_ANY, schemas.SearchEventOperator.IS_ANY]
def _isUndefined_operator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator.IS_UNDEFINED]
# This function executes the query and return result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status=schemas.ErrorStatus.ALL, count_only=False, issue=None, ids_only=False,
@ -508,7 +483,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
data.filters.append(
schemas.SessionSearchFilterSchema(value=[issue['type']],
type=schemas.FilterType.ISSUE.value,
operator='is')
operator=schemas.SearchEventOperator.IS.value)
)
ss_constraints = []
full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
@ -541,16 +516,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
# to reduce include a sub-query of sessions inside events query, in order to reduce the selected data
include_in_events = False
for i, f in enumerate(data.filters):
if not isinstance(f.value, list):
f.value = [f.value]
filter_type = f.type
f.value = helper.values_for_operator(value=f.value, op=f.operator)
f_k = f"f_value{i}"
full_args = {**full_args, f_k: f.value, **_multiple_values(f.value, value_key=f_k)}
full_args = {**full_args, f_k: f.value, **sh.multi_values(f.value, value_key=f_k)}
op = sh.get_sql_operator(f.operator) \
if filter_type not in [schemas.FilterType.EVENTS_COUNT] else f.operator.value
is_any = _isAny_opreator(f.operator)
is_undefined = _isUndefined_operator(f.operator)
is_any = sh.isAny_opreator(f.operator)
is_undefined = sh.isUndefined_operator(f.operator)
if not is_any and not is_undefined and len(f.value) == 0:
continue
is_not = False
@ -562,9 +535,10 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNotNull(ms.user_browser)')
else:
extra_constraints.append(
_multiple_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.USER_OS, schemas.FilterType.USER_OS_MOBILE]:
if is_any:
@ -572,9 +546,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNotNull(ms.user_os)')
else:
extra_constraints.append(
_multiple_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.USER_DEVICE, schemas.FilterType.USER_DEVICE_MOBILE]:
if is_any:
@ -582,9 +556,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNotNull(ms.user_device)')
else:
extra_constraints.append(
_multiple_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.USER_COUNTRY, schemas.FilterType.USER_COUNTRY_MOBILE]:
if is_any:
@ -592,9 +566,10 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNotNull(ms.user_country)')
else:
extra_constraints.append(
_multiple_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in schemas.FilterType.USER_CITY:
if is_any:
@ -602,9 +577,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNotNull(ms.user_city)')
else:
extra_constraints.append(
_multiple_conditions(f's.user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in schemas.FilterType.USER_STATE:
if is_any:
@ -612,9 +587,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNotNull(ms.user_state)')
else:
extra_constraints.append(
_multiple_conditions(f's.user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f's.user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
sh.multi_conditions(f'ms.user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.UTM_SOURCE]:
if is_any:
@ -625,11 +600,11 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNull(ms.utm_source)')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_source {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f's.utm_source {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.utm_source {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f'ms.utm_source {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.UTM_MEDIUM]:
if is_any:
extra_constraints.append('isNotNull(s.utm_medium)')
@ -639,11 +614,11 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNull(ms.utm_medium')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_medium {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f's.utm_medium {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.utm_medium {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f'ms.utm_medium {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.UTM_CAMPAIGN]:
if is_any:
extra_constraints.append('isNotNull(s.utm_campaign)')
@ -653,11 +628,11 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNull(ms.utm_campaign)')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_campaign {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f's.utm_campaign {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f'ms.utm_campaign {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f'ms.utm_campaign {op} toString(%({f_k})s)', f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.DURATION:
if len(f.value) > 0 and f.value[0] is not None:
@ -674,11 +649,11 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNotNull(ms.base_referrer)')
else:
extra_constraints.append(
_multiple_conditions(f"s.base_referrer {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.base_referrer {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.base_referrer {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.base_referrer {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == events.EventType.METADATA.ui_type:
# get metadata list only if you need it
if meta_keys is None:
@ -693,11 +668,11 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append(f"isNull(ms.{metadata.index_to_colname(meta_keys[f.source])})")
else:
extra_constraints.append(
_multiple_conditions(
sh.multi_conditions(
f"s.{metadata.index_to_colname(meta_keys[f.source])} {op} toString(%({f_k})s)",
f.value, is_not=is_not, value_key=f_k))
ss_constraints.append(
_multiple_conditions(
sh.multi_conditions(
f"ms.{metadata.index_to_colname(meta_keys[f.source])} {op} toString(%({f_k})s)",
f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE]:
@ -709,11 +684,11 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNull(ms.user_id)')
else:
extra_constraints.append(
_multiple_conditions(f"s.user_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.user_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.user_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.user_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.USER_ANONYMOUS_ID,
schemas.FilterType.USER_ANONYMOUS_ID_MOBILE]:
if is_any:
@ -724,11 +699,11 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNull(ms.user_anonymous_id)')
else:
extra_constraints.append(
_multiple_conditions(f"s.user_anonymous_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.user_anonymous_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.user_anonymous_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.user_anonymous_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE]:
if is_any:
extra_constraints.append('isNotNull(s.rev_id)')
@ -738,19 +713,19 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
ss_constraints.append('isNull(ms.rev_id)')
else:
extra_constraints.append(
_multiple_conditions(f"s.rev_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.rev_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.rev_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.rev_id {op} toString(%({f_k})s)", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.PLATFORM:
# op = sh.get_sql_operator(f.operator)
extra_constraints.append(
_multiple_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.ISSUE:
if is_any:
extra_constraints.append("notEmpty(s.issue_types)")
@ -760,21 +735,21 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
issues.append(f)
extra_constraints.append(f"hasAny(s.issue_types,%({f_k})s)")
# _multiple_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not,
# sh.multi_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not,
# value_key=f_k))
ss_constraints.append(f"hasAny(ms.issue_types,%({f_k})s)")
# _multiple_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not,
# sh.multi_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not,
# value_key=f_k))
if is_not:
extra_constraints[-1] = f"not({extra_constraints[-1]})"
ss_constraints[-1] = f"not({ss_constraints[-1]})"
elif filter_type == schemas.FilterType.EVENTS_COUNT:
extra_constraints.append(
_multiple_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
_multiple_conditions(f"ms.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
sh.multi_conditions(f"ms.events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
else:
continue
include_in_events = True
@ -788,7 +763,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
if len(data.events) > 0:
valid_events_count = 0
for event in data.events:
is_any = _isAny_opreator(event.operator)
is_any = sh.isAny_opreator(event.operator)
if not isinstance(event.value, list):
event.value = [event.value]
if __is_valid_event(is_any=is_any, event=event):
@ -800,7 +775,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
or_events = data.events_order == schemas.SearchEventOrder.OR
for i, event in enumerate(data.events):
event_type = event.type
is_any = _isAny_opreator(event.operator)
is_any = sh.isAny_opreator(event.operator)
if not isinstance(event.value, list):
event.value = [event.value]
if not __is_valid_event(is_any=is_any, event=event):
@ -822,8 +797,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
event.value = helper.values_for_operator(value=event.value, op=event.operator)
full_args = {**full_args,
**_multiple_values(event.value, value_key=e_k),
**_multiple_values(event.source, value_key=s_k)}
**sh.multi_values(event.value, value_key=e_k),
**sh.multi_values(event.source, value_key=s_k)}
if event_type == events.EventType.CLICK.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
@ -835,19 +810,19 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
if not is_any:
if schemas.ClickEventExtraOperator.has_value(event.operator):
event_where.append(
_multiple_conditions(f"main.selector {op} %({e_k})s", event.value, value_key=e_k))
sh.multi_conditions(f"main.selector {op} %({e_k})s", event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
else:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{
"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
else:
_column = events.EventType.CLICK_MOBILE.column
@ -856,15 +831,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{
"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.INPUT.ui_type:
@ -876,20 +851,20 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{
"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
if event.source is not None and len(event.source) > 0:
event_where.append(_multiple_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key=f"custom{i}"))
full_args = {**full_args, **_multiple_values(event.source, value_key=f"custom{i}")}
event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key=f"custom{i}"))
full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")}
else:
_column = events.EventType.INPUT_MOBILE.column
event_where.append(
@ -897,15 +872,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{
"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.LOCATION.ui_type:
@ -917,15 +892,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{
"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
else:
_column = events.EventType.VIEW_MOBILE.column
@ -934,15 +909,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{
"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.CUSTOM.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
@ -951,14 +926,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.REQUEST.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
@ -967,14 +942,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.STATEACTION.ui_type:
@ -984,14 +959,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
# TODO: isNot for ERROR
elif event_type == events.EventType.ERROR.ui_type:
@ -1003,12 +978,12 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions[-1]["condition"] = []
if not is_any and event.value not in [None, "*", ""]:
event_where.append(
_multiple_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
sh.multi_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)",
event.value, value_key=e_k))
events_conditions[-1]["condition"].append(event_where[-1])
events_extra_join += f" AND {event_where[-1]}"
if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
event_where.append(_multiple_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k))
event_where.append(sh.multi_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k))
events_conditions[-1]["condition"].append(event_where[-1])
events_extra_join += f" AND {event_where[-1]}"
@ -1021,14 +996,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.INPUT_MOBILE.ui_type:
_column = events.EventType.INPUT_MOBILE.column
@ -1036,14 +1011,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.VIEW_MOBILE.ui_type:
_column = events.EventType.VIEW_MOBILE.column
@ -1051,14 +1026,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.CUSTOM_MOBILE.ui_type:
_column = events.EventType.CUSTOM_MOBILE.column
@ -1066,14 +1041,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.REQUEST_MOBILE.ui_type:
event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
@ -1082,14 +1057,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.CRASH_MOBILE.ui_type:
_column = events.EventType.CRASH_MOBILE.column
@ -1097,14 +1072,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == events.EventType.SWIPE_MOBILE.ui_type and platform != "web":
_column = events.EventType.SWIPE_MOBILE.column
@ -1112,14 +1087,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"] = event_where[-1]
elif event_type == schemas.PerformanceEventType.FETCH_FAILED:
@ -1130,14 +1105,14 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions[-1]["condition"] = []
if not is_any:
if is_not:
event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
event_where.append(sh.multi_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
value_key=e_k))
events_conditions_not.append(
{"type": f"sub.event_type='{exp_ch_helper.get_event_type(event_type, platform=platform)}'"})
events_conditions_not[-1]["condition"] = event_where[-1]
else:
event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
event_where.append(sh.multi_conditions(f"main.{_column} {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"].append(event_where[-1])
col = performance_event.get_col(event_type)
colname = col["column"]
@ -1157,15 +1132,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
tname = "main"
if not is_any:
event_where.append(
_multiple_conditions(f"main.url_path {op} %({e_k})s",
event.value, value_key=e_k))
sh.multi_conditions(f"main.url_path {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"].append(event_where[-1])
e_k += "_custom"
full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
event_where.append(f"isNotNull({tname}.{colname}) AND {tname}.{colname}>0 AND " +
_multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
events_conditions[-1]["condition"].append(event_where[-1])
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
# TODO: isNot for PerformanceEvent
@ -1180,15 +1155,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
tname = "main"
if not is_any:
event_where.append(
_multiple_conditions(f"main.url_path {op} %({e_k})s",
event.value, value_key=e_k))
sh.multi_conditions(f"main.url_path {op} %({e_k})s",
event.value, value_key=e_k))
events_conditions[-1]["condition"].append(event_where[-1])
e_k += "_custom"
full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
event_where.append(f"isNotNull({tname}.{colname}) AND {tname}.{colname}>0 AND " +
_multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
event.source, value_key=e_k))
events_conditions[-1]["condition"].append(event_where[-1])
events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"])
@ -1199,44 +1174,44 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
apply = False
events_conditions[-1]["condition"] = []
for j, f in enumerate(event.filters):
is_any = _isAny_opreator(f.operator)
is_any = sh.isAny_opreator(f.operator)
if is_any or len(f.value) == 0:
continue
f.value = helper.values_for_operator(value=f.value, op=f.operator)
op = sh.get_sql_operator(f.operator)
e_k_f = e_k + f"_fetch{j}"
full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.FetchFilterType.FETCH_URL:
event_where.append(
_multiple_conditions(f"main.url_path {op} %({e_k_f})s", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.url_path {op} %({e_k_f})s", f.value,
value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
elif f.type == schemas.FetchFilterType.FETCH_STATUS_CODE:
event_where.append(
_multiple_conditions(f"main.status {f.operator} %({e_k_f})s", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.status {f.operator} %({e_k_f})s", f.value,
value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
elif f.type == schemas.FetchFilterType.FETCH_METHOD:
event_where.append(
_multiple_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
elif f.type == schemas.FetchFilterType.FETCH_DURATION:
event_where.append(
_multiple_conditions(f"main.duration {f.operator} %({e_k_f})s", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.duration {f.operator} %({e_k_f})s", f.value,
value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
elif f.type == schemas.FetchFilterType.FETCH_REQUEST_BODY:
event_where.append(
_multiple_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
elif f.type == schemas.FetchFilterType.FETCH_RESPONSE_BODY:
event_where.append(
_multiple_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
apply = True
else:
@ -1252,29 +1227,29 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
events_conditions.append({"type": event_where[-1]})
events_conditions[-1]["condition"] = []
for j, f in enumerate(event.filters):
is_any = _isAny_opreator(f.operator)
is_any = sh.isAny_opreator(f.operator)
if is_any or len(f.value) == 0:
continue
f.value = helper.values_for_operator(value=f.value, op=f.operator)
op = sh.get_sql_operator(f.operator)
e_k_f = e_k + f"_graphql{j}"
full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)}
if f.type == schemas.GraphqlFilterType.GRAPHQL_NAME:
event_where.append(
_multiple_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value,
value_key=e_k_f))
sh.multi_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value,
value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
elif f.type == schemas.GraphqlFilterType.GRAPHQL_METHOD:
event_where.append(
_multiple_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
elif f.type == schemas.GraphqlFilterType.GRAPHQL_REQUEST_BODY:
event_where.append(
_multiple_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
elif f.type == schemas.GraphqlFilterType.GRAPHQL_RESPONSE_BODY:
event_where.append(
_multiple_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
sh.multi_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
events_conditions[-1]["condition"].append(event_where[-1])
else:
logging.warning(f"undefined GRAPHQL filter: {f.type}")
@ -1464,9 +1439,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
for i, f in enumerate(issues):
f_k_v = f"f_issue_v{i}"
f_k_s = f_k_v + "_source"
full_args = {**full_args, **_multiple_values(f.value, value_key=f_k_v), f_k_s: f.source}
issues_conditions.append(_multiple_conditions(f"issues.type=%({f_k_v})s", f.value,
value_key=f_k_v))
full_args = {**full_args, **sh.multi_values(f.value, value_key=f_k_v), f_k_s: f.source}
issues_conditions.append(sh.multi_conditions(f"issues.type=%({f_k_v})s", f.value,
value_key=f_k_v))
issues_conditions[-1] = f"({issues_conditions[-1]} AND issues.context_string=%({f_k_s})s)"
extra_join = f"""INNER JOIN (SELECT DISTINCT events.session_id
FROM experimental.issues
@ -1483,17 +1458,17 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
if extra_conditions and len(extra_conditions) > 0:
_extra_or_condition = []
for i, c in enumerate(extra_conditions):
if _isAny_opreator(c.operator):
if sh.isAny_opreator(c.operator):
continue
e_k = f"ec_value{i}"
op = sh.get_sql_operator(c.operator)
c.value = helper.values_for_operator(value=c.value, op=c.operator)
full_args = {**full_args,
**_multiple_values(c.value, value_key=e_k)}
**sh.multi_values(c.value, value_key=e_k)}
if c.type == events.EventType.LOCATION.ui_type:
_extra_or_condition.append(
_multiple_conditions(f"extra_event.url_path {op} %({e_k})s",
c.value, value_key=e_k))
sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s",
c.value, value_key=e_k))
else:
logging.warning(f"unsupported extra_event type:${c.type}")
if len(_extra_or_condition) > 0:
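
Most of the churn in this sessions search module is mechanical: the local _multiple_conditions / _multiple_values helpers (deleted at the top of the file) are replaced by the shared sh.multi_conditions / sh.multi_values, and the local _isAny_opreator / _isUndefined_operator checks by their sh counterparts. Based on the removed local implementations, a small self-contained illustration of what the condition/values pair produces for a browser filter (the shared sh helpers are assumed to behave the same way, apart from the Enum handling shown in the shared-helper diff below):

def multi_values(values, value_key="value"):
    # Expands a list of values into numbered placeholders: {"f_value0_0": "Chrome", ...}
    return {f"{value_key}_{i}": v for i, v in enumerate(values or [])}

def multi_conditions(condition, values, value_key="value", is_not=False):
    # Repeats the condition once per value; joined with OR, or AND when negated.
    parts = [condition.replace(value_key, f"{value_key}_{i}") for i in range(len(values))]
    return "(" + (" AND " if is_not else " OR ").join(parts) + ")"

f_k = "f_value0"
values = ["Chrome", "Firefox"]
print(multi_conditions(f"s.user_browser = %({f_k})s", values, value_key=f_k))
# (s.user_browser = %(f_value0_0)s OR s.user_browser = %(f_value0_1)s)
print(multi_values(values, value_key=f_k))
# {'f_value0_0': 'Chrome', 'f_value0_1': 'Firefox'}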


@ -1,7 +1,11 @@
import logging
from chalicelib.utils import pg_client, helper, email_helper, smtp
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.helper import get_issue_title
logger = logging.getLogger(__name__)
LOWEST_BAR_VALUE = 3
@ -30,7 +34,7 @@ def edit_config(user_id, weekly_report):
def cron():
if not smtp.has_smtp():
print("!!! No SMTP configuration found, ignoring weekly report")
logger.info("!!! No SMTP configuration found, ignoring weekly report")
return
_now = TimeUTC.now()
with pg_client.PostgresClient(unlimited_query=True) as cur:
@ -88,17 +92,17 @@ def cron():
) AS month_1_issues ON (TRUE);"""), params)
projects_data = cur.fetchall()
_now2 = TimeUTC.now()
print(f">> Weekly report query: {_now2 - _now} ms")
logger.debug(f">> Weekly report query: {_now2 - _now} ms")
_now = _now2
emails_to_send = []
for p in projects_data:
params["project_id"] = p["project_id"]
print(f"checking {p['project_name']} : {p['project_id']}")
logger.debug(f"checking {p['project_name']} : {p['project_id']}")
if len(p["emails"]) == 0 \
or p["this_week_issues_count"] + p["past_week_issues_count"] + p["past_month_issues_count"] == 0:
print('ignore')
logger.debug('ignore')
continue
print("valid")
logger.debug("valid")
p["past_week_issues_evolution"] = helper.__decimal_limit(
helper.__progress(p["this_week_issues_count"], p["past_week_issues_count"]), 1)
p["past_month_issues_evolution"] = helper.__decimal_limit(
@ -121,7 +125,7 @@ def cron():
ORDER BY timestamp_i;""", params))
days_partition = cur.fetchall()
_now2 = TimeUTC.now()
print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
logger.debug(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
_now = _now2
max_days_partition = max(x['issues_count'] for x in days_partition)
for d in days_partition:
@ -140,7 +144,7 @@ def cron():
LIMIT 4;""", params))
issues_by_type = cur.fetchall()
_now2 = TimeUTC.now()
print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
logger.debug(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
_now = _now2
max_issues_by_type = sum(i["count"] for i in issues_by_type)
for i in issues_by_type:
@ -172,7 +176,7 @@ def cron():
ORDER BY timestamp_i;""", params))
issues_breakdown_by_day = cur.fetchall()
_now2 = TimeUTC.now()
print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
logger.debug(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
_now = _now2
for i in issues_breakdown_by_day:
i["sum"] = sum(x["count"] for x in i["partition"])
@ -221,7 +225,7 @@ def cron():
ORDER BY issue_count DESC;""", params))
issues_breakdown_list = cur.fetchall()
_now2 = TimeUTC.now()
print(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
logger.debug(f">> Weekly report s-query-1: {_now2 - _now} ms project_id: {p['project_id']}")
_now = _now2
if len(issues_breakdown_list) > 4:
others = {"type": "Others",
@ -255,6 +259,6 @@ def cron():
"issues_breakdown_by_day": issues_breakdown_by_day,
"issues_breakdown_list": issues_breakdown_list
}})
print(f">>> Sending weekly report to {len(emails_to_send)} email-group")
logger.info(f">>> Sending weekly report to {len(emails_to_send)} email-group")
for e in emails_to_send:
email_helper.weekly_report2(recipients=e["email"], data=e["data"])


@ -16,6 +16,16 @@ def get_main_events_table(timestamp=0, platform="web"):
def get_main_sessions_table(timestamp=0):
return "experimental.sessions"
def get_user_favorite_sessions_table(timestamp=0):
return "experimental.user_favorite_sessions"
def get_user_viewed_sessions_table(timestamp=0):
return "experimental.user_viewed_sessions"
def get_user_viewed_errors_table(timestamp=0):
return "experimental.user_viewed_errors"
def get_main_js_errors_sessions_table(timestamp=0):


@ -1,5 +1,5 @@
from typing import Union
from enum import Enum
import schemas
@ -49,7 +49,7 @@ def multi_values(values, value_key="value"):
if values is not None and isinstance(values, list):
for i in range(len(values)):
k = f"{value_key}_{i}"
query_values[k] = values[i]
query_values[k] = values[i].value if isinstance(values[i], Enum) else values[i]
return query_values
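
The one behavioral change in this shared helper: multi_values now unwraps Enum members, so schema enums can be passed straight through as filter values. A quick illustration with a stand-in enum (Operator here is made up for the example):

from enum import Enum

class Operator(Enum):
    IS = "is"

def multi_values(values, value_key="value"):
    query_values = {}
    if values is not None and isinstance(values, list):
        for i, v in enumerate(values):
            # Enum members are reduced to their .value before being bound as query params
            query_values[f"{value_key}_{i}"] = v.value if isinstance(v, Enum) else v
    return query_values

print(multi_values([Operator.IS, "contains"], value_key="op"))
# {'op_0': 'is', 'op_1': 'contains'}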


@ -8,8 +8,9 @@ from chalicelib.core import sourcemaps, events, projects, alerts, issues, \
metadata, reset_password, \
log_tools, sessions, announcements, \
weekly_report, assist, mobile, tenants, boarding, \
notifications, webhook, users, saved_search, tags, autocomplete
notifications, webhook, users, saved_search, tags
from chalicelib.core.metrics import custom_metrics
from chalicelib.core.autocomplete import autocomplete
from chalicelib.core.issue_tracking import github, integrations_global, integrations_manager, \
jira_cloud
from chalicelib.core.log_tools import datadog, newrelic, stackdriver, elasticsearch, \