refactor(chalice): refactored sessions search

refactor(chalice): refactored notes
fix(chalice): fixed imports
Authored by Taha Yassine Kraiem, 2025-04-23 17:09:08 +01:00; committed by Kraiem Taha Yassine
parent 22d71ceb14
commit bbdde7be81
6 changed files with 109 additions and 311 deletions


@@ -6,6 +6,7 @@ from decouple import config
import schemas
from chalicelib.core.collaborations.collaboration_msteams import MSTeams
from chalicelib.core.collaborations.collaboration_slack import Slack
from chalicelib.core.modules import TENANT_CONDITION
from chalicelib.utils import pg_client, helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.TimeUTC import TimeUTC
@@ -16,12 +17,13 @@ logger = logging.getLogger(__name__)
def get_note(tenant_id, project_id, user_id, note_id, share=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
{",(SELECT name FROM users WHERE user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""}
{f",(SELECT name FROM users WHERE {TENANT_CONDITION} AND user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""}
FROM sessions_notes INNER JOIN users USING (user_id)
WHERE sessions_notes.project_id = %(project_id)s
AND sessions_notes.note_id = %(note_id)s
AND sessions_notes.deleted_at IS NULL
AND (sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public);""",
AND (sessions_notes.user_id = %(user_id)s
OR sessions_notes.is_public AND {TENANT_CONDITION});""",
{"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id,
"note_id": note_id, "share": share})
@@ -42,7 +44,7 @@ def get_session_notes(tenant_id, project_id, session_id, user_id):
AND sessions_notes.deleted_at IS NULL
AND sessions_notes.session_id = %(session_id)s
AND (sessions_notes.user_id = %(user_id)s
OR sessions_notes.is_public)
OR sessions_notes.is_public AND {TENANT_CONDITION})
ORDER BY created_at DESC;""",
{"project_id": project_id, "user_id": user_id,
"tenant_id": tenant_id, "session_id": session_id})
@@ -60,7 +62,8 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se
# base conditions
conditions = [
"sessions_notes.project_id = %(project_id)s",
"sessions_notes.deleted_at IS NULL"
"sessions_notes.deleted_at IS NULL",
TENANT_CONDITION
]
params = {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}
@@ -125,9 +128,10 @@ def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNote
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public, thumbnail, start_at, end_at)
VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s, %(thumbnail)s, %(start_at)s, %(end_at)s)
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""",
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND {TENANT_CONDITION}) AS user_name;""",
{"user_id": user_id, "project_id": project_id, "session_id": session_id,
**data.model_dump()})
**data.model_dump(),
"tenant_id": tenant_id})
cur.execute(query)
result = helper.dict_to_camel_case(cur.fetchone())
if result:
@@ -147,6 +151,7 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot
sub_query.append("timestamp = %(timestamp)s")
sub_query.append("updated_at = timezone('utc'::text, now())")
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""UPDATE public.sessions_notes
@@ -157,14 +162,14 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot
AND user_id = %(user_id)s
AND note_id = %(note_id)s
AND deleted_at ISNULL
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""",
{"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.model_dump()})
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND {TENANT_CONDITION}) AS user_name;""",
{"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.model_dump(),
"tenant_id": tenant_id})
)
row = helper.dict_to_camel_case(cur.fetchone())
if row:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
return row
return {"errors": ["Note not found"]}
return row
def delete(project_id, note_id):
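
For context, this file now interpolates the shared TENANT_CONDITION imported from chalicelib.core.modules into each query, which is what lets the EE copy of this module that hard-coded users.tenant_id = %(tenant_id)s (deleted further down in this commit) be removed. A minimal, self-contained sketch of the pattern follows; the constant's real definition is not shown in this diff, so both variants below are assumptions:

# Hypothetical definitions; the real one lives in chalicelib.core.modules.
TENANT_CONDITION = "users.tenant_id = %(tenant_id)s"   # multi-tenant (EE) variant, assumed
# TENANT_CONDITION = "TRUE"                            # single-tenant variant, assumed

# Queries then interpolate the constant once instead of repeating the filter:
query = f"""SELECT sessions_notes.*, users.name AS user_name
            FROM sessions_notes INNER JOIN users USING (user_id)
            WHERE sessions_notes.project_id = %(project_id)s
              AND (sessions_notes.user_id = %(user_id)s
                   OR sessions_notes.is_public AND {TENANT_CONDITION});"""
print(query)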

ee/api/.gitignore (vendored): 15 changed lines

@@ -211,21 +211,8 @@ Pipfile.lock
/chalicelib/core/metadata.py
/chalicelib/core/mobile.py
/chalicelib/core/saved_search.py
/chalicelib/core/sessions/sessions_pg.py
/chalicelib/core/sessions/sessions_ch.py
/chalicelib/core/sessions/sessions_devtool/sessions_devtool.py
/chalicelib/core/sessions/sessions_favorite/sessions_favorite.py
/chalicelib/core/sessions/sessions_assignments.py
/chalicelib/core/sessions/sessions_metas.py
/chalicelib/core/sessions/sessions_mobs.py
/chalicelib/core/sessions/sessions_replay.py
/chalicelib/core/sessions/sessions_search.py
/chalicelib/core/sessions/performance_event.py
/chalicelib/core/sessions/*.py
/chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
/chalicelib/core/sessions/unprocessed_sessions.py
/chalicelib/core/sessions/__init__.py
/chalicelib/core/sessions/sessions_legacy_mobil.py
/chalicelib/core/sessions/sessions_search_exp.py
/chalicelib/core/metrics/modules
/chalicelib/core/socket_ios.py
/chalicelib/core/sourcemaps


@@ -1,273 +0,0 @@
import logging
from urllib.parse import urljoin
from decouple import config
import schemas
from chalicelib.core.collaborations.collaboration_msteams import MSTeams
from chalicelib.core.collaborations.collaboration_slack import Slack
from chalicelib.utils import pg_client, helper
from chalicelib.utils import sql_helper as sh
from chalicelib.utils.TimeUTC import TimeUTC
logger = logging.getLogger(__name__)
def get_note(tenant_id, project_id, user_id, note_id, share=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
{",(SELECT name FROM users WHERE tenant_id=%(tenant_id)s AND user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""}
FROM sessions_notes INNER JOIN users USING (user_id)
WHERE sessions_notes.project_id = %(project_id)s
AND sessions_notes.note_id = %(note_id)s
AND sessions_notes.deleted_at IS NULL
AND (sessions_notes.user_id = %(user_id)s
OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s);""",
{"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id,
"note_id": note_id, "share": share})
cur.execute(query=query)
row = cur.fetchone()
row = helper.dict_to_camel_case(row)
if row:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
row["updatedAt"] = TimeUTC.datetime_to_timestamp(row["updatedAt"])
return row
def get_session_notes(tenant_id, project_id, session_id, user_id):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
FROM sessions_notes INNER JOIN users USING (user_id)
WHERE sessions_notes.project_id = %(project_id)s
AND sessions_notes.deleted_at IS NULL
AND sessions_notes.session_id = %(session_id)s
AND (sessions_notes.user_id = %(user_id)s
OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s)
ORDER BY created_at DESC;""",
{"project_id": project_id, "user_id": user_id,
"tenant_id": tenant_id, "session_id": session_id})
cur.execute(query=query)
rows = cur.fetchall()
rows = helper.list_to_camel_case(rows)
for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
return rows
def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema):
with pg_client.PostgresClient() as cur:
# base conditions
conditions = [
"sessions_notes.project_id = %(project_id)s",
"sessions_notes.deleted_at IS NULL",
"users.tenant_id = %(tenant_id)s"
]
params = {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}
# tag conditions
if data.tags:
tag_key = "tag_value"
conditions.append(
sh.multi_conditions(f"%({tag_key})s = sessions_notes.tag", data.tags, value_key=tag_key)
)
params.update(sh.multi_values(data.tags, value_key=tag_key))
# filter by ownership or shared status
if data.shared_only:
conditions.append("sessions_notes.is_public IS TRUE")
elif data.mine_only:
conditions.append("sessions_notes.user_id = %(user_id)s")
else:
conditions.append("(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public)")
# search condition
if data.search:
conditions.append("sessions_notes.message ILIKE %(search)s")
params["search"] = f"%{data.search}%"
query = f"""
SELECT
COUNT(1) OVER () AS full_count,
sessions_notes.*,
users.name AS user_name
FROM
sessions_notes
INNER JOIN
users USING (user_id)
WHERE
{" AND ".join(conditions)}
ORDER BY
created_at {data.order}
LIMIT
%(limit)s OFFSET %(offset)s;
"""
params.update({
"limit": data.limit,
"offset": data.limit * (data.page - 1)
})
query = cur.mogrify(query, params)
logger.debug(query)
cur.execute(query)
rows = cur.fetchall()
result = {"count": 0, "notes": helper.list_to_camel_case(rows)}
if rows:
result["count"] = rows[0]["fullCount"]
for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
row.pop("fullCount")
return result
def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNoteSchema):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public, thumbnail, start_at, end_at)
VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s, %(thumbnail)s, %(start_at)s, %(end_at)s)
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND users.tenant_id=%(tenant_id)s) AS user_name;""",
{"user_id": user_id, "project_id": project_id, "session_id": session_id,
**data.model_dump(),
"tenant_id": tenant_id})
cur.execute(query)
result = helper.dict_to_camel_case(cur.fetchone())
if result:
result["createdAt"] = TimeUTC.datetime_to_timestamp(result["createdAt"])
return result
def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema):
sub_query = []
if data.message is not None:
sub_query.append("message = %(message)s")
if data.tag is not None and len(data.tag) > 0:
sub_query.append("tag = %(tag)s")
if data.is_public is not None:
sub_query.append("is_public = %(is_public)s")
if data.timestamp is not None:
sub_query.append("timestamp = %(timestamp)s")
sub_query.append("updated_at = timezone('utc'::text, now())")
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""UPDATE public.sessions_notes
SET
{" ,".join(sub_query)}
WHERE
project_id = %(project_id)s
AND user_id = %(user_id)s
AND note_id = %(note_id)s
AND deleted_at ISNULL
RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND users.tenant_id=%(tenant_id)s) AS user_name;""",
{"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.model_dump(),
"tenant_id": tenant_id})
)
row = helper.dict_to_camel_case(cur.fetchone())
if row:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
return row
def delete(project_id, note_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(""" UPDATE public.sessions_notes
SET deleted_at = timezone('utc'::text, now())
WHERE note_id = %(note_id)s
AND project_id = %(project_id)s
AND deleted_at ISNULL;""",
{"project_id": project_id, "note_id": note_id})
)
return {"data": {"state": "success"}}
def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
if note is None:
return {"errors": ["Note not found"]}
session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}")
if note["timestamp"] > 0:
session_url += f"&jumpto={note['timestamp']}"
title = f"<{session_url}|Note for session {note['sessionId']}>"
blocks = [{"type": "section",
"fields": [{"type": "mrkdwn",
"text": title}]},
{"type": "section",
"fields": [{"type": "plain_text",
"text": note["message"]}]}]
if note["tag"]:
blocks.append({"type": "context",
"elements": [{"type": "plain_text",
"text": f"Tag: *{note['tag']}*"}]})
bottom = f"Created by {note['userName'].capitalize()}"
if user_id != note["userId"]:
bottom += f"\nSent by {note['shareName']}: "
blocks.append({"type": "context",
"elements": [{"type": "plain_text",
"text": bottom}]})
return Slack.send_raw(
tenant_id=tenant_id,
webhook_id=webhook_id,
body={"blocks": blocks}
)
def share_to_msteams(tenant_id, user_id, project_id, note_id, webhook_id):
note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
if note is None:
return {"errors": ["Note not found"]}
session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}")
if note["timestamp"] > 0:
session_url += f"&jumpto={note['timestamp']}"
title = f"[Note for session {note['sessionId']}]({session_url})"
blocks = [{
"type": "TextBlock",
"text": title,
"style": "heading",
"size": "Large"
},
{
"type": "TextBlock",
"spacing": "Small",
"text": note["message"]
}
]
if note["tag"]:
blocks.append({"type": "TextBlock",
"spacing": "Small",
"text": f"Tag: *{note['tag']}*",
"size": "Small"})
bottom = f"Created by {note['userName'].capitalize()}"
if user_id != note["userId"]:
bottom += f"\nSent by {note['shareName']}: "
blocks.append({"type": "TextBlock",
"spacing": "Default",
"text": bottom,
"size": "Small",
"fontType": "Monospace"})
return MSTeams.send_raw(
tenant_id=tenant_id,
webhook_id=webhook_id,
body={"type": "message",
"attachments": [
{"contentType": "application/vnd.microsoft.card.adaptive",
"contentUrl": None,
"content": {
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"type": "AdaptiveCard",
"version": "1.5",
"body": [{
"type": "ColumnSet",
"style": "emphasis",
"separator": True,
"bleed": True,
"columns": [{"width": "stretch",
"items": blocks,
"type": "Column"}]
}]}}
]})


@@ -80,3 +80,94 @@ def get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEvent
if event_type not in defs:
raise Exception(f"unsupported EventType:{event_type}")
return defs.get(event_type)
# AI generated
def simplify_clickhouse_type(ch_type: str) -> str:
"""
Simplify a ClickHouse data type name to a broader category like:
int, float, decimal, datetime, string, uuid, enum, array, tuple, map, nested, etc.
"""
# 1) Strip out common wrappers like Nullable(...) or LowCardinality(...)
# Possibly multiple wrappers: e.g. "LowCardinality(Nullable(Int32))"
pattern_wrappers = re.compile(r'(Nullable|LowCardinality)\((.*)\)')
while True:
match = pattern_wrappers.match(ch_type)
if match:
ch_type = match.group(2)
else:
break
# 2) Normalize (lowercase) for easier checks
normalized_type = ch_type.lower()
# 3) Use pattern matching or direct checks for known categories
# (You can adapt this as you see fit for your environment.)
# Integers: Int8, Int16, Int32, Int64, Int128, Int256, UInt8, UInt16, ...
if re.match(r'^(u?int)(8|16|32|64|128|256)$', normalized_type):
return "int"
# Floats: Float32, Float64
if re.match(r'^float(32|64)$', normalized_type):
return "float"
# Decimal: Decimal(P, S)
if normalized_type.startswith("decimal"):
return "decimal"
# Date/DateTime
if normalized_type.startswith("date"):
return "datetime"
if normalized_type.startswith("datetime"):
return "datetime"
# Strings: String, FixedString(N)
if normalized_type.startswith("string"):
return "string"
if normalized_type.startswith("fixedstring"):
return "string"
# UUID
if normalized_type.startswith("uuid"):
return "uuid"
# Enums: Enum8(...) or Enum16(...)
if normalized_type.startswith("enum8") or normalized_type.startswith("enum16"):
return "enum"
# Arrays: Array(T)
if normalized_type.startswith("array"):
return "array"
# Tuples: Tuple(T1, T2, ...)
if normalized_type.startswith("tuple"):
return "tuple"
# Map(K, V)
if normalized_type.startswith("map"):
return "map"
# Nested(...)
if normalized_type.startswith("nested"):
return "nested"
# If we didn't match above, just return the original type in lowercase
return normalized_type
def simplify_clickhouse_types(ch_types: list[str]) -> list[str]:
"""
Takes a list of ClickHouse types and returns the deduplicated list of their
simplified categories, calling `simplify_clickhouse_type` on each.
"""
return list(set([simplify_clickhouse_type(t) for t in ch_types]))
def get_sub_condition(col_name: str, val_name: str,
operator: Union[schemas.SearchEventOperator, schemas.MathOperator]):
if operator == SearchEventOperator.PATTERN:
return f"match({col_name}, %({val_name})s)"
op = sh.get_sql_operator(operator)
return f"{col_name} {op} %({val_name})s"


@@ -32,21 +32,8 @@ rm -rf ./chalicelib/core/log_tools
rm -rf ./chalicelib/core/metadata.py
rm -rf ./chalicelib/core/mobile.py
rm -rf ./chalicelib/core/saved_search.py
rm -rf ./chalicelib/core/sessions/sessions_pg.py
rm -rf ./chalicelib/core/sessions/sessions_ch.py
rm -rf ./chalicelib/core/sessions/sessions_devtool/sessions_devtool.py
rm -rf ./chalicelib/core/sessions/sessions_favorite/sessions_favorite.py
rm -rf ./chalicelib/core/sessions/sessions_assignments.py
rm -rf ./chalicelib/core/sessions/sessions_metas.py
rm -rf ./chalicelib/core/sessions/sessions_mobs.py
rm -rf ./chalicelib/core/sessions/sessions_replay.py
rm -rf ./chalicelib/core/sessions/sessions_search.py
rm -rf ./chalicelib/core/sessions/performance_event.py
rm -rf ./chalicelib/core/sessions/*.py
rm -rf ./chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
rm -rf ./chalicelib/core/sessions/unprocessed_sessions.py
rm -rf ./chalicelib/core/sessions/__init__.py
rm -rf ./chalicelib/core/sessions/sessions_legacy_mobil.py
rm -rf ./chalicelib/core/sessions/sessions_search_exp.py
rm -rf ./chalicelib/core/metrics/modules
rm -rf ./chalicelib/core/socket_ios.py
rm -rf ./chalicelib/core/sourcemaps


@@ -3,3 +3,4 @@ from .schemas_ee import *
from .assist_stats_schema import *
from .product_analytics import *
from . import overrides as _overrides
from .schemas import _PaginatedSchema as PaginatedSchema
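
The added line republishes the private _PaginatedSchema under a public name. A hedged sketch of how a consumer might use it; treating it as a Pydantic model with page and limit fields is an assumption inferred from the pagination arithmetic in get_all_notes_by_project_id above:

import schemas

# Hypothetical consumer: subclass the public alias instead of importing the
# private _PaginatedSchema directly. The page/limit field names are assumptions.
class NoteSearch(schemas.PaginatedSchema):
    query: str = ""

data = NoteSearch(page=2, limit=50, query="checkout")
offset = data.limit * (data.page - 1)  # -> 50, same offset arithmetic as the notes search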