fix(chalice): fixed missing timestamp in session replay

fix(chalice): fixed nested custom events in session replay
fix(chalice): fixed issue events in session replay
Taha Yassine Kraiem 2025-05-27 12:17:04 +02:00 committed by Kraiem Taha Yassine
parent 63b89c816b
commit ed39bbf1d4
4 changed files with 35 additions and 20 deletions


@@ -1,18 +1,13 @@
 from chalicelib.utils import ch_client
 from .events_pg import *
+from chalicelib.utils.exp_ch_helper import explode_dproperties, add_timestamp
 
 
-def __explode_properties(rows):
-    for i in range(len(rows)):
-        rows[i] = {**rows[i], **rows[i]["$properties"]}
-        rows[i].pop("$properties")
-    return rows
-
-
 def get_customs_by_session_id(session_id, project_id):
     with ch_client.ClickHouseClient() as cur:
         rows = cur.execute(""" \
                SELECT `$properties`,
+                      properties,
                       created_at,
                       'CUSTOM' AS type
                FROM product_analytics.events
@@ -21,8 +16,10 @@ def get_customs_by_session_id(session_id, project_id):
                  AND `$event_name`!='INCIDENT'
                ORDER BY created_at;""",
                           {"project_id": project_id, "session_id": session_id})
-        rows = __explode_properties(rows)
-        return helper.list_to_camel_case(rows)
+        rows = helper.list_to_camel_case(rows, ignore_keys=["properties"])
+        rows = explode_dproperties(rows)
+        rows = add_timestamp(rows)
+        return rows
 
 
 def __merge_cells(rows, start, count, replacement):
@@ -69,12 +66,13 @@ def get_by_session_id(session_id, project_id, group_clickrage=False, event_type:
                           parameters={"project_id": project_id, "session_id": session_id,
                                       "select_events": select_events})
         rows = cur.execute(query)
-        rows = __explode_properties(rows)
+        rows = explode_dproperties(rows)
         if group_clickrage and 'CLICK' in select_events:
             rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
         rows = helper.list_to_camel_case(rows)
         rows = sorted(rows, key=lambda k: k["createdAt"])
+        rows = add_timestamp(rows)
         return rows
@@ -91,7 +89,7 @@ def get_incidents_by_session_id(session_id, project_id):
                ORDER BY created_at;""",
                           parameters={"project_id": project_id, "session_id": session_id})
         rows = cur.execute(query)
-        rows = __explode_properties(rows)
+        rows = explode_dproperties(rows)
         rows = helper.list_to_camel_case(rows)
         rows = sorted(rows, key=lambda k: k["createdAt"])
         return rows
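For orientation, a minimal sketch (not part of the commit) of how the reworked post-processing in get_customs_by_session_id is meant to behave. The sample row is invented, and the effect of ignore_keys=["properties"] — leaving the user-supplied payload untouched while the other keys are camel-cased — is inferred from this commit rather than shown in it:

from datetime import datetime, timezone

from chalicelib.utils import helper
from chalicelib.utils.exp_ch_helper import explode_dproperties, add_timestamp

# Invented row shaped like the SELECT above: system fields in `$properties`,
# the raw user-defined payload in `properties`.
rows = [{
    "$properties": {"label": "plan_upgraded"},
    "properties": {"plan_name": "pro", "seat_count": 5},
    "created_at": datetime(2025, 5, 27, 10, 17, 4, tzinfo=timezone.utc),
    "type": "CUSTOM",
}]

rows = helper.list_to_camel_case(rows, ignore_keys=["properties"])  # created_at -> createdAt; nested payload assumed untouched
rows = explode_dproperties(rows)  # lift the `$properties` keys onto the row and drop the wrapper
rows = add_timestamp(rows)        # derive the epoch `timestamp` added by this commit from createdAt
# rows[0] now carries "label", "properties", "createdAt" and "timestamp".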


@@ -1,6 +1,6 @@
 from chalicelib.utils import ch_client, helper
 import datetime
-from .issues_pg import get_all_types
+from chalicelib.utils.exp_ch_helper import explode_dproperties, add_timestamp
 
 
 def get(project_id, issue_id):
@@ -21,7 +21,7 @@ def get(project_id, issue_id):
 def get_by_session_id(session_id, project_id, issue_type=None):
     with ch_client.ClickHouseClient() as cur:
         query = cur.format(query=f"""\
-                SELECT *
+                SELECT created_at, `$properties`
                 FROM product_analytics.events
                 WHERE session_id = %(session_id)s
                   AND project_id= %(project_id)s
@@ -29,8 +29,11 @@ def get_by_session_id(session_id, project_id, issue_type=None):
                   {"AND issue_type = %(type)s" if issue_type is not None else ""}
                 ORDER BY created_at;""",
                           parameters={"session_id": session_id, "project_id": project_id, "type": issue_type})
-        data = cur.execute(query)
-        return helper.list_to_camel_case(data)
+        rows = cur.execute(query)
+        rows = explode_dproperties(rows)
+        rows = helper.list_to_camel_case(rows)
+        rows = add_timestamp(rows)
+        return rows
 
 
 # To reduce the number of issues in the replay;


@@ -1,13 +1,14 @@
 import logging
+import math
 import re
+import struct
+from decimal import Decimal
 from typing import Union, Any
 
 import schemas
 from chalicelib.utils import sql_helper as sh
+from chalicelib.utils.TimeUTC import TimeUTC
 from schemas import SearchEventOperator
-import math
-import struct
-from decimal import Decimal
 
 logger = logging.getLogger(__name__)
@@ -233,3 +234,16 @@ def best_clickhouse_type(value):
         return "Float64"
     raise TypeError(f"Unsupported type: {type(value).__name__}")
+
+
+def explode_dproperties(rows):
+    for i in range(len(rows)):
+        rows[i] = {**rows[i], **rows[i]["$properties"]}
+        rows[i].pop("$properties")
+    return rows
+
+
+def add_timestamp(rows):
+    for row in rows:
+        row["timestamp"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
+    return rows


@@ -15,11 +15,11 @@ def random_string(length=36):
     return "".join(random.choices(string.hexdigits, k=length))
 
 
-def list_to_camel_case(items: list[dict], flatten: bool = False) -> list[dict]:
+def list_to_camel_case(items: list[dict], flatten: bool = False, ignore_keys=[]) -> list[dict]:
     for i in range(len(items)):
         if flatten:
             items[i] = flatten_nested_dicts(items[i])
-        items[i] = dict_to_camel_case(items[i])
+        items[i] = dict_to_camel_case(items[i], ignore_keys=ignore_keys)
     return items