* refactor(chalice): upgraded dependencies

* refactor(chalice): upgraded dependencies
feat(chalice): support heatmaps

* feat(chalice): support table-of-browsers showing user-count

* feat(chalice): support table-of-devices showing user-count

* feat(chalice): support table-of-URLs showing user-count

* feat(chalice): heatmaps select random session from shortest 20

* feat(chalice): save & update specific replay for heatmap-card
Kraiem Taha Yassine 2024-06-27 19:14:36 +02:00 committed by GitHub
parent 4b61e31ed4
commit 094b3a7c06
5 changed files with 51 additions and 22 deletions

View file

@@ -330,11 +330,14 @@ def __get_path_analysis_card_info(data: schemas.CardPathAnalysis):
 def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
     with pg_client.PostgresClient() as cur:
         session_data = None
-        if data.metric_type == schemas.MetricType.click_map:
-            session_data = __get_click_map_chart(project_id=project_id, user_id=user_id,
-                                                 data=data, include_mobs=False)
-            if session_data is not None:
-                session_data = json.dumps(session_data)
+        if data.metric_type in (schemas.MetricType.click_map, schemas.MetricType.heat_map):
+            if data.session_id is not None:
+                session_data = json.dumps({"sessionId": data.session_id})
+            else:
+                session_data = __get_click_map_chart(project_id=project_id, user_id=user_id,
+                                                     data=data, include_mobs=False)
+                if session_data is not None:
+                    session_data = json.dumps({"sessionId": session_data["sessionId"]})
         _data = {"session_data": session_data}
         for i, s in enumerate(data.series):
             for k in s.model_dump().keys():
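
Read as a whole, the new create_card branch decides what gets persisted as the card's session data: a replay pinned by the caller wins, otherwise the automatic picker is consulted. A minimal consolidation sketch of that decision, using only names visible in this hunk (it assumes the module's existing json import and the __get_click_map_chart helper) and not the committed code verbatim:

    import json

    def resolve_heatmap_session_data(project_id, user_id, data):
        # Caller pinned a replay: store its id directly.
        if data.session_id is not None:
            return json.dumps({"sessionId": data.session_id})
        # Otherwise fall back to the automatic picker; it may return None
        # when no suitable session exists yet.
        chart = __get_click_map_chart(project_id=project_id, user_id=user_id,
                                      data=data, include_mobs=False)
        if chart is not None:
            return json.dumps({"sessionId": chart["sessionId"]})
        return None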
@@ -373,7 +376,8 @@ def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
 def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
-    metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
+    metric: dict = get_card(metric_id=metric_id, project_id=project_id,
+                            user_id=user_id, flatten=False, include_data=True)
     if metric is None:
         return None
     series_ids = [r["seriesId"] for r in metric["series"]]
@@ -406,8 +410,12 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
             d_series_ids.append(i)
     params["d_series_ids"] = tuple(d_series_ids)
     params["card_info"] = None
+    params["session_data"] = metric["data"]
     if data.metric_type == schemas.MetricType.pathAnalysis:
         params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))
+    elif data.metric_type in (schemas.MetricType.click_map, schemas.MetricType.heat_map) \
+            and data.session_id is not None:
+        params["session_data"] = json.dumps({"sessionId": data.session_id})
 
     with pg_client.PostgresClient() as cur:
         sub_queries = []
@@ -441,7 +449,8 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
                         edited_at = timezone('utc'::text, now()),
                         default_config = %(config)s,
                         thumbnail = %(thumbnail)s,
-                        card_info = %(card_info)s
+                        card_info = %(card_info)s,
+                        data = %(session_data)s
                     WHERE metric_id = %(metric_id)s
                       AND project_id = %(project_id)s
                       AND (user_id = %(user_id)s OR is_public)
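
The UPDATE now writes the data column on every save. params["session_data"] starts from the card's previously stored value (metric["data"], available because get_card is now called with include_data=True) and is replaced only when the request pins a session_id on a click-map or heat-map card. A hypothetical parameter dict for the placeholders visible above (ids and values are invented for illustration; placeholders not shown in this hunk are omitted):

    import json

    params = {
        "metric_id": 42,                 # hypothetical card id
        "project_id": 1,
        "user_id": 7,
        "config": json.dumps({}),        # feeds default_config
        "thumbnail": None,
        "card_info": None,               # only set for path-analysis cards
        # kept from the stored card unless the request pinned a new replay
        "session_data": json.dumps({"sessionId": 123456789}),
    }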

View file

@@ -184,9 +184,12 @@ def search_short_session(data: schemas.ClickMapSessionsSearch, project_id, user_
     with pg_client.PostgresClient() as cur:
         data.order = schemas.SortOrderType.desc
         data.sort = 'duration'
-        main_query = cur.mogrify(f"""SELECT {SESSION_PROJECTION_COLS}
-                                     {query_part}
-                                     ORDER BY {data.sort} {data.order.value}
+        main_query = cur.mogrify(f"""SELECT *
+                                     FROM (SELECT {SESSION_PROJECTION_COLS}
+                                           {query_part}
+                                           ORDER BY {data.sort} {data.order.value}
+                                           LIMIT 20) AS raw
+                                     ORDER BY random()
                                      LIMIT 1;""", full_args)
         logger.debug("--------------------")
         logger.debug(main_query)
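
This is the query behind the commit-message bullet about picking a random session from 20 candidates: the original projection and ORDER BY move into a 20-row subquery, and the outer query shuffles those rows with ORDER BY random() and keeps one, so repeated card renders don't always land on the same replay. The same pattern stated standalone (the sessions table and its columns here are placeholders, not the project's actual schema):

    # Illustrative sampling pattern only; not the project's real projection.
    PICK_ONE_OF_TWENTY = """
        SELECT *
        FROM (SELECT session_id, duration
              FROM sessions
              WHERE project_id = %(project_id)s
              ORDER BY duration DESC
              LIMIT 20) AS raw
        ORDER BY random()   -- PostgreSQL; the ClickHouse variant uses rand()
        LIMIT 1;
    """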

View file

@@ -1128,6 +1128,8 @@ class __CardSchema(CardSessionsSchema):
     metric_type: MetricType = Field(...)
     metric_of: Any
     metric_value: List[IssueType] = Field(default=[])
+    # This is used to save the selected session for heatmaps
+    session_id: Optional[int] = Field(default=None)
 
     @computed_field
     @property
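
With session_id now part of the card schema, a create or update request can pin the exact replay a heat-map card should render; when it is omitted, the server keeps choosing one automatically. A hypothetical request body (only metric_type and session_id come from this diff; the other fields and the serialized enum literal are illustrative):

    # Hypothetical heat-map card payload; values are for illustration only.
    payload = {
        "name": "Checkout page heatmap",
        "metric_type": "heatMap",      # whatever schemas.MetricType.heat_map serializes to
        "session_id": 123456789,       # replay saved with the card
    }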

View file

@@ -350,9 +350,12 @@ def __get_path_analysis_card_info(data: schemas.CardPathAnalysis):
 def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
     with pg_client.PostgresClient() as cur:
         session_data = None
-        if data.metric_type == schemas.MetricType.click_map:
-            session_data = __get_click_map_chart(project_id=project_id, user_id=user_id,
-                                                 data=data, include_mobs=False)
+        if data.metric_type in (schemas.MetricType.click_map, schemas.MetricType.heat_map):
+            if data.session_id is not None:
+                session_data = json.dumps({"sessionId": data.session_id})
+            else:
+                session_data = __get_click_map_chart(project_id=project_id, user_id=user_id,
+                                                     data=data, include_mobs=False)
             if session_data is not None:
                 # for EE only
                 keys = sessions_mobs. \
@@ -405,7 +408,8 @@ def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
 def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
-    metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
+    metric: dict = get_card(metric_id=metric_id, project_id=project_id,
+                            user_id=user_id, flatten=False, include_data=True)
     if metric is None:
         return None
     series_ids = [r["seriesId"] for r in metric["series"]]
@@ -438,8 +442,12 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
             d_series_ids.append(i)
     params["d_series_ids"] = tuple(d_series_ids)
     params["card_info"] = None
+    params["session_data"] = metric["data"]
     if data.metric_type == schemas.MetricType.pathAnalysis:
         params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))
+    elif data.metric_type in (schemas.MetricType.click_map, schemas.MetricType.heat_map) \
+            and data.session_id is not None:
+        params["session_data"] = json.dumps({"sessionId": data.session_id})
 
     with pg_client.PostgresClient() as cur:
         sub_queries = []
@@ -473,7 +481,8 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
                         edited_at = timezone('utc'::text, now()),
                         default_config = %(config)s,
                         thumbnail = %(thumbnail)s,
-                        card_info = %(card_info)s
+                        card_info = %(card_info)s,
+                        data = %(session_data)s
                     WHERE metric_id = %(metric_id)s
                       AND project_id = %(project_id)s
                       AND (user_id = %(user_id)s OR is_public)

View file

@@ -200,9 +200,12 @@ if not config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
         with pg_client.PostgresClient() as cur:
             data.order = schemas.SortOrderType.desc
             data.sort = 'duration'
-            main_query = cur.mogrify(f"""SELECT {SESSION_PROJECTION_COLS}
-                                         {query_part}
-                                         ORDER BY {data.sort} {data.order.value}
+            main_query = cur.mogrify(f"""SELECT *
+                                         FROM (SELECT {SESSION_PROJECTION_COLS}
+                                               {query_part}
+                                               ORDER BY {data.sort} {data.order.value}
+                                               LIMIT 20) AS raw
+                                         ORDER BY random()
                                          LIMIT 1;""", full_args)
             logger.debug("--------------------")
             logger.debug(main_query)
@@ -282,10 +285,13 @@ else:
         with ch_client.ClickHouseClient() as cur:
             data.order = schemas.SortOrderType.desc
             data.sort = 'duration'
-            main_query = cur.format(f"""SELECT {SESSION_PROJECTION_COLS}
-                                        {query_part}
-                                        ORDER BY {data.sort} {data.order.value}
-                                        LIMIT 1;""", full_args)
+            main_query = cur.format(f"""SELECT *
+                                        FROM (SELECT {SESSION_PROJECTION_COLS}
+                                              {query_part}
+                                              ORDER BY {data.sort} {data.order.value}
+                                              LIMIT 20) AS raw
+                                        ORDER BY rand()
+                                        LIMIT 1;""", full_args)
             logger.debug("--------------------")
             logger.debug(main_query)
             logger.debug("--------------------")