openreplay/api/chalicelib/core/heatmaps.py
Last commit: a34179365e, Api v1.15.0 (#1464), Kraiem Taha Yassine, 2023-09-06 17:06:33 +01:00

import schemas
from chalicelib.utils import helper, pg_client
from chalicelib.utils import sql_helper as sh


def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
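    """Return per-selector click counts for the click heatmap of a given URL.

    Builds a parameterized PostgreSQL query over events.clicks joined with
    sessions, optionally narrowed by issue-type filters and/or click-rage
    detection, and returns at most 500 rows with camelCase keys.
    """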
args = {"startDate": data.startTimestamp, "endDate": data.endTimestamp,
"project_id": project_id, "url": data.url}
constraints = ["sessions.project_id = %(project_id)s",
"(url = %(url)s OR path= %(url)s)",
"clicks.timestamp >= %(startDate)s",
"clicks.timestamp <= %(endDate)s",
"start_ts >= %(startDate)s",
"start_ts <= %(endDate)s",
"duration IS NOT NULL"]
query_from = "events.clicks INNER JOIN sessions USING (session_id)"
q_count = "count(1) AS count"
has_click_rage_filter = False
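    # When an issue-type filter is present, restrict the outer query to sessions
    # that raised those issues and use a LATERAL subquery to count every click
    # on the same selector within the time range.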
    if len(data.filters) > 0:
        for i, f in enumerate(data.filters):
            if f.type == schemas.FilterType.issue and len(f.value) > 0:
                has_click_rage_filter = True
                q_count = "max(real_count) AS count, TRUE AS click_rage"
                query_from += """ INNER JOIN events_common.issues USING (timestamp, session_id)
                                  INNER JOIN issues AS mis USING (issue_id)
                                  INNER JOIN LATERAL (
                                      SELECT COUNT(1) AS real_count
                                      FROM events.clicks AS sc
                                               INNER JOIN sessions AS ss USING (session_id)
                                      WHERE ss.project_id = %(project_id)s
                                        AND (sc.url = %(url)s OR sc.path = %(url)s)
                                        AND sc.timestamp >= %(startDate)s
                                        AND sc.timestamp <= %(endDate)s
                                        AND ss.start_ts >= %(startDate)s
                                        AND ss.start_ts <= %(endDate)s
                                        AND sc.selector = clicks.selector) AS r_clicks ON (TRUE)"""
                constraints += ["mis.project_id = %(project_id)s",
                                "issues.timestamp >= %(startDate)s",
                                "issues.timestamp <= %(endDate)s"]
                f_k = f"issue_value{i}"
                args = {**args, **sh.multi_values(f.value, value_key=f_k)}
                constraints.append(sh.multi_conditions(f"%({f_k})s = ANY (issue_types)",
                                                       f.value, value_key=f_k))
                constraints.append(sh.multi_conditions(f"mis.type = %({f_k})s",
                                                       f.value, value_key=f_k))
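    # If click-rage highlighting is requested and no issue filter already joined
    # the issue tables, LEFT JOIN them so selectors without issues still appear,
    # and flag click rage per selector via bool_or.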
    if data.click_rage and not has_click_rage_filter:
        constraints.append("""(issues.session_id IS NULL
                               OR (issues.timestamp >= %(startDate)s
                                   AND issues.timestamp <= %(endDate)s
                                   AND mis.project_id = %(project_id)s))""")
        q_count += ",COALESCE(bool_or(mis.type = 'click_rage'), FALSE) AS click_rage"
        query_from += """ LEFT JOIN events_common.issues USING (timestamp, session_id)
                          LEFT JOIN issues AS mis USING (issue_id)"""
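    # Bind the parameters client-side with mogrify so the final SQL can be
    # printed verbatim if execution fails.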
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""SELECT selector, {q_count}
                                FROM {query_from}
                                WHERE {" AND ".join(constraints)}
                                GROUP BY selector
                                LIMIT 500;""", args)
        # print("---------")
        # print(query.decode('UTF-8'))
        # print("---------")
        try:
            cur.execute(query)
        except Exception as err:
            print("--------- HEATMAP SEARCH QUERY EXCEPTION -----------")
            print(query.decode('UTF-8'))
            print("--------- PAYLOAD -----------")
            print(data)
            print("--------------------")
            raise err
        rows = cur.fetchall()
    return helper.list_to_camel_case(rows)
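

# Illustrative usage sketch (keyword names mirror the attributes accessed above:
# url, startTimestamp, endTimestamp, click_rage; the actual field names and
# aliases of GetHeatmapPayloadSchema are defined in schemas.py and may differ):
#
#   payload = schemas.GetHeatmapPayloadSchema(url="https://example.com/checkout",
#                                             startTimestamp=1693526400000,
#                                             endTimestamp=1694131200000,
#                                             click_rage=True)
#   rows = get_by_url(project_id=1, data=payload)
#   # -> e.g. [{"selector": "#buy-button", "count": 42, "clickRage": False}, ...]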