* fix(chalice): fixed Math-operators validation
refactor(chalice): search for sessions that have events for heatmaps

* refactor(chalice): search for sessions that have at least 1 location event for heatmaps (see the sketch after this list)

* feat(chalice): autocomplete returns top 10 with stats

* fix(chalice): fixed autocomplete top 10 meta-filters

* refactor(chalice): removed unused code
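
The hunks below only cover the cleanup part of this commit; the heatmaps search change itself is not shown. As a rough illustration of the "at least 1 location event" restriction described above, a minimal sketch could look like the following. It reuses helpers that appear elsewhere in this diff (ch_client.ClickHouseClient, exp_ch_helper.get_main_events_table), but the import paths, the 'LOCATION' event-type literal, the datetime column, and the function name are assumptions for illustration, not the code that was merged.

# Hypothetical sketch, not part of this commit's diff: narrow heatmap session
# candidates down to sessions that recorded at least one location event.
from chalicelib.utils import ch_client, exp_ch_helper  # import paths assumed


def sessions_with_location_events(project_id, start_timestamp, end_timestamp, limit=100):
    events_table = exp_ch_helper.get_main_events_table(start_timestamp)
    ch_sub_query = [
        "project_id = toUInt16(%(project_id)s)",
        "datetime >= toDateTime(%(startTimestamp)s / 1000)",  # column name assumed
        "datetime < toDateTime(%(endTimestamp)s / 1000)",
        "event_type = 'LOCATION'",  # event-type literal assumed
    ]
    ch_query = f"""SELECT session_id,
                          COUNT(1) AS location_events
                   FROM {events_table} AS events
                   WHERE {" AND ".join(ch_sub_query)}
                   GROUP BY session_id
                   HAVING location_events >= 1
                   ORDER BY location_events DESC
                   LIMIT %(limit)s;"""
    params = {"project_id": project_id,
              "startTimestamp": start_timestamp,
              "endTimestamp": end_timestamp,
              "limit": limit}
    with ch_client.ClickHouseClient() as ch:
        return ch.execute(query=ch_query, params=params)

In the merged code this condition is more likely folded into the existing session-search query builder than exposed as a standalone helper like this.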
Kraiem Taha Yassine 2024-11-06 17:24:03 +01:00 committed by GitHub
parent d1de937ed2
commit c2405dfbb3
8 changed files with 3 additions and 273 deletions

@@ -264,12 +264,6 @@ def __search_metadata(project_id, value, key=None, source=None):
    return helper.list_to_camel_case(results)


class TableColumn:
    def __init__(self, table, column):
        self.table = table
        self.column = column


TYPE_TO_COLUMN = {
    schemas.EventType.CLICK: "label",
    schemas.EventType.INPUT: "label",

@@ -373,122 +373,6 @@ def get_details(project_id, error_id, user_id, **data):
    return {"data": helper.dict_to_camel_case(row)}


def get_details_chart(project_id, error_id, user_id, **data):
    ch_sub_query = __get_basic_constraints()
    ch_sub_query.append("error_id = %(error_id)s")
    with ch_client.ClickHouseClient() as ch:
        if data.get("startDate") is None:
            data["startDate"] = TimeUTC.now(-7)
        else:
            data["startDate"] = int(data["startDate"])
        if data.get("endDate") is None:
            data["endDate"] = TimeUTC.now()
        else:
            data["endDate"] = int(data["endDate"])
        density = int(data.get("density", 7))
        step_size = __get_step_size(data["startDate"], data["endDate"], density)
        params = {
            "startDate": data['startDate'],
            "endDate": data['endDate'],
            "project_id": project_id,
            "userId": user_id,
            "step_size": step_size,
            "error_id": error_id}
        main_ch_query = f"""\
            SELECT browser_details.error_id AS error_id,
                   browsers_partition,
                   os_partition,
                   device_partition,
                   country_partition,
                   chart
            FROM (SELECT %(error_id)s AS error_id,
                         groupArray([[[user_browser]], [[toString(count_per_browser)]],versions_partition]) AS browsers_partition
                  FROM (SELECT user_browser,
                               COUNT(session_id) AS count_per_browser
                        FROM errors
                        WHERE {" AND ".join(ch_sub_query)}
                        GROUP BY user_browser
                        ORDER BY count_per_browser DESC) AS count_per_browser_query
                       INNER JOIN (SELECT user_browser,
                                          groupArray([user_browser_version, toString(count_per_version)]) AS versions_partition
                                   FROM (SELECT user_browser,
                                                user_browser_version,
                                                COUNT(session_id) AS count_per_version
                                         FROM errors
                                         WHERE {" AND ".join(ch_sub_query)}
                                         GROUP BY user_browser, user_browser_version
                                         ORDER BY count_per_version DESC) AS count_per_version_details
                                   GROUP BY user_browser ) AS browesr_version_details USING (user_browser)) AS browser_details
                 INNER JOIN (SELECT %(error_id)s AS error_id,
                                    groupArray(
                                            [[[user_os]], [[toString(count_per_os)]],versions_partition]) AS os_partition
                             FROM (SELECT user_os,
                                          COUNT(session_id) AS count_per_os
                                   FROM errors
                                   WHERE {" AND ".join(ch_sub_query)}
                                   GROUP BY user_os
                                   ORDER BY count_per_os DESC) AS count_per_os_details
                                  INNER JOIN (SELECT user_os,
                                                     groupArray([user_os_version, toString(count_per_version)]) AS versions_partition
                                              FROM (SELECT user_os, user_os_version, COUNT(session_id) AS count_per_version
                                                    FROM errors
                                                    WHERE {" AND ".join(ch_sub_query)}
                                                    GROUP BY user_os, user_os_version
                                                    ORDER BY count_per_version DESC) AS count_per_version_query
                                              GROUP BY user_os ) AS os_version_query USING (user_os)) AS os_details
                            ON os_details.error_id = browser_details.error_id
                 INNER JOIN (SELECT %(error_id)s AS error_id,
                                    groupArray(
                                            [[[toString(user_device_type)]], [[toString(count_per_device)]],versions_partition]) AS device_partition
                             FROM (SELECT user_device_type,
                                          COUNT(session_id) AS count_per_device
                                   FROM errors
                                   WHERE {" AND ".join(ch_sub_query)}
                                   GROUP BY user_device_type
                                   ORDER BY count_per_device DESC) AS count_per_device_details
                                  INNER JOIN (SELECT user_device_type,
                                                     groupArray([user_device, toString(count_per_device)]) AS versions_partition
                                              FROM (SELECT user_device_type,
                                                           coalesce(user_device,'unknown') AS user_device,
                                                           COUNT(session_id) AS count_per_device
                                                    FROM errors
                                                    WHERE {" AND ".join(ch_sub_query)}
                                                    GROUP BY user_device_type, user_device
                                                    ORDER BY count_per_device DESC) AS count_per_device_details
                                              GROUP BY user_device_type ) AS device_version_details USING (user_device_type)) AS device_details
                            ON device_details.error_id = os_details.error_id
                 INNER JOIN (SELECT %(error_id)s AS error_id,
                                    groupArray(
                                            [[[toString(user_country)]], [[toString(count_per_country)]]]) AS country_partition
                             FROM (SELECT user_country,
                                          COUNT(session_id) AS count_per_country
                                   FROM errors
                                   WHERE {" AND ".join(ch_sub_query)}
                                   GROUP BY user_country
                                   ORDER BY count_per_country DESC) AS count_per_country_details) AS country_details
                            ON country_details.error_id = device_details.error_id
                 INNER JOIN (SELECT %(error_id)s AS error_id, groupArray([timestamp, count]) AS chart
                             FROM (SELECT toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
                                          COUNT(DISTINCT session_id) AS count
                                   FROM errors
                                   WHERE {" AND ".join(ch_sub_query)}
                                   GROUP BY timestamp
                                   ORDER BY timestamp) AS chart_details) AS chart_details
                            ON country_details.error_id = chart_details.error_id;"""
        # print("--------------------")
        # print(main_ch_query % params)
        row = ch.execute(query=main_ch_query, params=params)
    if len(row) == 0:
        return {"errors": ["error not found"]}
    row = row[0]
    row["tags"] = __process_tags(row)
    row["chart"] = __rearrange_chart_details(start_at=data["startDate"], end_at=data["endDate"], density=density,
                                             chart=row["chart"])
    return {"data": helper.dict_to_camel_case(row)}


def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
                            endTime_arg_name="endDate", type_condition=True, project_key="project_id", table_name=None):
    ch_sub_query = [f"{project_key} =toUInt16(%(project_id)s)"]
@@ -521,26 +405,6 @@ def __get_sort_key(key):
    }.get(key, 'max_datetime')


def __get_basic_constraints_pg(platform=None, time_constraint=True, startTime_arg_name="startDate",
                               endTime_arg_name="endDate", chart=False, step_size_name="step_size",
                               project_key="project_id"):
    if project_key is None:
        ch_sub_query = []
    else:
        ch_sub_query = [f"{project_key} =%(project_id)s"]
    if time_constraint:
        ch_sub_query += [f"timestamp >= %({startTime_arg_name})s",
                         f"timestamp < %({endTime_arg_name})s"]
    if chart:
        ch_sub_query += [f"timestamp >= generated_timestamp",
                         f"timestamp < generated_timestamp + %({step_size_name})s"]
    if platform == schemas.PlatformType.MOBILE:
        ch_sub_query.append("user_device_type = 'mobile'")
    elif platform == schemas.PlatformType.DESKTOP:
        ch_sub_query.append("user_device_type = 'desktop'")
    return ch_sub_query


def search(data: schemas.SearchErrorsSchema, project_id, user_id):
    MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(data.startTimestamp)
    MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(data.startTimestamp)
@@ -907,43 +771,3 @@ def merge(error_ids):
    # row = cur.fetchone()
    return {"data": "success"}


def format_first_stack_frame(error):
    error["stack"] = sourcemaps.format_payload(error.pop("payload"), truncate_to_first=True)
    for s in error["stack"]:
        for c in s.get("context", []):
            for sci, sc in enumerate(c):
                if isinstance(sc, str) and len(sc) > 1000:
                    c[sci] = sc[:1000]
        # convert bytes to string:
        if isinstance(s["filename"], bytes):
            s["filename"] = s["filename"].decode("utf-8")
    return error


def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(userId)s)
               SELECT COUNT(timed_errors.*) AS unresolved_and_unviewed
               FROM (SELECT root_error.error_id
                     FROM events.errors
                              INNER JOIN public.errors AS root_error USING (error_id)
                              LEFT JOIN user_viewed USING (error_id)
                     WHERE project_id = %(project_id)s
                       AND timestamp >= %(startTimestamp)s
                       AND timestamp <= %(endTimestamp)s
                       AND source = 'js_exception'
                       AND root_error.status = 'unresolved'
                       AND user_viewed.error_id ISNULL
                     LIMIT 1
                    ) AS timed_errors;""",
            {"project_id": project_id, "userId": user_id, "startTimestamp": startTimestamp,
             "endTimestamp": endTimestamp})
        cur.execute(query=query)
        row = cur.fetchone()
    return {
        "data": helper.dict_to_camel_case(row)
    }

@@ -60,20 +60,6 @@ def get(project_id, integration):
    return helper.dict_to_camel_case(helper.flatten_nested_dicts(r))


def get_all_by_type(integration):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """\
                SELECT integrations.*
                FROM public.integrations INNER JOIN public.projects USING(project_id)
                WHERE provider = %(provider)s AND projects.deleted_at ISNULL;""",
                {"provider": integration})
        )
        r = cur.fetchall()
    return helper.list_to_camel_case(r, flatten=True)


def edit(project_id, integration, changes):
    if "projectId" in changes:
        changes.pop("project_id")
@@ -117,4 +103,4 @@ def get_all_by_tenant(tenant_id, integration):
                {"tenant_id": tenant_id, "provider": integration})
        )
        r = cur.fetchall()
    return helper.list_to_camel_case(r, flatten=True)
    return helper.list_to_camel_case(r, flatten=True)

@@ -85,16 +85,6 @@ def __complete_missing_steps(start_time, end_time, density, neutral, rows, time_
    return result


def __merge_charts(list1, list2, time_key="timestamp"):
    if len(list1) != len(list2):
        raise Exception("cannot merge unequal lists")
    result = []
    for i in range(len(list1)):
        timestamp = min(list1[i][time_key], list2[i][time_key])
        result.append({**list1[i], **list2[i], time_key: timestamp})
    return result


def __get_constraint(data, fields, table_name):
    constraints = []
    # for k in fields.keys():
@@ -314,32 +304,6 @@ def get_errors_per_domains(project_id, limit, page, startTimestamp=TimeUTC.now(d
    return response


def __get_calls_errors_4xx_or_5xx(status, project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                  endTimestamp=TimeUTC.now(),
                                  platform=None, **args):
    ch_sub_query = __get_basic_constraints(table_name="requests", data=args)
    ch_sub_query.append("requests.event_type = 'REQUEST'")
    ch_sub_query.append(f"intDiv(requests.status, 100) == {status}")
    meta_condition = __get_meta_constraint(args)
    ch_sub_query += meta_condition
    with ch_client.ClickHouseClient() as ch:
        ch_query = f"""SELECT requests.method,
                              requests.url_hostpath,
                              COUNT(1) AS all_requests
                       FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS requests
                       WHERE {" AND ".join(ch_sub_query)}
                       GROUP BY requests.method, requests.url_hostpath
                       ORDER BY all_requests DESC
                       LIMIT 10;"""
        params = {"project_id": project_id,
                  "startTimestamp": startTimestamp,
                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
        # print(ch.format(query=ch_query, params=params))
        rows = ch.execute(query=ch_query, params=params)
    return helper.list_to_camel_case(rows)


def get_errors_per_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(),
                        platform=None, density=7, **args):
    step_size = __get_step_size(startTimestamp, endTimestamp, density)

@@ -149,29 +149,6 @@ def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=
    return helper.dict_to_camel_case(row)


def get_project_by_key(tenant_id, project_key, include_last_session=False, include_gdpr=None):
    with pg_client.PostgresClient() as cur:
        extra_select = ""
        if include_last_session:
            extra_select += """,(SELECT max(ss.start_ts)
                                 FROM public.sessions AS ss
                                 WHERE ss.project_key = %(project_key)s) AS last_recorded_session_at"""
        if include_gdpr:
            extra_select += ",s.gdpr"
        query = cur.mogrify(f"""SELECT s.project_key,
                                       s.name
                                       {extra_select}
                                FROM public.projects AS s
                                WHERE s.project_key =%(project_key)s
                                  AND s.tenant_id =%(tenant_id)s
                                  AND s.deleted_at IS NULL
                                LIMIT 1;""",
                            {"project_key": project_key, "tenant_id": tenant_id})
        cur.execute(query=query)
        row = cur.fetchone()
    return helper.dict_to_camel_case(row)


def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authorization=False):
    if __exists_by_name(name=data.name, exclude_id=None, tenant_id=tenant_id):
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")

@@ -71,17 +71,6 @@ def __process_trace(trace: TraceSchema):
    return data


async def write_trace(trace: TraceSchema):
    data = __process_trace(trace)
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                f"""INSERT INTO traces(user_id, tenant_id, created_at, auth, action, method, path_format, endpoint, payload, parameters, status)
                    VALUES (%(user_id)s, %(tenant_id)s, %(created_at)s, %(auth)s, %(action)s, %(method)s, %(path_format)s, %(endpoint)s, %(payload)s::jsonb, %(parameters)s::jsonb, %(status)s);""",
                data)
        )


async def write_traces_batch(traces: List[TraceSchema]):
    if len(traces) == 0:
        return

@@ -5,7 +5,7 @@ from time import time
from decouple import config
from chalicelib.core import assist
from chalicelib.core import assist_ice
from chalicelib.utils import helper_ee
@@ -29,7 +29,7 @@ def get_temporary_credentials():
def get_full_config():
    servers = assist.get_ice_servers()
    servers = assist_ice.get_ice_servers()
    if servers is None:
        return None
    servers = servers.split("|")

@@ -26,10 +26,6 @@ def get_main_sessions_table(timestamp=0):
        and timestamp and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.sessions"


def get_autocomplete_table(timestamp=0):
    return "experimental.autocomplete"


def get_user_favorite_sessions_table(timestamp=0):
    return "experimental.user_favorite_sessions"