Api v1.15.0 (#1595)

* fix(DB): fixed gdpr-array issue
* refactor(chalice): enhanced logs
* refactor(chalice): path-analysis compute more exact percentages
* refactor(chalice): compute node time
Kraiem Taha Yassine authored on 2023-10-30 21:46:01 +01:00, committed by GitHub
parent dd8050aaa2
commit 84695c0e48
6 changed files with 65 additions and 18 deletions

@@ -66,6 +66,7 @@ async def or_middleware(request: Request, call_next):
     if helper.TRACK_TIME:
         now = time.time() - now
         if now > 2:
+            now = round(now, 2)
             logging.warning(f"Execution time: {now} s for {request.method}: {request.url.path}")
     return response
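
The only behavioral change in this middleware hunk is in the logging: the elapsed time is rounded to two decimals before the slow-request warning is emitted. A minimal standalone sketch of the same pattern, where TRACK_TIME and the 2-second threshold stand in for helper.TRACK_TIME and the hard-coded limit above:

import logging
import time

TRACK_TIME = True  # stand-in for helper.TRACK_TIME

start = time.time()
# ... handle the request here ...
elapsed = time.time() - start
if TRACK_TIME and elapsed > 2:
    # round first so the warning prints e.g. "2.37 s" instead of a long float
    logging.warning(f"Execution time: {round(elapsed, 2)} s")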

@@ -21,10 +21,11 @@ def __transform_journey(rows, reverse_path=False):
             break
         number_of_step1 += 1
         total_100p += r["sessions_count"]
-    for i in range(number_of_step1):
-        rows[i]["value"] = 100 / number_of_step1
+    # for i in range(number_of_step1):
+    #     rows[i]["value"] = 100 / number_of_step1
-    for i in range(number_of_step1, len(rows)):
+    # for i in range(number_of_step1, len(rows)):
+    for i in range(len(rows)):
         rows[i]["value"] = rows[i]["sessions_count"] * 100 / total_100p
     nodes = []
@@ -34,21 +35,35 @@ def __transform_journey(rows, reverse_path=False):
         source = f"{r['event_number_in_session']}_{r['event_type']}_{r['e_value']}"
         if source not in nodes:
             nodes.append(source)
-            nodes_values.append({"name": r['e_value'], "eventType": r['event_type']})
+            nodes_values.append({"name": r['e_value'], "eventType": r['event_type'],
+                                 "avgTimeFromPrevious": 0, "sessionsCount": 0})
         if r['next_value']:
             target = f"{r['event_number_in_session'] + 1}_{r['next_type']}_{r['next_value']}"
             if target not in nodes:
                 nodes.append(target)
-                nodes_values.append({"name": r['next_value'], "eventType": r['next_type']})
+                nodes_values.append({"name": r['next_value'], "eventType": r['next_type'],
+                                     "avgTimeFromPrevious": 0, "sessionsCount": 0})
+            sr_idx = nodes.index(source)
+            tg_idx = nodes.index(target)
+            if r["avg_time_from_previous"] is not None:
+                nodes_values[tg_idx]["avgTimeFromPrevious"] += r["avg_time_from_previous"] * r["sessions_count"]
+                nodes_values[tg_idx]["sessionsCount"] += r["sessions_count"]
             link = {"eventType": r['event_type'], "sessionsCount": r["sessions_count"],
                     "value": r["value"], "avgTimeFromPrevious": r["avg_time_from_previous"]}
             if not reverse_path:
-                link["source"] = nodes.index(source)
-                link["target"] = nodes.index(target)
+                link["source"] = sr_idx
+                link["target"] = tg_idx
             else:
-                link["source"] = nodes.index(target)
-                link["target"] = nodes.index(source)
+                link["source"] = tg_idx
+                link["target"] = sr_idx
             links.append(link)
+    for n in nodes_values:
+        if n["sessionsCount"] > 0:
+            n["avgTimeFromPrevious"] = n["avgTimeFromPrevious"] / n["sessionsCount"]
+        else:
+            n["avgTimeFromPrevious"] = None
+        n.pop("sessionsCount")
     return {"nodes": nodes_values,
             "links": sorted(links, key=lambda x: (x["source"], x["target"]), reverse=False)}

@@ -83,6 +83,7 @@ async def or_middleware(request: Request, call_next):
     if helper.TRACK_TIME:
         now = time.time() - now
         if now > 2:
+            now = round(now, 2)
             logging.warning(f"Execution time: {now} s for {request.method}: {request.url.path}")
     return response

@@ -23,10 +23,11 @@ def __transform_journey(rows, reverse_path=False):
             break
         number_of_step1 += 1
         total_100p += r["sessions_count"]
-    for i in range(number_of_step1):
-        rows[i]["value"] = 100 / number_of_step1
+    # for i in range(number_of_step1):
+    #     rows[i]["value"] = 100 / number_of_step1
-    for i in range(number_of_step1, len(rows)):
+    # for i in range(number_of_step1, len(rows)):
+    for i in range(len(rows)):
         rows[i]["value"] = rows[i]["sessions_count"] * 100 / total_100p
     nodes = []
@@ -36,21 +37,35 @@ def __transform_journey(rows, reverse_path=False):
         source = f"{r['event_number_in_session']}_{r['event_type']}_{r['e_value']}"
         if source not in nodes:
             nodes.append(source)
-            nodes_values.append({"name": r['e_value'], "eventType": r['event_type']})
+            nodes_values.append({"name": r['e_value'], "eventType": r['event_type'],
+                                 "avgTimeFromPrevious": 0, "sessionsCount": 0})
         if r['next_value']:
             target = f"{r['event_number_in_session'] + 1}_{r['next_type']}_{r['next_value']}"
             if target not in nodes:
                 nodes.append(target)
-                nodes_values.append({"name": r['next_value'], "eventType": r['next_type']})
+                nodes_values.append({"name": r['next_value'], "eventType": r['next_type'],
+                                     "avgTimeFromPrevious": 0, "sessionsCount": 0})
+            sr_idx = nodes.index(source)
+            tg_idx = nodes.index(target)
+            if r["avg_time_from_previous"] is not None:
+                nodes_values[tg_idx]["avgTimeFromPrevious"] += r["avg_time_from_previous"] * r["sessions_count"]
+                nodes_values[tg_idx]["sessionsCount"] += r["sessions_count"]
             link = {"eventType": r['event_type'], "sessionsCount": r["sessions_count"],
                     "value": r["value"], "avgTimeFromPrevious": r["avg_time_from_previous"]}
             if not reverse_path:
-                link["source"] = nodes.index(source)
-                link["target"] = nodes.index(target)
+                link["source"] = sr_idx
+                link["target"] = tg_idx
             else:
-                link["source"] = nodes.index(target)
-                link["target"] = nodes.index(source)
+                link["source"] = tg_idx
+                link["target"] = sr_idx
             links.append(link)
+    for n in nodes_values:
+        if n["sessionsCount"] > 0:
+            n["avgTimeFromPrevious"] = n["avgTimeFromPrevious"] / n["sessionsCount"]
+        else:
+            n["avgTimeFromPrevious"] = None
+        n.pop("sessionsCount")
     return {"nodes": nodes_values,
             "links": sorted(links, key=lambda x: (x["source"], x["target"]), reverse=False)}

@@ -157,6 +157,14 @@ CREATE TABLE IF NOT EXISTS public.assist_events_aggregates_logs
 ALTER TABLE IF EXISTS public.users
     ADD COLUMN IF NOT EXISTS settings jsonb DEFAULT NULL;
+--To fix array-gdpr
+UPDATE public.projects
+SET gdpr=(SELECT *
+          FROM (SELECT jsonb_array_elements(gdpr) AS g) AS ra
+          WHERE jsonb_typeof(g) = 'object'
+          LIMIT 1)
+WHERE jsonb_typeof(gdpr) = 'array';
 COMMIT;
 \elif :is_next
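
This UPDATE only touches rows where the gdpr column was stored as a JSON array instead of an object: it keeps the first array element that is an object (or NULL if none exists) and drops the array wrapper. A rough Python equivalent of the normalization, with a hypothetical payload whose key names are invented for illustration:

def fix_gdpr(gdpr):
    # Mirrors jsonb_array_elements(...) filtered by jsonb_typeof(g) = 'object' LIMIT 1:
    # if the stored value is a list, keep its first dict element, otherwise leave it as is.
    if isinstance(gdpr, list):
        return next((g for g in gdpr if isinstance(g, dict)), None)
    return gdpr

print(fix_gdpr([{"maskEmails": True, "maskNumbers": False}]))
# -> {'maskEmails': True, 'maskNumbers': False}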

@@ -156,6 +156,13 @@ CREATE TABLE IF NOT EXISTS public.assist_events_aggregates_logs
 ALTER TABLE IF EXISTS public.users
     ADD COLUMN IF NOT EXISTS settings jsonb DEFAULT NULL;
+--To fix array-gdpr
+UPDATE public.projects
+SET gdpr=(SELECT *
+          FROM (SELECT jsonb_array_elements(gdpr) AS g) AS ra
+          WHERE jsonb_typeof(g) = 'object'
+          LIMIT 1)
+WHERE jsonb_typeof(gdpr) = 'array';
 COMMIT;