feat(api): insights slipping users list
commit 7e07e763ff
parent a174348a37
1 changed file with 31 additions and 30 deletions
@@ -854,15 +854,11 @@ def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimes
 @dev.timed
 def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[],
                    **args):
-    pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
-                                     time_constraint=True)
-    pg_sub_query.append("user_id IS NOT NULL")
-    pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
-    pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
     event_type = "PAGES"
     event_value = "/"
     extra_values = {}
     default = True
+    meta_condition = []
     for f in filters:
         if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
             event_type = f["value"]
@@ -870,45 +866,50 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi
             event_value = f["value"]
             default = False
         elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
-            pg_sub_query.append(f"sessions.user_id = %(user_id)s")
+            meta_condition.append(f"sessions_metadata.user_id = %(user_id)s")
             extra_values["user_id"] = f["value"]
     event_table = JOURNEY_TYPES[event_type]["table"]
     event_column = JOURNEY_TYPES[event_type]["column"]
-    pg_sub_query.append(f"feature.{event_column} = %(value)s")
-
-    with pg_client.PostgresClient() as cur:
+    ch_sub_query = __get_basic_constraints(table_name="feature", data=args)
+    meta_condition += __get_meta_constraint(args)
+    ch_sub_query += meta_condition
+    ch_sub_query.append("user_id IS NOT NULL")
+    ch_sub_query.append("not empty(user_id)")
+    ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)")
+    ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)")
+    with ch_client.ClickHouseClient() as ch:
         if default:
             # get most used value
-            pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
-                        FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id)
-                        WHERE {" AND ".join(pg_sub_query[:-1])}
-                        AND length({event_column}) > 2
+            ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
+                        FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id)
+                        WHERE {" AND ".join(ch_sub_query)}
                         GROUP BY value
                         ORDER BY count DESC
                         LIMIT 1;"""
             params = {"project_id": project_id, "startTimestamp": startTimestamp,
                       "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-            cur.execute(cur.mogrify(pg_query, params))
-            row = cur.fetchone()
-            if row is not None:
-                event_value = row["value"]
+            row = ch.execute(ch_query, params)
+            if len(row) > 0:
+                event_value = row[0]["value"]
         extra_values["value"] = event_value
-        if len(event_value) > 2:
-            pg_sub_query.append(f"length({event_column})>2")
-        pg_query = f"""SELECT user_id, last_time, interactions_count, MIN(start_ts) AS first_seen, MAX(start_ts) AS last_seen
-                        FROM (SELECT user_id, MAX(timestamp) AS last_time, COUNT(DISTINCT session_id) AS interactions_count
-                              FROM {event_table} AS feature INNER JOIN sessions USING (session_id)
-                              WHERE {" AND ".join(pg_sub_query)}
-                              GROUP BY user_id) AS user_last_usage
-                                 INNER JOIN sessions USING (user_id)
-                        WHERE EXTRACT(EPOCH FROM now()) * 1000 - last_time > 7 * 24 * 60 * 60 * 1000
-                        GROUP BY user_id, last_time,interactions_count;"""
 
+        ch_sub_query.append(f"feature.{event_column} = %(value)s")
+        ch_query = f"""SELECT user_id,
+                              toUnixTimestamp(last_time)*1000 AS last_time,
+                              interactions_count,
+                              toUnixTimestamp(first_seen) * 1000 AS first_seen,
+                              toUnixTimestamp(last_seen) * 1000 AS last_seen
+                       FROM (SELECT user_id, last_time, interactions_count, MIN(datetime) AS first_seen, MAX(datetime) AS last_seen
+                             FROM (SELECT user_id, MAX(datetime) AS last_time, COUNT(DISTINCT session_id) AS interactions_count
+                                   FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id)
+                                   WHERE {" AND ".join(ch_sub_query)}
+                                   GROUP BY user_id ) AS user_last_usage INNER JOIN sessions_metadata USING (user_id)
+                             WHERE now() - last_time > 7
+                             GROUP BY user_id, last_time, interactions_count) AS raw_results;"""
         params = {"project_id": project_id, "startTimestamp": startTimestamp,
                   "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
-        # print(cur.mogrify(pg_query, params))
-        cur.execute(cur.mogrify(pg_query, params))
-        rows = cur.fetchall()
+        # print(ch_query, params)
+        rows = ch.execute(ch_query, params)
     return {
         "startTimestamp": startTimestamp,
         "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}],