openreplay/ee/api/chalicelib/core/notifications.py
Latest commit a34179365e by Kraiem Taha Yassine (2023-09-06 17:06:33 +01:00)
Api v1.15.0 (#1464)
* feat(DB): rearranged queries
feat(DB): ready for v1.15.0

* refactor(chalice): upgraded dependencies
refactor(crons): upgraded dependencies
refactor(alerts): upgraded dependencies

* fix(chalice): return error when updating nonexistent webhook

* feat(chalice): fixed delete webhook response

* feat(chalice): limit webhooks name length

* feat(chalice): upgraded dependencies
feat(alerts): upgraded dependencies
feat(crons): upgraded dependencies

* fix(chalice): remove urllib3 dependency

* feat(chalice): remove FOSS to pydantic v2

* fix(chalice): freeze urllib3 to not have conflicts between boto3 and requests

* feat(chalice): refactoring schema in progress

* feat(chalice): refactoring schema in progress

* feat(chalice): refactoring schema in progress

* feat(chalice): refactoring schema in progress
feat(chalice): upgraded dependencies

* feat(chalice): refactored schema

* fix(chalice): pull rebase dev

* feat(DB): transfer size support

* feat(chalice): support service account

* feat(chalice): support service account

* fix(chalice): fixed refactored PayloadSchema-name

* feat(chalice): path analysis

* feat(chalice): support service account 1/2

* feat(DB): timezone support

* feat(chalice): upgraded dependencies
feat(alerts): upgraded dependencies
feat(crons): upgraded dependencies
feat(assist): upgraded dependencies
feat(sourcemaps): upgraded dependencies

* feat(chalice): path analysis schema changes

* feat(chalice): path analysis query change

* feat(chalice): path analysis query change

* feat(chalice): ios replay support

* feat(chalice): ios replay support

* feat(chalice): path analysis changes

* feat(chalice): upgraded dependencies

* feat(chalice): simple hide minor paths

* feat(chalice): path analysis density

* feat(chalice): session's replay ios events

* feat(chalice): fixed typo

* feat(chalice): support project's platform

* feat(DB): support project's platform

* feat(chalice): path analysis EE in progress

* feat(chalice): project's platform API

* feat(chalice): fixed create project

* feat(chalice): EE path analysis in progress

* feat(chalice): EE path analysis
refactor(chalice): support specific database name for clickhouse-client

* feat(chalice): upgraded dependencies
feat(chalice): path analysis specific event type for startPoint
feat(chalice): path analysis specific event type for endPoint
feat(chalice): path analysis specific event type for exclude

* refactoring(chalice): changed IOS click event type

import json

from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC


def get_all(tenant_id, user_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                SELECT notifications.*,
                       user_viewed_notifications.notification_id NOTNULL AS viewed
                FROM public.notifications
                         LEFT JOIN (SELECT notification_id
                                    FROM public.user_viewed_notifications
                                    WHERE user_viewed_notifications.user_id = %(user_id)s) AS user_viewed_notifications USING (notification_id)
                WHERE notifications.tenant_id = %(tenant_id)s
                   OR notifications.user_id = %(user_id)s
                ORDER BY created_at DESC
                LIMIT 100;""",
                        {"tenant_id": tenant_id, "user_id": user_id})
        )
        rows = helper.list_to_camel_case(cur.fetchall())
        for r in rows:
            r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"])
    return rows
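
# Illustrative row shape for get_all() (hypothetical values): helper.list_to_camel_case
# camelCases the column names, "createdAt" is converted to epoch milliseconds above, and
# "viewed" comes from the LEFT JOIN, so a row is expected to look roughly like
#   {"notificationId": 42, "tenantId": 1, "userId": None, "title": "...",
#    "description": "...", "buttonText": "...", "buttonUrl": "...", "imageUrl": None,
#    "options": {}, "createdAt": 1694016393000, "viewed": False}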


def get_all_count(tenant_id, user_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                SELECT COALESCE(COUNT(notifications.*), 0) AS count
                FROM public.notifications
                         LEFT JOIN (SELECT notification_id
                                    FROM public.user_viewed_notifications
                                    WHERE user_viewed_notifications.user_id = %(user_id)s) AS user_viewed_notifications USING (notification_id)
                WHERE (notifications.tenant_id = %(tenant_id)s
                    OR notifications.user_id = %(user_id)s)
                  AND user_viewed_notifications.notification_id IS NULL;""",
                        {"tenant_id": tenant_id, "user_id": user_id})
        )
        row = cur.fetchone()
    return row


def view_notification(user_id, notification_ids=None, tenant_id=None, startTimestamp=None, endTimestamp=None):
    # Avoid a mutable default argument; treat a missing list as empty.
    notification_ids = notification_ids if notification_ids is not None else []
    if len(notification_ids) == 0 and endTimestamp is None:
        return False
    if startTimestamp is None:
        startTimestamp = 0
    values = [(user_id, notification_id) for notification_id in notification_ids]
    with pg_client.PostgresClient() as cur:
        if len(values) > 0:
            # Mark an explicit list of notifications as viewed by this user.
            cur.executemany(
                "INSERT INTO public.user_viewed_notifications(user_id, notification_id) VALUES (%s, %s) ON CONFLICT DO NOTHING;",
                values)
        else:
            # Mark every matching notification created in [startTimestamp, endTimestamp] as viewed.
            query = """INSERT INTO public.user_viewed_notifications(user_id, notification_id)
                       SELECT %(user_id)s AS user_id, notification_id
                       FROM public.notifications
                       WHERE (tenant_id = %(tenant_id)s OR user_id = %(user_id)s)
                         AND EXTRACT(EPOCH FROM created_at) * 1000 >= (%(startTimestamp)s)
                         AND EXTRACT(EPOCH FROM created_at) * 1000 <= (%(endTimestamp)s + 1000)
                       ON CONFLICT DO NOTHING;"""
            params = {"user_id": user_id, "tenant_id": tenant_id,
                      "startTimestamp": startTimestamp, "endTimestamp": endTimestamp}
            cur.execute(cur.mogrify(query, params))
    return True
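
# Usage sketch for view_notification() (hypothetical IDs and timestamps):
#   view_notification(user_id=1, notification_ids=[42, 43])
#       -> marks the listed notifications as viewed by user 1
#   view_notification(user_id=1, tenant_id=1, endTimestamp=1694016393000)
#       -> marks every matching notification created up to endTimestamp (epoch ms);
#          startTimestamp defaults to 0 when omitted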


def create(notifications):
    if len(notifications) == 0:
        return []
    with pg_client.PostgresClient() as cur:
        values = []
        for n in notifications:
            clone = dict(n)
            if "userId" not in clone and "tenantId" not in clone:
                print('!!! Error: userId or tenantId is required to create a notification')
                continue
            if "userId" not in clone:
                clone["userId"] = None
            if "tenantId" not in clone:
                clone["tenantId"] = None
            if "options" not in clone:
                clone["options"] = '{}'
            else:
                clone["options"] = json.dumps(clone["options"])
            values.append(
                cur.mogrify(
                    "(%(tenantId)s, %(userId)s, %(title)s, %(description)s, %(buttonText)s, %(buttonUrl)s, %(imageUrl)s, %(options)s)",
                    clone).decode('UTF-8')
            )
        if len(values) == 0:
            # Every payload was rejected above; skip the INSERT instead of building an empty VALUES list.
            return []
        cur.execute(
            f"""INSERT INTO public.notifications(tenant_id, user_id, title, description, button_text, button_url, image_url, options)
                VALUES {",".join(values)} RETURNING *;""")
        rows = helper.list_to_camel_case(cur.fetchall())
        for r in rows:
            r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"])
            r["viewed"] = False
    return rows
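

# Minimal manual smoke test: an illustrative sketch with hypothetical tenant/user IDs,
# assuming a configured pg_client connection and that RETURNING * exposes notification_id
# (camelCased to "notificationId" by helper.list_to_camel_case).
if __name__ == "__main__":
    created = create([{
        "tenantId": 1,
        "userId": None,  # tenant-wide notification
        "title": "Example title",
        "description": "Example description",
        "buttonText": "Open",
        "buttonUrl": "https://example.com",
        "imageUrl": None,
        "options": {"source": "manual-test"},
    }])
    print(created)
    if created:
        print(view_notification(user_id=1, notification_ids=[created[0]["notificationId"]]))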