* refactor(chalice): removed errors status
refactor(chalice): removed errors viewed
refactor(chalice): removed errors favorite
refactor(DB): removed errors viewed
refactor(DB): removed errors favorite

* refactor(chalice): ignore hide excess for Path Finder as it will be done in UI
Authored by Kraiem Taha Yassine on 2024-12-23 18:34:57 +01:00; committed by GitHub
parent 4c4e1b6580
commit 82ab91bc25
20 changed files with 114 additions and 303 deletions


@ -1,19 +1,23 @@
import json
import schemas
from chalicelib.core import sourcemaps
from chalicelib.core.errors.modules import sessions
from chalicelib.core.sourcemaps import sourcemaps
from chalicelib.core.sessions import sessions_search
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
from typing import Optional, List, Union, Literal
def get(error_id, family=False):
def get(error_id, family=False) -> dict | List[dict]:
if family:
return get_batch([error_id])
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"SELECT * FROM public.errors WHERE error_id = %(error_id)s LIMIT 1;",
"""SELECT *
FROM public.errors
WHERE error_id = %(error_id)s
LIMIT 1;""",
{"error_id": error_id})
cur.execute(query=query)
result = cur.fetchone()
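For orientation, a minimal usage sketch of the refactored get() helper shown in the hunk above; the import path and the "some-error-id" value are placeholders for illustration, not part of this commit.

# Sketch only: exercising the new dict | List[dict] return shape of get().
from chalicelib.core.errors.errors import get  # assumed module path

single = get(error_id="some-error-id")               # one error row as a dict
family = get(error_id="some-error-id", family=True)  # list of dicts via get_batch()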
@ -47,9 +51,10 @@ def get_batch(error_ids):
return helper.list_to_camel_case(errors)
def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
endTime_arg_name="endDate", chart=False, step_size_name="step_size",
project_key="project_id"):
def __get_basic_constraints(platform: Optional[schemas.PlatformType] = None, time_constraint: bool = True,
startTime_arg_name: str = "startDate", endTime_arg_name: str = "endDate",
chart: bool = False, step_size_name: str = "step_size",
project_key: Optional[str] = "project_id"):
if project_key is None:
ch_sub_query = []
else:
@ -102,8 +107,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
data.endTimestamp = TimeUTC.now(1)
if len(data.events) > 0 or len(data.filters) > 0:
print("-- searching for sessions before errors")
statuses = sessions.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=data.status)
statuses = sessions_search.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=data.status)
if len(statuses) == 0:
return empty_response
error_ids = [e["errorId"] for e in statuses]
@ -314,41 +319,3 @@ def get_sessions(start_date, end_date, project_id, user_id, error_id):
'total': total,
'sessions': helper.list_to_camel_case(sessions_list)
}
ACTION_STATE = {
"unsolve": 'unresolved',
"solve": 'resolved',
"ignore": 'ignored'
}
def change_state(project_id, user_id, error_id, action):
errors = get(error_id, family=True)
print(len(errors))
status = ACTION_STATE.get(action)
if errors is None or len(errors) == 0:
return {"errors": ["error not found"]}
if errors[0]["status"] == status:
return {"errors": [f"error is already {status}"]}
if errors[0]["status"] == ACTION_STATE["solve"] and status == ACTION_STATE["ignore"]:
return {"errors": [f"state transition not permitted {errors[0]['status']} -> {status}"]}
params = {
"userId": user_id,
"error_ids": tuple([e["errorId"] for e in errors]),
"status": status}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""UPDATE public.errors
SET status = %(status)s
WHERE error_id IN %(error_ids)s
RETURNING status""",
params)
cur.execute(query=query)
row = cur.fetchone()
if row is not None:
for e in errors:
e["status"] = row["status"]
return {"data": errors}


@ -62,8 +62,6 @@ def get_batch(error_ids):
return errors_legacy.get_batch(error_ids=error_ids)
def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
endTime_arg_name="endDate", type_condition=True, project_key="project_id", table_name=None):
ch_sub_query = [f"{project_key} =toUInt16(%(project_id)s)"]
@ -413,48 +411,3 @@ def get_sessions(start_date, end_date, project_id, user_id, error_id):
project_id=project_id,
user_id=user_id,
error_id=error_id)
def change_state(project_id, user_id, error_id, action):
return errors_legacy.change_state(project_id=project_id, user_id=user_id, error_id=error_id, action=action)
MAX_RANK = 2
def __status_rank(status):
return {
'unresolved': MAX_RANK - 2,
'ignored': MAX_RANK - 1,
'resolved': MAX_RANK
}.get(status)
def merge(error_ids):
error_ids = list(set(error_ids))
errors = get_batch(error_ids)
if len(error_ids) <= 1 or len(error_ids) > len(errors):
return {"errors": ["invalid list of ids"]}
error_ids = [e["errorId"] for e in errors]
parent_error_id = error_ids[0]
status = "unresolved"
for e in errors:
if __status_rank(status) < __status_rank(e["status"]):
status = e["status"]
if __status_rank(status) == MAX_RANK:
break
params = {
"error_ids": tuple(error_ids),
"parent_error_id": parent_error_id,
"status": status
}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""UPDATE public.errors
SET parent_error_id = %(parent_error_id)s, status = %(status)s
WHERE error_id IN %(error_ids)s OR parent_error_id IN %(error_ids)s;""",
params)
cur.execute(query=query)
# row = cur.fetchone()
return {"data": "success"}


@ -1,48 +0,0 @@
from chalicelib.utils import pg_client
def add_favorite_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""INSERT INTO public.user_favorite_errors(user_id, error_id)
VALUES (%(userId)s,%(error_id)s);""",
{"userId": user_id, "error_id": error_id})
)
return {"errorId": error_id, "favorite": True}
def remove_favorite_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""DELETE FROM public.user_favorite_errors
WHERE
user_id = %(userId)s
AND error_id = %(error_id)s;""",
{"userId": user_id, "error_id": error_id})
)
return {"errorId": error_id, "favorite": False}
def favorite_error(project_id, user_id, error_id):
exists, favorite = error_exists_and_favorite(user_id=user_id, error_id=error_id)
if not exists:
return {"errors": ["cannot bookmark non-rehydrated errors"]}
if favorite:
return remove_favorite_error(project_id=project_id, user_id=user_id, error_id=error_id)
return add_favorite_error(project_id=project_id, user_id=user_id, error_id=error_id)
def error_exists_and_favorite(user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT errors.error_id AS exists, ufe.error_id AS favorite
FROM public.errors
LEFT JOIN (SELECT error_id FROM public.user_favorite_errors WHERE user_id = %(userId)s) AS ufe USING (error_id)
WHERE error_id = %(error_id)s;""",
{"userId": user_id, "error_id": error_id})
)
r = cur.fetchone()
if r is None:
return False, False
return True, r.get("favorite") is not None


@ -1,37 +0,0 @@
from chalicelib.utils import pg_client
def add_viewed_error(project_id, user_id, error_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""INSERT INTO public.user_viewed_errors(user_id, error_id)
VALUES (%(userId)s,%(error_id)s);""",
{"userId": user_id, "error_id": error_id})
)
def viewed_error_exists(user_id, error_id):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
"""SELECT
errors.error_id AS hydrated,
COALESCE((SELECT TRUE
FROM public.user_viewed_errors AS ve
WHERE ve.error_id = %(error_id)s
AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
FROM public.errors
WHERE error_id = %(error_id)s""",
{"userId": user_id, "error_id": error_id})
cur.execute(
query=query
)
r = cur.fetchone()
if r:
return r.get("viewed")
return True
def viewed_error(project_id, user_id, error_id):
if viewed_error_exists(user_id=user_id, error_id=error_id):
return None
return add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)


@ -403,7 +403,9 @@ WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
{"UNION ALL".join(projection_query)};"""
params = {"project_id": project_id, "startTimestamp": data.startTimestamp,
"endTimestamp": data.endTimestamp, "density": data.density,
"eventThresholdNumberInGroup": 4 if data.hide_excess else 8,
# This is ignored because UI will take care of it
# "eventThresholdNumberInGroup": 4 if data.hide_excess else 8,
"eventThresholdNumberInGroup": 8,
**extra_values}
query = cur.mogrify(pg_query, params)
_now = time()


@ -362,7 +362,9 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
_now = time()
params = {"project_id": project_id, "startTimestamp": data.startTimestamp,
"endTimestamp": data.endTimestamp, "density": data.density,
"eventThresholdNumberInGroup": 4 if data.hide_excess else 8,
# This is ignored because UI will take care of it
# "eventThresholdNumberInGroup": 4 if data.hide_excess else 8,
"eventThresholdNumberInGroup": 8,
**extra_values}
ch_query1 = f"""\
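As the two Path Finder hunks above show, both the PostgreSQL and ClickHouse queries now always pass eventThresholdNumberInGroup = 8 and leave the hide-excess trimming to the UI. A hedged sketch of the trimming rule the UI is now expected to apply, with a hypothetical helper name and data shape:

# Illustration only: a hypothetical client-side trim reproducing the old
# hide_excess behaviour (keep 4 events per group instead of 8).
def trim_path_groups(groups: list[list[dict]], hide_excess: bool) -> list[list[dict]]:
    threshold = 4 if hide_excess else 8
    return [group[:threshold] for group in groups]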


@ -10,7 +10,7 @@ import schemas
from chalicelib.core import scope
from chalicelib.core import assist, signup, feature_flags
from chalicelib.core.metrics import heatmaps
from chalicelib.core.errors import errors_favorite, errors_viewed, errors, errors_details
from chalicelib.core.errors import errors, errors_details
from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_viewed, \
sessions_assignments, unprocessed_sessions, sessions_search
from chalicelib.core import tenants, users, projects, license
@ -329,13 +329,10 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str,
@app.get('/{projectId}/errors/{errorId}', tags=['errors'])
def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24,
density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)):
def errors_get_details(projectId: int, errorId: str, density24: int = 24, density30: int = 30,
context: schemas.CurrentContext = Depends(OR_context)):
data = errors_details.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId,
**{"density24": density24, "density30": density30})
if data.get("data") is not None:
background_tasks.add_task(errors_viewed.viewed_error, project_id=projectId, user_id=context.user_id,
error_id=errorId)
return data
@ -350,22 +347,15 @@ def errors_get_details_sourcemaps(projectId: int, errorId: str,
}
@app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"])
def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7),
endDate: int = TimeUTC.now(),
context: schemas.CurrentContext = Depends(OR_context)):
if action == "favorite":
return errors_favorite.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId)
elif action == "sessions":
start_date = startDate
end_date = endDate
return {
"data": errors.get_sessions(project_id=projectId, user_id=context.user_id, error_id=errorId,
start_date=start_date, end_date=end_date)}
elif action in list(errors.ACTION_STATE.keys()):
return errors.change_state(project_id=projectId, user_id=context.user_id, error_id=errorId, action=action)
else:
return {"errors": ["undefined action"]}
@app.get('/{projectId}/errors/{errorId}/sessions', tags=["errors"])
def get_errors_sessions(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7),
endDate: int = TimeUTC.now(),
context: schemas.CurrentContext = Depends(OR_context)):
start_date = startDate
end_date = endDate
return {
"data": errors.get_sessions(project_id=projectId, user_id=context.user_id, error_id=errorId,
start_date=start_date, end_date=end_date)}
@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"])
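With the catch-all /{projectId}/errors/{errorId}/{action} route removed, only the dedicated sessions route is exposed here. A hedged sketch of calling it over HTTP; the base URL, API prefix, IDs, and token are placeholders, not values from this commit.

# Sketch only: querying the remaining errors sessions endpoint.
import requests

resp = requests.get(
    "https://openreplay.example.com/api/1/errors/some-error-id/sessions",  # assumed URL shape
    params={"startDate": 1700000000000, "endDate": 1700600000000},         # millisecond timestamps
    headers={"Authorization": "Bearer <token>"},
    timeout=30,
)
sessions = resp.json().get("data", [])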

ee/api/.gitignore (vendored, 2 changes)

@ -291,6 +291,4 @@ Pipfile.lock
/chalicelib/core/errors/modules/*
/chalicelib/core/errors/errors.py
/chalicelib/core/errors/errors_ch.py
/chalicelib/core/errors/errors_favorite.py
/chalicelib/core/errors/errors_viewed.py
/chalicelib/core/errors/errors_details.py


@ -9,7 +9,7 @@ requests = "==2.32.3"
boto3 = "==1.35.86"
pyjwt = "==2.10.1"
psycopg2-binary = "==2.9.10"
psycopg = {extras = ["binary", "pool"], version = "==3.2.3"}
psycopg = {extras = ["pool", "binary"], version = "==3.2.3"}
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
clickhouse-connect = "==0.8.11"
elasticsearch = "==8.17.0"


@ -1,16 +0,0 @@
import logging
from chalicelib.core.errors.errors_viewed import *
from chalicelib.utils import ch_client, exp_ch_helper
_add_viewed_error = add_viewed_error
logger = logging.getLogger(__name__)
def add_viewed_error(project_id, user_id, error_id):
_add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)
with ch_client.ClickHouseClient() as cur:
query = f"""INSERT INTO {exp_ch_helper.get_user_viewed_errors_table()}(project_id,user_id, error_id)
VALUES (%(project_id)s,%(userId)s,%(error_id)s);"""
params = {"userId": user_id, "error_id": error_id, "project_id": project_id}
cur.execute(query=query, params=params)


@ -108,9 +108,7 @@ rm -rf ./chalicelib/core/alerts/alerts_processor.py
rm -rf ./chalicelib/core/alerts/alerts_processor_ch.py
rm -rf ./chalicelib/core/alerts/alerts_listener.py
rm -rf ./chalicelib/core/alerts/modules/helpers.py
rm -rf /chalicelib/core/errors/modules
rm -rf /chalicelib/core/errors/errors.py
rm -rf /chalicelib/core/errors/errors_ch.py
rm -rf /chalicelib/core/errors/errors_favorite.py
rm -rf /chalicelib/core/errors/errors_viewed.py
rm -rf /chalicelib/core/errors/errors_details.py
rm -rf ./chalicelib/core/errors/modules
rm -rf ./chalicelib/core/errors/errors.py
rm -rf ./chalicelib/core/errors/errors_ch.py
rm -rf ./chalicelib/core/errors/errors_details.py


@ -9,7 +9,7 @@ from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Re
import schemas
from chalicelib.core import scope
from chalicelib.core import assist, signup, feature_flags
from chalicelib.core.errors import errors, errors_viewed, errors_favorite
from chalicelib.core.errors import errors
from chalicelib.core.metrics import heatmaps
from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_assignments, \
sessions_viewed, unprocessed_sessions
@ -349,13 +349,10 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str,
@app.get('/{projectId}/errors/{errorId}', tags=['errors'],
dependencies=[OR_scope(Permissions.DEV_TOOLS, ServicePermissions.DEV_TOOLS)])
def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24,
density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)):
def errors_get_details(projectId: int, errorId: str, density24: int = 24, density30: int = 30,
context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId,
**{"density24": density24, "density30": density30})
if data.get("data") is not None:
background_tasks.add_task(errors_viewed.viewed_error, project_id=projectId, user_id=context.user_id,
error_id=errorId)
return data
@ -371,22 +368,15 @@ def errors_get_details_sourcemaps(projectId: int, errorId: str,
}
@app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"], dependencies=[OR_scope(Permissions.DEV_TOOLS)])
def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7),
endDate: int = TimeUTC.now(),
context: schemas.CurrentContext = Depends(OR_context)):
if action == "favorite":
return errors_favorite.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId)
elif action == "sessions":
start_date = startDate
end_date = endDate
return {
"data": errors.get_sessions(project_id=projectId, user_id=context.user_id, error_id=errorId,
start_date=start_date, end_date=end_date)}
elif action in list(errors.ACTION_STATE.keys()):
return errors.change_state(project_id=projectId, user_id=context.user_id, error_id=errorId, action=action)
else:
return {"errors": ["undefined action"]}
@app.get('/{projectId}/errors/{errorId}/sessions', tags=["errors"], dependencies=[OR_scope(Permissions.DEV_TOOLS)])
def get_errors_sessions(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7),
endDate: int = TimeUTC.now(),
context: schemas.CurrentContext = Depends(OR_context)):
start_date = startDate
end_date = endDate
return {
"data": errors.get_sessions(project_id=projectId, user_id=context.user_id, error_id=errorId,
start_date=start_date, end_date=end_date)}
@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"],


@ -23,6 +23,9 @@ DELETE
FROM public.metrics
WHERE metrics.metric_type = 'insights';
DROP TABLE IF EXISTS public.user_favorite_errors;
DROP TABLE IF EXISTS public.user_viewed_errors;
COMMIT;
\elif :is_next


@ -395,21 +395,6 @@ CREATE INDEX errors_project_id_error_id_integration_idx ON public.errors (projec
CREATE INDEX errors_error_id_idx ON public.errors (error_id);
CREATE INDEX errors_parent_error_id_idx ON public.errors (parent_error_id);
CREATE TABLE public.user_favorite_errors
(
user_id integer NOT NULL REFERENCES public.users (user_id) ON DELETE CASCADE,
error_id text NOT NULL REFERENCES public.errors (error_id) ON DELETE CASCADE,
PRIMARY KEY (user_id, error_id)
);
CREATE TABLE public.user_viewed_errors
(
user_id integer NOT NULL REFERENCES public.users (user_id) ON DELETE CASCADE,
error_id text NOT NULL REFERENCES public.errors (error_id) ON DELETE CASCADE,
PRIMARY KEY (user_id, error_id)
);
CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id);
CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id);
CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other');
CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS','AC','AN','BU','CP','CS','CT','DD','DG','DY','EA','FQ','FX','HV','IC','JT','MI','NH','NQ','NT','PC','PU','PZ','RH','SU','TA','TP','VD','WK','YD','YU','ZR');


@ -17,6 +17,22 @@ $$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec
CREATE TABLE public.user_favorite_errors
(
user_id integer NOT NULL REFERENCES public.users (user_id) ON DELETE CASCADE,
error_id text NOT NULL REFERENCES public.errors (error_id) ON DELETE CASCADE,
PRIMARY KEY (user_id, error_id)
);
CREATE TABLE public.user_viewed_errors
(
user_id integer NOT NULL REFERENCES public.users (user_id) ON DELETE CASCADE,
error_id text NOT NULL REFERENCES public.errors (error_id) ON DELETE CASCADE,
PRIMARY KEY (user_id, error_id)
);
CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id);
CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id);
COMMIT;
\elif :is_next


@ -193,14 +193,3 @@ CREATE TABLE IF NOT EXISTS experimental.ios_events
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id, message_id)
TTL datetime + INTERVAL 1 MONTH;
CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
(
project_id UInt16,
user_id UInt32,
error_id String,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, error_id)
TTL _timestamp + INTERVAL 3 MONTH;


@ -193,14 +193,3 @@ CREATE TABLE IF NOT EXISTS experimental.ios_events
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id, message_id)
TTL datetime + INTERVAL 1 MONTH;
CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
(
project_id UInt16,
user_id UInt32,
error_id String,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, user_id, error_id)
TTL _timestamp + INTERVAL 3 MONTH;


@ -23,6 +23,10 @@ DELETE
FROM public.metrics
WHERE metrics.metric_type = 'insights';
DROP TABLE IF EXISTS public.user_favorite_errors;
DROP TABLE IF EXISTS public.user_viewed_errors;
COMMIT;
\elif :is_next


@ -357,22 +357,6 @@ CREATE INDEX errors_project_id_error_id_integration_idx ON public.errors (projec
CREATE INDEX errors_error_id_idx ON public.errors (error_id);
CREATE INDEX errors_parent_error_id_idx ON public.errors (parent_error_id);
CREATE TABLE public.user_favorite_errors
(
user_id integer NOT NULL REFERENCES public.users (user_id) ON DELETE CASCADE,
error_id text NOT NULL REFERENCES public.errors (error_id) ON DELETE CASCADE,
PRIMARY KEY (user_id, error_id)
);
CREATE TABLE public.user_viewed_errors
(
user_id integer NOT NULL REFERENCES public.users (user_id) ON DELETE CASCADE,
error_id text NOT NULL REFERENCES public.errors (error_id) ON DELETE CASCADE,
PRIMARY KEY (user_id, error_id)
);
CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id);
CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id);
CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other');
CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS','AC','AN','BU','CP','CS','CT','DD','DG','DY','EA','FQ','FX','HV','IC','JT','MI','NH','NQ','NT','PC','PU','PZ','RH','SU','TA','TP','VD','WK','YD','YU','ZR');


@ -0,0 +1,42 @@
\set previous_version 'v1.22.0'
\set next_version 'v1.21.0'
SELECT openreplay_version() AS current_version,
openreplay_version() = :'previous_version' AS valid_previous,
openreplay_version() = :'next_version' AS is_next
\gset
\if :valid_previous
\echo valid previous DB version :'previous_version', starting DB downgrade to :'next_version'
BEGIN;
SELECT format($fn_def$
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT '%1$s'
$$ LANGUAGE sql IMMUTABLE;
$fn_def$, :'next_version')
\gexec
CREATE TABLE public.user_favorite_errors
(
user_id integer NOT NULL REFERENCES public.users (user_id) ON DELETE CASCADE,
error_id text NOT NULL REFERENCES public.errors (error_id) ON DELETE CASCADE,
PRIMARY KEY (user_id, error_id)
);
CREATE TABLE public.user_viewed_errors
(
user_id integer NOT NULL REFERENCES public.users (user_id) ON DELETE CASCADE,
error_id text NOT NULL REFERENCES public.errors (error_id) ON DELETE CASCADE,
PRIMARY KEY (user_id, error_id)
);
CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id);
CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id);
COMMIT;
\elif :is_next
\echo new version detected :'next_version', nothing to do
\else
\warn skipping DB downgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
\endif