Api v1.15.0 (#1478)
* refactor(chalice): upgraded dependencies; refactor(crons): upgraded dependencies; refactor(alerts): upgraded dependencies
* fix(chalice): return error when updating a nonexistent webhook
* feat(chalice): fixed delete webhook response
* feat(chalice): limit webhooks name length
* feat(chalice): upgraded dependencies; feat(alerts): upgraded dependencies; feat(crons): upgraded dependencies
* fix(chalice): remove urllib3 dependency
* feat(chalice): move FOSS to pydantic v2
* fix(chalice): freeze urllib3 to avoid conflicts between boto3 and requests
* feat(chalice): refactoring schema in progress
* feat(chalice): refactoring schema in progress
* feat(chalice): refactoring schema in progress
* feat(chalice): refactoring schema in progress; feat(chalice): upgraded dependencies
* feat(chalice): refactored schema
* feat(DB): transfer size support
* feat(chalice): support service account
* feat(chalice): support service account
* fix(chalice): fixed refactored PayloadSchema name
* feat(chalice): path analysis
* feat(chalice): support service account 1/2
* feat(DB): timezone support
* feat(chalice): upgraded dependencies; feat(alerts): upgraded dependencies; feat(crons): upgraded dependencies; feat(assist): upgraded dependencies; feat(sourcemaps): upgraded dependencies
* feat(chalice): path analysis schema changes
* feat(chalice): path analysis query change
* feat(chalice): path analysis query change
* feat(chalice): ios replay support
* feat(chalice): ios replay support
* feat(chalice): path analysis changes
* feat(DB): ios events
* feat(chalice): upgraded dependencies
* feat(chalice): simple hide minor paths
* feat(chalice): path analysis density
* feat(chalice): session's replay ios events
* feat(chalice): fixed typo
* feat(chalice): support project's platform
* feat(DB): support project's platform
* feat(chalice): path analysis EE in progress
* feat(chalice): project's platform API
* feat(chalice): fixed create project
* feat(chalice): EE path analysis in progress
* feat(chalice): EE path analysis; refactor(chalice): support specific database name for clickhouse-client
* feat(chalice): upgraded dependencies; feat(chalice): path analysis specific event type for startPoint; feat(chalice): path analysis specific event type for endPoint; feat(chalice): path analysis specific event type for exclude
* refactoring(chalice): changed IOS click event type
* refactoring(chalice): upgraded dependencies; refactoring(alerts): upgraded dependencies; refactoring(crons): upgraded dependencies; refactoring(peers): upgraded dependencies; refactoring(assist): upgraded dependencies; refactoring(sourcemaps-reader): upgraded dependencies
* refactoring(chalice): upgraded dependencies; refactoring(alerts): upgraded dependencies; refactoring(crons): upgraded dependencies; refactoring(peers): upgraded dependencies; refactoring(assist): upgraded dependencies; refactoring(sourcemaps-reader): upgraded dependencies
* feat(chalice): upgraded dependencies; feat(alerts): upgraded dependencies; feat(crons): upgraded dependencies
* refactoring(chalice): refactored cards; refactoring(chalice): upgraded dependencies
* feat(chalice): get path-analysis issues list
* feat(chalice): changed crash_ios; feat(DB): changed crash_ios
* fix(chalice): fix crashlooping
* feat(chalice): support tap-rage; feat(DB): support tap-rage
* feat(chalice): Exp search support click-selector; feat(DB): CH support click-selector
* feat(chalice): refresh token; feat(DB): refresh token
* feat(chalice): refresh token changes
* feat(chalice): fixed authorizer context attribute changes
* feat(chalice): fixed refresh token path & age
* feat(chalice): fixed refresh token RTR
* feat(chalice): EE refresh token; feat(DB): EE refresh token
* feat(chalice): migrated EE refresh token
* feat(chalice): fixed crashing changes
* feat(chalice): fixed instant expiration
* feat(chalice): fix
* feat(chalice): fix
* feat(chalice): fix
* feat(chalice): refresh token debug
* feat(chalice): refresh token debug
* feat(chalice): refresh token debug
* feat(chalice): fix refresh token path
* feat(chalice): refresh token on signup
* feat(DB): refresh token
Parent: 7422a4919f · Commit: f9c3204ca1
38 changed files with 1072 additions and 322 deletions
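The headline change in this release is the move from a single long-lived JWT to a short-lived access token plus an httponly refresh cookie. Before the hunks below, a minimal client-side sketch of the new flow; the base URL and credentials are placeholders, not part of the change set:

import requests

BASE = "https://openreplay.example.com/api"  # hypothetical deployment URL
session = requests.Session()

# /login returns the short-lived access token in the JSON body and sets the
# refreshToken cookie (httponly, scoped to /api/refresh).
r = session.post(f"{BASE}/login", json={"email": "user@example.com", "password": "secret"})
access_token = r.json()["jwt"]

# Authenticated calls carry only the access token.
session.get(f"{BASE}/account", headers={"Authorization": f"Bearer {access_token}"})

# When the access token expires, /refresh exchanges the cookie (sent
# automatically by the session) plus the expired token for a fresh pair.
r = session.get(f"{BASE}/refresh", headers={"Authorization": f"Bearer {access_token}"})
access_token = r.json()["jwt"]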
@@ -5,7 +5,7 @@ name = "pypi"

 [packages]
 requests = "==2.31.0"
-boto3 = "==1.28.40"
+boto3 = "==1.28.42"
 pyjwt = "==2.8.0"
 psycopg2-binary = "==2.9.7"
 elasticsearch = "==8.9.0"
@@ -1,3 +1,4 @@
+import datetime
 from typing import Optional

 from fastapi import Request
@@ -5,8 +6,20 @@ from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
 from starlette import status
 from starlette.exceptions import HTTPException

-from chalicelib.core import authorizers, users
 import schemas
+from chalicelib.core import authorizers, users
+
+
+def _get_current_auth_context(request: Request, jwt_payload: dict) -> schemas.CurrentContext:
+    user = users.get(user_id=jwt_payload.get("userId", -1), tenant_id=jwt_payload.get("tenantId", -1))
+    if user is None:
+        print("JWTAuth: User not found.")
+        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.")
+    request.state.authorizer_identity = "jwt"
+    request.state.currentContext = schemas.CurrentContext(tenantId=jwt_payload.get("tenantId", -1),
+                                                          userId=jwt_payload.get("userId", -1),
+                                                          email=user["email"])
+    return request.state.currentContext


 class JWTAuth(HTTPBearer):
@@ -14,40 +27,55 @@ class JWTAuth(HTTPBearer):
         super(JWTAuth, self).__init__(auto_error=auto_error)

     async def __call__(self, request: Request) -> Optional[schemas.CurrentContext]:
-        credentials: HTTPAuthorizationCredentials = await super(JWTAuth, self).__call__(request)
-        if credentials:
-            if not credentials.scheme == "Bearer":
-                raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.")
-            jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials)
-            auth_exists = jwt_payload is not None \
-                          and users.auth_exists(user_id=jwt_payload.get("userId", -1),
-                                                tenant_id=jwt_payload.get("tenantId", -1),
-                                                jwt_iat=jwt_payload.get("iat", 100),
-                                                jwt_aud=jwt_payload.get("aud", ""))
-            if jwt_payload is None \
-                    or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
-                    or not auth_exists:
-                if jwt_payload is not None:
-                    print(jwt_payload)
-                    if jwt_payload.get("iat") is None:
-                        print("JWTAuth: iat is None")
-                    if jwt_payload.get("aud") is None:
-                        print("JWTAuth: aud is None")
-                    if not auth_exists:
-                        print("JWTAuth: not users.auth_exists")
-                raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
-            user = users.get(user_id=jwt_payload.get("userId", -1), tenant_id=jwt_payload.get("tenantId", -1))
-            if user is None:
-                print("JWTAuth: User not found.")
-                raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.")
-            jwt_payload["authorizer_identity"] = "jwt"
-            request.state.authorizer_identity = "jwt"
-            request.state.currentContext = schemas.CurrentContext(tenantId=jwt_payload.get("tenantId", -1),
-                                                                  userId=jwt_payload.get("userId", -1),
-                                                                  email=user["email"])
-            return request.state.currentContext
-        else:
-            print("JWTAuth: Invalid authorization code.")
-            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authorization code.")
+        if request.url.path in ["/refresh", "/api/refresh"]:
+            refresh_token = request.cookies.get("refreshToken")
+            jwt_payload = authorizers.jwt_refresh_authorizer(scheme="Bearer", token=refresh_token)
+            if jwt_payload is None or jwt_payload.get("jti") is None:
+                raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
+            auth_exists = users.refresh_auth_exists(user_id=jwt_payload.get("userId", -1),
+                                                    jwt_jti=jwt_payload["jti"])
+            if not auth_exists:
+                raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
+
+            credentials: HTTPAuthorizationCredentials = await super(JWTAuth, self).__call__(request)
+            if credentials:
+                if not credentials.scheme == "Bearer":
+                    raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
+                                        detail="Invalid authentication scheme.")
+                old_jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials,
+                                                             leeway=datetime.timedelta(days=3))
+                if old_jwt_payload is None \
+                        or old_jwt_payload.get("userId") is None \
+                        or old_jwt_payload.get("userId") != jwt_payload.get("userId"):
+                    raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
+
+                return _get_current_auth_context(request=request, jwt_payload=jwt_payload)
+
+            else:
+                print("JWTAuth: Invalid authorization code.")
+                raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authorization code.")
+        credentials: HTTPAuthorizationCredentials = await super(JWTAuth, self).__call__(request)
+        if credentials:
+            if not credentials.scheme == "Bearer":
+                raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
+                                    detail="Invalid authentication scheme.")
+            jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials)
+            auth_exists = jwt_payload is not None \
+                          and users.auth_exists(user_id=jwt_payload.get("userId", -1),
+                                                jwt_iat=jwt_payload.get("iat", 100))
+            if jwt_payload is None \
+                    or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
+                    or not auth_exists:
+                if jwt_payload is not None:
+                    print(jwt_payload)
+                    if jwt_payload.get("iat") is None:
+                        print("JWTAuth: iat is None")
+                    if jwt_payload.get("aud") is None:
+                        print("JWTAuth: aud is None")
+                    if not auth_exists:
+                        print("JWTAuth: not users.auth_exists")

+                raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
+
+            return _get_current_auth_context(request=request, jwt_payload=jwt_payload)
+
+        print("JWTAuth: Invalid authorization code.")
+        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authorization code.")
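Two things stand out in the refresh branch above: the refresh cookie is validated with the dedicated refresh secret, and the already-expired access token is still decoded (with a 3-day leeway) so its userId can be compared against the refresh token's. A condensed restatement of that invariant, as a sketch rather than the literal code:

def may_refresh(refresh_payload, old_access_payload) -> bool:
    # Both tokens must be present and must agree on the user.
    return (refresh_payload is not None
            and refresh_payload.get("jti") is not None
            and old_access_payload is not None
            and old_access_payload.get("userId") is not None
            and old_access_payload.get("userId") == refresh_payload.get("userId"))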
@@ -6,7 +6,7 @@ from chalicelib.core import tenants
 from chalicelib.core import users


-def jwt_authorizer(scheme: str, token: str):
+def jwt_authorizer(scheme: str, token: str, leeway=0):
     if scheme.lower() != "bearer":
         return None
     try:
@@ -14,7 +14,8 @@ def jwt_authorizer(scheme: str, token: str):
             token,
             config("jwt_secret"),
             algorithms=config("jwt_algorithm"),
-            audience=[f"front:{helper.get_stage_name()}"]
+            audience=[f"front:{helper.get_stage_name()}"],
+            leeway=leeway
         )
     except jwt.ExpiredSignatureError:
         print("! JWT Expired signature")
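The new leeway parameter is what allows an expired access token to still be decoded during refresh: PyJWT treats leeway as a grace period applied to exp validation. A self-contained illustration (the secret and payload are made up for the example):

import datetime
import jwt  # PyJWT

secret = "demo-secret"
token = jwt.encode(
    {"userId": 1,
     "exp": datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(hours=1)},
    secret, algorithm="HS512")

try:
    jwt.decode(token, secret, algorithms=["HS512"])  # raises: token expired an hour ago
except jwt.ExpiredSignatureError:
    pass

# With leeway, the same expired token still decodes:
payload = jwt.decode(token, secret, algorithms=["HS512"],
                     leeway=datetime.timedelta(days=3))
assert payload["userId"] == 1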
@@ -26,6 +27,26 @@ def jwt_authorizer(scheme: str, token: str):
     return payload


+def jwt_refresh_authorizer(scheme: str, token: str):
+    if scheme.lower() != "bearer":
+        return None
+    try:
+        payload = jwt.decode(
+            token,
+            config("JWT_REFRESH_SECRET"),
+            algorithms=config("jwt_algorithm"),
+            audience=[f"front:{helper.get_stage_name()}"]
+        )
+    except jwt.ExpiredSignatureError:
+        print("! JWT-refresh Expired signature")
+        return None
+    except BaseException as e:
+        print("! JWT-refresh Base Exception")
+        print(e)
+        return None
+    return payload
+
+
 def jwt_context(context):
     user = users.get(user_id=context["userId"], tenant_id=context["tenantId"])
     if user is None:
@@ -37,18 +58,14 @@ def jwt_context(context):
     }


-def get_jwt_exp(iat):
-    return iat // 1000 + config("JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000
-
-
-def generate_jwt(id, tenant_id, iat, aud):
+def generate_jwt(user_id, tenant_id, iat, aud):
     token = jwt.encode(
         payload={
-            "userId": id,
+            "userId": user_id,
             "tenantId": tenant_id,
-            "exp": get_jwt_exp(iat),
+            "exp": iat + config("JWT_EXPIRATION", cast=int),
             "iss": config("JWT_ISSUER"),
-            "iat": iat // 1000,
+            "iat": iat,
            "aud": aud
        },
        key=config("jwt_secret"),
@@ -57,6 +74,23 @@ def generate_jwt(id, tenant_id, iat, aud):
     return token


+def generate_jwt_refresh(user_id, tenant_id, iat, aud, jwt_jti):
+    token = jwt.encode(
+        payload={
+            "userId": user_id,
+            "tenantId": tenant_id,
+            "exp": iat + config("JWT_REFRESH_EXPIRATION", cast=int),
+            "iss": config("JWT_ISSUER"),
+            "iat": iat,
+            "aud": aud,
+            "jti": jwt_jti
+        },
+        key=config("JWT_REFRESH_SECRET"),
+        algorithm=config("jwt_algorithm")
+    )
+    return token
+
+
 def api_key_authorizer(token):
     t = tenants.get_by_api_key(token)
     if t is not None:
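generate_jwt_refresh and jwt_refresh_authorizer are mirror images: the refresh token is an ordinary JWT signed with a separate secret and carrying a jti counter. A round-trip sketch with stand-in config values (the secret, issuer, and audience here are placeholders):

import time
import jwt  # PyJWT

REFRESH_SECRET = "stand-in refresh secret"  # JWT_REFRESH_SECRET in the real config
AUD = "front:default"

def mint_refresh(user_id: int, tenant_id: int, jti: int) -> str:
    iat = int(time.time())
    return jwt.encode({"userId": user_id, "tenantId": tenant_id, "iat": iat,
                       "exp": iat + 604800, "iss": "openreplay-oss", "aud": AUD, "jti": jti},
                      key=REFRESH_SECRET, algorithm="HS512")

def verify_refresh(token: str):
    try:
        return jwt.decode(token, REFRESH_SECRET, algorithms=["HS512"], audience=[AUD])
    except jwt.PyJWTError:
        return None

payload = verify_refresh(mint_refresh(user_id=1, tenant_id=1, jti=0))
assert payload is not None and payload["jti"] == 0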
@@ -230,8 +230,8 @@ def __search_errors_ios(project_id, value, key=None, source=None):
     if len(value) > 2:
         query = f"""(SELECT DISTINCT ON(lg.reason)
                         lg.reason AS value,
-                        '{events.EventType.ERROR_IOS.ui_type}' AS type
-                     FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
+                        '{events.EventType.CRASH_IOS.ui_type}' AS type
+                     FROM {events.EventType.CRASH_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                      WHERE
                         s.project_id = %(project_id)s
                         AND lg.project_id = %(project_id)s
@@ -240,8 +240,8 @@ def __search_errors_ios(project_id, value, key=None, source=None):
                     UNION ALL
                     (SELECT DISTINCT ON(lg.name)
                         lg.name AS value,
-                        '{events.EventType.ERROR_IOS.ui_type}' AS type
-                     FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
+                        '{events.EventType.CRASH_IOS.ui_type}' AS type
+                     FROM {events.EventType.CRASH_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                      WHERE
                         s.project_id = %(project_id)s
                         AND lg.project_id = %(project_id)s
@@ -250,8 +250,8 @@ def __search_errors_ios(project_id, value, key=None, source=None):
                     UNION ALL
                     (SELECT DISTINCT ON(lg.reason)
                         lg.reason AS value,
-                        '{events.EventType.ERROR_IOS.ui_type}' AS type
-                     FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
+                        '{events.EventType.CRASH_IOS.ui_type}' AS type
+                     FROM {events.EventType.CRASH_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                      WHERE
                         s.project_id = %(project_id)s
                         AND lg.project_id = %(project_id)s
@@ -260,8 +260,8 @@ def __search_errors_ios(project_id, value, key=None, source=None):
                     UNION ALL
                     (SELECT DISTINCT ON(lg.name)
                         lg.name AS value,
-                        '{events.EventType.ERROR_IOS.ui_type}' AS type
-                     FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
+                        '{events.EventType.CRASH_IOS.ui_type}' AS type
+                     FROM {events.EventType.CRASH_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                      WHERE
                         s.project_id = %(project_id)s
                         AND lg.project_id = %(project_id)s
@@ -270,8 +270,8 @@ def __search_errors_ios(project_id, value, key=None, source=None):
     else:
         query = f"""(SELECT DISTINCT ON(lg.reason)
                         lg.reason AS value,
-                        '{events.EventType.ERROR_IOS.ui_type}' AS type
-                     FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
+                        '{events.EventType.CRASH_IOS.ui_type}' AS type
+                     FROM {events.EventType.CRASH_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                      WHERE
                         s.project_id = %(project_id)s
                         AND lg.project_id = %(project_id)s
@@ -280,8 +280,8 @@ def __search_errors_ios(project_id, value, key=None, source=None):
                     UNION ALL
                     (SELECT DISTINCT ON(lg.name)
                         lg.name AS value,
-                        '{events.EventType.ERROR_IOS.ui_type}' AS type
-                     FROM {events.EventType.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
+                        '{events.EventType.CRASH_IOS.ui_type}' AS type
+                     FROM {events.EventType.CRASH_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                      WHERE
                         s.project_id = %(project_id)s
                         AND lg.project_id = %(project_id)s
@@ -16,6 +16,7 @@ PIE_CHART_GROUP = 5
 # TODO: refactor this to split
 #  timeseries /
 #  table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs
+#  remove "table of" calls from this function
 def __try_live(project_id, data: schemas.CardSchema):
     results = []
     for i, s in enumerate(data.series):
@@ -45,8 +46,13 @@ def __try_live(project_id, data: schemas.CardSchema):
     return results


-def __is_funnel_chart(data: schemas.CardSchema):
-    return data.metric_type == schemas.MetricType.funnel
+def __get_table_of_series(project_id, data: schemas.CardSchema):
+    results = []
+    for i, s in enumerate(data.series):
+        results.append(sessions.search2_table(data=s.filter, project_id=project_id, density=data.density,
+                                              metric_of=data.metric_of, metric_value=data.metric_value))
+
+    return results


 def __get_funnel_chart(project_id: int, data: schemas.CardFunnel, user_id: int = None):
@@ -58,11 +64,6 @@ def __get_funnel_chart(project_id: int, data: schemas.CardFunnel, user_id: int =
     return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)


-def __is_errors_list(data: schemas.CardSchema):
-    return data.metric_type == schemas.MetricType.table \
-           and data.metric_of == schemas.MetricOfTable.errors
-
-
 def __get_errors_list(project_id, user_id, data: schemas.CardSchema):
     if len(data.series) == 0:
         return {
@@ -72,11 +73,6 @@ def __get_errors_list(project_id, user_id, data: schemas.CardSchema):
     return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id)


-def __is_sessions_list(data: schemas.CardSchema):
-    return data.metric_type == schemas.MetricType.table \
-           and data.metric_of == schemas.MetricOfTable.sessions
-
-
 def __get_sessions_list(project_id, user_id, data: schemas.CardSchema):
     if len(data.series) == 0:
         print("empty series")
@@ -87,10 +83,6 @@ def __get_sessions_list(project_id, user_id, data: schemas.CardSchema):
     return sessions.search_sessions(data=data.series[0].filter, project_id=project_id, user_id=user_id)


-def __is_predefined(data: schemas.CardSchema):
-    return data.is_template
-
-
 def __is_click_map(data: schemas.CardSchema):
     return data.metric_type == schemas.MetricType.click_map

@@ -115,10 +107,6 @@ def __get_path_analysis_chart(project_id: int, user_id: int, data: schemas.CardP
                                        selected_event_type=data.metric_value, hide_minor_paths=data.hide_excess)


-def __is_path_analysis(data: schemas.CardSchema):
-    return data.metric_type == schemas.MetricType.pathAnalysis
-
-
 def __get_timeseries_chart(project_id: int, data: schemas.CardTimeSeries, user_id: int = None):
     series_charts = __try_live(project_id=project_id, data=data)
     if data.view_type == schemas.MetricTimeseriesViewType.progress:
@@ -131,13 +119,12 @@ def __get_timeseries_chart(project_id: int, data: schemas.CardTimeSeries, user_i
     return results


-def empty(**args):
+def not_supported(**args):
     raise Exception("not supported")


 def __get_table_of_user_ids(project_id: int, data: schemas.CardTable, user_id: int = None):
-    series_charts = __try_live(project_id=project_id, data=data)
-    return series_charts
+    return __get_table_of_series(project_id=project_id, data=data)


 def __get_table_of_sessions(project_id: int, data: schemas.CardTable, user_id):
@@ -149,23 +136,23 @@ def __get_table_of_errors(project_id: int, data: schemas.CardTable, user_id: int


 def __get_table_of_issues(project_id: int, data: schemas.CardTable, user_id: int = None):
-    return __try_live(project_id=project_id, data=data)
+    return __get_table_of_series(project_id=project_id, data=data)


 def __get_table_of_browsers(project_id: int, data: schemas.CardTable, user_id: int = None):
-    return __try_live(project_id=project_id, data=data)
+    return __get_table_of_series(project_id=project_id, data=data)


 def __get_table_of_devises(project_id: int, data: schemas.CardTable, user_id: int = None):
-    return __try_live(project_id=project_id, data=data)
+    return __get_table_of_series(project_id=project_id, data=data)


 def __get_table_of_countries(project_id: int, data: schemas.CardTable, user_id: int = None):
-    return __try_live(project_id=project_id, data=data)
+    return __get_table_of_series(project_id=project_id, data=data)


 def __get_table_of_urls(project_id: int, data: schemas.CardTable, user_id: int = None):
-    return __try_live(project_id=project_id, data=data)
+    return __get_table_of_series(project_id=project_id, data=data)


 def __get_table_chart(project_id: int, data: schemas.CardTable, user_id: int):
@@ -179,7 +166,7 @@ def __get_table_chart(project_id: int, data: schemas.CardTable, user_id: int):
         schemas.MetricOfTable.user_country: __get_table_of_countries,
         schemas.MetricOfTable.visited_url: __get_table_of_urls,
     }
-    return supported.get(data.metric_of, empty)(project_id=project_id, data=data, user_id=user_id)
+    return supported.get(data.metric_of, not_supported)(project_id=project_id, data=data, user_id=user_id)


 def get_chart(project_id: int, data: schemas.CardSchema, user_id: int):
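Renaming the fallback from empty to not_supported makes the dispatch pattern used throughout this module explicit: a dict maps the metric type or table kind to a handler, and dict.get supplies the fallback. In miniature, with hypothetical handlers:

def not_supported(**_):
    raise Exception("not supported")

def handle_timeseries(**kwargs):
    return {"kind": "timeseries", **kwargs}

SUPPORTED = {"timeseries": handle_timeseries}

SUPPORTED.get("timeseries", not_supported)(project_id=1)  # -> {'kind': 'timeseries', 'project_id': 1}
# SUPPORTED.get("insights", not_supported)(project_id=1)  # would raise "not supported"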
@@ -191,48 +178,10 @@ def get_chart(project_id: int, data: schemas.CardSchema, user_id: int):
         schemas.MetricType.table: __get_table_chart,
         schemas.MetricType.click_map: __get_click_map_chart,
         schemas.MetricType.funnel: __get_funnel_chart,
-        schemas.MetricType.insights: empty,
+        schemas.MetricType.insights: not_supported,
         schemas.MetricType.pathAnalysis: __get_path_analysis_chart
     }
-    return supported.get(data.metric_type, empty)(project_id=project_id, data=data, user_id=user_id)
-
-
-def merged_live(project_id, data: schemas.CardSchema, user_id=None):
-    return get_chart(project_id=project_id, data=data, user_id=user_id)
-    print("---1")
-    if data.is_template:
-        print("---2")
-        return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.model_dump())
-    elif __is_funnel_chart(data):
-        print("---3")
-        return __get_funnel_chart(project_id=project_id, data=data)
-    elif __is_errors_list(data):
-        print("---4")
-        return __get_errors_list(project_id=project_id, user_id=user_id, data=data)
-    elif __is_sessions_list(data):
-        print("---5")
-        return __get_sessions_list(project_id=project_id, user_id=user_id, data=data)
-    elif __is_click_map(data):
-        print("---6")
-        return __get_click_map_chart(project_id=project_id, user_id=user_id, data=data)
-    elif __is_path_analysis(data):
-        print("---7")
-        return __get_path_analysis_chart(project_id=project_id, data=data)
-    elif len(data.series) == 0:
-        print("---8")
-        return []
-    series_charts = __try_live(project_id=project_id, data=data)
-    print("---9")
-    if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table:
-        print("---10")
-        return series_charts
-    results = [{}] * len(series_charts[0])
-    print("---11")
-    for i in range(len(results)):
-        for j, series_chart in enumerate(series_charts):
-            results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"],
-                          data.series[j].name if data.series[j].name else j + 1: series_chart[i]["count"]}
-    return results
+    return supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id)


 def __merge_metric_with_data(metric: schemas.CardSchema,
@@ -260,10 +209,10 @@ def make_chart(project_id, user_id, data: schemas.CardSessionsSchema, metric: sc
         return None
     metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)

-    return merged_live(project_id=project_id, data=metric, user_id=user_id)
+    return get_chart(project_id=project_id, data=metric, user_id=user_id)


-def get_sessions(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
+def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
     # raw_metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False, include_data=True)
     raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
     if raw_metric is None:
@@ -317,7 +266,7 @@ def get_errors_list(project_id, user_id, metric_id, data: schemas.CardSessionsSc
                   **errors.search(data=s.filter, project_id=project_id, user_id=user_id)}


-def try_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
+def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
     results = []
     if len(data.series) == 0:
         return results
@@ -332,6 +281,58 @@ def try_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
     return results


+def __get_funnel_issues(project_id: int, user_id: int, data: schemas.CardFunnel):
+    if len(data.series) == 0:
+        return {"data": []}
+    data.series[0].filter.startTimestamp = data.startTimestamp
+    data.series[0].filter.endTimestamp = data.endTimestamp
+    data = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)
+    return {"data": data}
+
+
+def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
+    if len(data.series) == 0:
+        return {"data": []}
+    filters = []
+    print(data.series[0].filter.filters)
+    for f in data.series[0].filter.filters:
+        if schemas.ProductAnalyticsFilterType.has_value(f.type):
+            for sf in f.filters:
+                o = sf.model_dump()
+                o["isEvent"] = True
+                if f.type == schemas.ProductAnalyticsFilterType.exclude:
+                    o["operator"] = "notOn"
+                filters.append(o)
+        else:
+            o = f.model_dump()
+            o["isEvent"] = False
+            filters.append(o)
+    return __get_table_of_issues(project_id=project_id, user_id=user_id,
+                                 data=schemas.CardTable(
+                                     startTimestamp=data.startTimestamp,
+                                     endTimestamp=data.endTimestamp,
+                                     metricType=schemas.MetricType.table,
+                                     metricOf=schemas.MetricOfTable.issues,
+                                     viewType=schemas.MetricTableViewType.table,
+                                     series=[{"filter": {"filters": filters}}]))
+
+
+def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
+    if data.is_template:
+        return not_supported()
+    if data.metric_of == schemas.MetricOfTable.issues:
+        return __get_table_of_issues(project_id=project_id, user_id=user_id, data=data)
+    supported = {
+        schemas.MetricType.timeseries: not_supported,
+        schemas.MetricType.table: not_supported,
+        schemas.MetricType.click_map: not_supported,
+        schemas.MetricType.funnel: __get_funnel_issues,
+        schemas.MetricType.insights: not_supported,
+        schemas.MetricType.pathAnalysis: __get_path_analysis_issues,
+    }
+    return supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id)
+
+
 def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
     with pg_client.PostgresClient() as cur:
         session_data = None
@@ -116,7 +116,7 @@ class EventType:
     SWIPE_IOS = Event(ui_type=schemas.EventType.swipe_ios, table="events_ios.swipes", column="label")
     CUSTOM_IOS = Event(ui_type=schemas.EventType.custom_ios, table="events_common.customs", column="name")
     REQUEST_IOS = Event(ui_type=schemas.EventType.request_ios, table="events_common.requests", column="path")
-    ERROR_IOS = Event(ui_type=schemas.EventType.error_ios, table="events_ios.crashes",
+    CRASH_IOS = Event(ui_type=schemas.EventType.error_ios, table="events_common.crashes",
                       column=None)  # column=None because errors are searched by name or message

@@ -159,7 +159,7 @@ SUPPORTED_TYPES = {
     EventType.REQUEST_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(EventType.REQUEST_IOS),
                                                    query=autocomplete.__generic_query(
                                                        typename=EventType.REQUEST_IOS.ui_type)),
-    EventType.ERROR_IOS.ui_type: SupportedFilter(get=autocomplete.__search_errors_ios,
+    EventType.CRASH_IOS.ui_type: SupportedFilter(get=autocomplete.__search_errors_ios,
                                                  query=None),
 }

@@ -58,7 +58,9 @@ def get_crashes_by_session_id(session_id):
     with pg_client.PostgresClient() as cur:
         cur.execute(cur.mogrify(f"""
                     SELECT cr.*,uc.*, cr.timestamp - s.start_ts AS time
-                    FROM {events.EventType.ERROR_IOS.table} AS cr INNER JOIN public.crashes_ios AS uc USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
+                    FROM {events.EventType.CRASH_IOS.table} AS cr
+                        INNER JOIN public.crashes_ios AS uc USING (crash_ios_id)
+                        INNER JOIN public.sessions AS s USING (session_id)
                     WHERE
                         cr.session_id = %(session_id)s
                     ORDER BY timestamp;""", {"session_id": session_id}))
@@ -156,7 +156,7 @@ def create_feature_flag(project_id: int, user_id: int, feature_flag_data: schema
         """

     if variants_len > 0:
-        variants_query = f"""{conditions_len > 0 and "," or ""}
+        variants_query = f""",
             inserted_variants AS (
                 INSERT INTO feature_flags_variants(feature_flag_id, value, description, rollout_percentage, payload)
                 VALUES {",".join([f"((SELECT feature_flag_id FROM inserted_flag),"
@@ -455,8 +455,7 @@ def create_variants(feature_flag_id: int, variants: List[schemas.FeatureFlagVari
         """

     with pg_client.PostgresClient() as cur:
-        params = [(feature_flag_id, v.value, v.description, json.dumps(v.payload), v.rollout_percentage) for v in
-                  variants]
+        params = [(feature_flag_id, v.value, v.description, json.dumps(v.payload), v.rollout_percentage) for v in variants]
         query = cur.mogrify(sql, params)
         cur.execute(query)
         rows = cur.fetchall()
@@ -55,7 +55,6 @@ def __transform_journey2(rows, reverse_path=False):
             "links": sorted(links, key=lambda x: x["value"], reverse=True)}


-JOURNEY_DEPTH = 5
 JOURNEY_TYPES = {
     schemas.ProductAnalyticsSelectedEventType.location: {"table": "events.pages", "column": "path"},
     schemas.ProductAnalyticsSelectedEventType.click: {"table": "events.clicks", "column": "label"},
@@ -348,9 +347,8 @@ FROM limited_events
 GROUP BY event_number_in_session, event_type, e_value, next_type, next_value, sessions_count
 ORDER BY event_number_in_session, e_value, next_value;"""
     params = {"project_id": project_id, "startTimestamp": data.startTimestamp,
-              "endTimestamp": data.endTimestamp, "JOURNEY_DEPTH": JOURNEY_DEPTH,
+              "endTimestamp": data.endTimestamp, "density": density,
               "eventThresholdNumberInGroup": 8 if hide_minor_paths else 6,
-              "density": density,
               # TODO: add if data=args is required
               # **__get_constraint_values(args),
               **extra_values}
@@ -365,7 +363,6 @@ ORDER BY event_number_in_session, e_value, next_value;"""
         print("----------------------")
         rows = cur.fetchall()

-    # return __transform_journey(rows)
     return __transform_journey2(rows=rows, reverse_path=reverse)

 #
|
@ -41,12 +41,12 @@ def __update(tenant_id, project_id, changes):
|
|||
return helper.dict_to_camel_case(cur.fetchone())
|
||||
|
||||
|
||||
def __create(tenant_id, name):
|
||||
def __create(tenant_id, data):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""INSERT INTO public.projects (name, active)
|
||||
VALUES (%(name)s,TRUE)
|
||||
query = cur.mogrify(f"""INSERT INTO public.projects (name, platform, active)
|
||||
VALUES (%(name)s,%(platform)s,TRUE)
|
||||
RETURNING project_id;""",
|
||||
{"name": name})
|
||||
data)
|
||||
cur.execute(query=query)
|
||||
project_id = cur.fetchone()["project_id"]
|
||||
return get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True)
|
||||
|
|
@@ -160,7 +160,7 @@ def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authoriza
         admin = users.get(user_id=user_id, tenant_id=tenant_id)
         if not admin["admin"] and not admin["superAdmin"]:
             return {"errors": ["unauthorized"]}
-    return {"data": __create(tenant_id=tenant_id, name=data.name)}
+    return {"data": __create(tenant_id=tenant_id, data=data.model_dump())}


 def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema):
@@ -170,7 +170,7 @@ def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema):
     if not admin["admin"] and not admin["superAdmin"]:
         return {"errors": ["unauthorized"]}
     return {"data": __update(tenant_id=tenant_id, project_id=project_id,
-                             changes={"name": data.name})}
+                             changes=data.model_dump())}


 def delete(tenant_id, user_id, project_id):
@@ -253,7 +253,7 @@ def get_capture_status(project_id):
         return helper.dict_to_camel_case(cur.fetchone())


-def update_capture_status(project_id, changes:schemas.SampleRateSchema):
+def update_capture_status(project_id, changes: schemas.SampleRateSchema):
     sample_rate = changes.rate
     if changes.capture_all:
         sample_rate = 100
@@ -161,6 +161,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
     }


+# TODO: remove "table of" search from this function
 def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
                    view_type: schemas.MetricTimeseriesViewType, metric_type: schemas.MetricType,
                    metric_of: schemas.MetricOfTable, metric_value: List):
@@ -212,6 +213,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
             else:
                 sessions = cur.fetchone()["count"]
         elif metric_type == schemas.MetricType.table:
+            print(">>>>>>>>>>>>>TABLE")
             if isinstance(metric_of, schemas.MetricOfTable):
                 main_col = "user_id"
                 extra_col = ""
@@ -239,7 +241,9 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
                     extra_col = ", path"
                     distinct_on += ",path"
             main_query = cur.mogrify(f"""{pre_query}
-                                         SELECT COUNT(*) AS count, COALESCE(JSONB_AGG(users_sessions) FILTER ( WHERE rn <= 200 ), '[]'::JSONB) AS values
+                                         SELECT COUNT(*) AS count,
+                                                SUM(users_sessions.session_count) AS total_sessions,
+                                                COALESCE(JSONB_AGG(users_sessions) FILTER ( WHERE rn <= 200 ), '[]'::JSONB) AS values
                                          FROM (SELECT {main_col} AS name,
                                                       count(DISTINCT session_id) AS session_count,
                                                       ROW_NUMBER() OVER (ORDER BY count(full_sessions) DESC) AS rn
@@ -259,10 +263,81 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
             # print(main_query)
             # print("--------------------")
             cur.execute(main_query)
-            sessions = cur.fetchone()
+            sessions = helper.dict_to_camel_case(cur.fetchone())
             for s in sessions["values"]:
                 s.pop("rn")
-            sessions["values"] = helper.list_to_camel_case(sessions["values"])

     return sessions


+def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
+                  metric_of: schemas.MetricOfTable, metric_value: List):
+    step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
+                                                   density=density, factor=1, decimal=True))
+    extra_event = None
+    if metric_of == schemas.MetricOfTable.visited_url:
+        extra_event = "events.pages"
+    elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0:
+        data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
+                                                              operator=schemas.SearchEventOperator._is))
+    full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False,
+                                               favorite_only=False, issue=None, project_id=project_id,
+                                               user_id=None, extra_event=extra_event)
+    full_args["step_size"] = step_size
+    with pg_client.PostgresClient() as cur:
+        if isinstance(metric_of, schemas.MetricOfTable):
+            main_col = "user_id"
+            extra_col = ""
+            extra_where = ""
+            pre_query = ""
+            distinct_on = "s.session_id"
+            if metric_of == schemas.MetricOfTable.user_country:
+                main_col = "user_country"
+            elif metric_of == schemas.MetricOfTable.user_device:
+                main_col = "user_device"
+            elif metric_of == schemas.MetricOfTable.user_browser:
+                main_col = "user_browser"
+            elif metric_of == schemas.MetricOfTable.issues:
+                main_col = "issue"
+                extra_col = f", UNNEST(s.issue_types) AS {main_col}"
+                if len(metric_value) > 0:
+                    extra_where = []
+                    for i in range(len(metric_value)):
+                        arg_name = f"selected_issue_{i}"
+                        extra_where.append(f"{main_col} = %({arg_name})s")
+                        full_args[arg_name] = metric_value[i]
+                    extra_where = f"WHERE ({' OR '.join(extra_where)})"
+            elif metric_of == schemas.MetricOfTable.visited_url:
+                main_col = "path"
+                extra_col = ", path"
+                distinct_on += ",path"
+        main_query = cur.mogrify(f"""{pre_query}
+                                     SELECT COUNT(*) AS count,
+                                            COALESCE(SUM(users_sessions.session_count),0) AS total_sessions,
+                                            COALESCE(JSONB_AGG(users_sessions) FILTER ( WHERE rn <= 200 ), '[]'::JSONB) AS values
+                                     FROM (SELECT {main_col} AS name,
+                                                  count(DISTINCT session_id) AS session_count,
+                                                  ROW_NUMBER() OVER (ORDER BY count(full_sessions) DESC) AS rn
+                                           FROM (SELECT *
+                                                 FROM (SELECT DISTINCT ON({distinct_on}) s.session_id, s.user_uuid,
+                                                                                         s.user_id, s.user_os,
+                                                                                         s.user_browser, s.user_device,
+                                                                                         s.user_device_type, s.user_country, s.issue_types{extra_col}
+                                                       {query_part}
+                                                       ORDER BY s.session_id desc) AS filtred_sessions
+                                                 ) AS full_sessions
+                                           {extra_where}
+                                           GROUP BY {main_col}
+                                           ORDER BY session_count DESC) AS users_sessions;""",
+                                 full_args)
+        # print("--------------------")
+        # print(main_query)
+        # print("--------------------")
+        cur.execute(main_query)
+        sessions = helper.dict_to_camel_case(cur.fetchone())
+        for s in sessions["values"]:
+            s.pop("rn")
+        # sessions["values"] = helper.list_to_camel_case(sessions["values"])

     return sessions

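The new search2_table builds a single aggregate over the filtered sessions: the inner DISTINCT ON deduplicates sessions, ROW_NUMBER ranks the grouped values, and the outer query folds at most 200 rows into a JSONB array plus totals. A toy, pure-Python restatement of what that aggregate returns (grouping by country as an example; names are illustrative):

from collections import Counter

sessions = [{"session_id": i, "user_country": c}
            for i, c in enumerate(["FR", "FR", "DE", "US", "DE", "FR"])]

counts = Counter(s["user_country"] for s in sessions)
ranked = sorted(counts.items(), key=lambda kv: kv[1], reverse=True)
result = {"count": len(ranked),
          "total_sessions": sum(counts.values()),
          "values": [{"name": name, "sessionCount": n} for name, n in ranked[:200]]}
# -> {'count': 3, 'total_sessions': 6, 'values': [{'name': 'FR', 'sessionCount': 3}, ...]}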
@@ -671,8 +746,8 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
                     event_where.append(
                         sh.multi_conditions(f"main.{events.EventType.REQUEST_IOS.column} {op} %({e_k})s",
                                             event.value, value_key=e_k))
-                elif event_type == events.EventType.ERROR_IOS.ui_type:
-                    event_from = event_from % f"{events.EventType.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)"
+                elif event_type == events.EventType.CRASH_IOS.ui_type:
+                    event_from = event_from % f"{events.EventType.CRASH_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)"
                     if not is_any and event.value not in [None, "*", ""]:
                         event_where.append(
                             sh.multi_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)",
@@ -108,6 +108,8 @@ def create_tenant(data: schemas.UserSignupSchema):
     }
     return {
         'jwt': r.pop('jwt'),
+        'refreshToken': r.pop('refreshToken'),
+        'refreshTokenMaxAge': r.pop('refreshTokenMaxAge'),
         'data': {
             "user": r,
             "client": c,
@@ -593,35 +593,69 @@ def get_by_invitation_token(token, pass_token=None):
     return helper.dict_to_camel_case(r)


-def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
+def auth_exists(user_id, jwt_iat):
     with pg_client.PostgresClient() as cur:
         cur.execute(
-            cur.mogrify(f"""SELECT user_id,jwt_iat, changed_at
-                            FROM public.users
-                                INNER JOIN public.basic_authentication USING(user_id)
+            cur.mogrify(f"""SELECT user_id, EXTRACT(epoch FROM jwt_iat)::BIGINT AS jwt_iat
+                            FROM public.users
                             WHERE user_id = %(userId)s
-                                AND deleted_at IS NULL
+                              AND deleted_at IS NULL
                             LIMIT 1;""",
                         {"userId": user_id})
         )
         r = cur.fetchone()
         return r is not None \
                and r.get("jwt_iat") is not None \
-               and abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1
+               and abs(jwt_iat - r["jwt_iat"]) <= 1


-def change_jwt_iat(user_id):
+def refresh_auth_exists(user_id, jwt_jti=None):
+    with pg_client.PostgresClient() as cur:
+        cur.execute(
+            cur.mogrify(f"""SELECT user_id
+                            FROM public.users
+                            WHERE user_id = %(userId)s
+                              AND deleted_at IS NULL
+                              AND jwt_refresh_jti = %(jwt_jti)s
+                            LIMIT 1;""",
+                        {"userId": user_id, "jwt_jti": jwt_jti})
+        )
+        r = cur.fetchone()
+        return r is not None
+
+
+def change_jwt_iat_jti(user_id):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(f"""UPDATE public.users
-                                SET jwt_iat = timezone('utc'::text, now())
+                                SET jwt_iat = timezone('utc'::text, now()-INTERVAL '2s'),
+                                    jwt_refresh_jti = 0,
+                                    jwt_refresh_iat = timezone('utc'::text, now()-INTERVAL '2s')
                                 WHERE user_id = %(user_id)s
-                                RETURNING jwt_iat;""",
+                                RETURNING EXTRACT (epoch FROM jwt_iat)::BIGINT AS jwt_iat,
+                                          jwt_refresh_jti,
+                                          EXTRACT (epoch FROM jwt_refresh_iat)::BIGINT AS jwt_refresh_iat;""",
                             {"user_id": user_id})
         cur.execute(query)
-        return cur.fetchone().get("jwt_iat")
+        row = cur.fetchone()
+        return row.get("jwt_iat"), row.get("jwt_refresh_jti"), row.get("jwt_refresh_iat")


-def authenticate(email, password, for_change_password=False) -> dict | None:
+def refresh_jwt_iat_jti(user_id):
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(f"""UPDATE public.users
+                                SET jwt_iat = timezone('utc'::text, now()-INTERVAL '2s'),
+                                    jwt_refresh_jti = jwt_refresh_jti + 1
+                                WHERE user_id = %(user_id)s
+                                RETURNING EXTRACT (epoch FROM jwt_iat)::BIGINT AS jwt_iat,
+                                          jwt_refresh_jti,
+                                          EXTRACT (epoch FROM jwt_refresh_iat)::BIGINT AS jwt_refresh_iat""",
+                            {"user_id": user_id})
+        cur.execute(query)
+        row = cur.fetchone()
+        return row.get("jwt_iat"), row.get("jwt_refresh_jti"), row.get("jwt_refresh_iat")
+
+
+def authenticate(email, password, for_change_password=False) -> dict | bool | None:
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(
             f"""SELECT
@@ -646,17 +680,42 @@ def authenticate(email, password, for_change_password=False) -> dict | None:
         if for_change_password:
             return True
         r = helper.dict_to_camel_case(r)
-        jwt_iat = change_jwt_iat(r['userId'])
-        iat = TimeUTC.datetime_to_timestamp(jwt_iat)
+        jwt_iat, jwt_r_jti, jwt_r_iat = change_jwt_iat_jti(user_id=r['userId'])
         return {
-            "jwt": authorizers.generate_jwt(r['userId'], r['tenantId'], iat=iat,
+            "jwt": authorizers.generate_jwt(user_id=r['userId'], tenant_id=r['tenantId'], iat=jwt_iat,
                                             aud=f"front:{helper.get_stage_name()}"),
+            "refreshToken": authorizers.generate_jwt_refresh(user_id=r['userId'], tenant_id=r['tenantId'],
+                                                             iat=jwt_r_iat, aud=f"front:{helper.get_stage_name()}",
+                                                             jwt_jti=jwt_r_jti),
+            "refreshTokenMaxAge": config("JWT_REFRESH_EXPIRATION", cast=int),
             "email": email,
             **r
         }
     return None


+def logout(user_id: int):
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(
+            """UPDATE public.users
+               SET jwt_iat = NULL, jwt_refresh_jti = NULL, jwt_refresh_iat = NULL
+               WHERE user_id = %(user_id)s;""",
+            {"user_id": user_id})
+        cur.execute(query)
+
+
+def refresh(user_id: int, tenant_id: int = -1) -> dict:
+    jwt_iat, jwt_r_jti, jwt_r_iat = refresh_jwt_iat_jti(user_id=user_id)
+    return {
+        "jwt": authorizers.generate_jwt(user_id=user_id, tenant_id=tenant_id, iat=jwt_iat,
+                                        aud=f"front:{helper.get_stage_name()}"),
+        "refreshToken": authorizers.generate_jwt_refresh(user_id=user_id, tenant_id=tenant_id,
+                                                         iat=jwt_r_iat, aud=f"front:{helper.get_stage_name()}",
+                                                         jwt_jti=jwt_r_jti),
+        "refreshTokenMaxAge": config("JWT_REFRESH_EXPIRATION", cast=int) - (jwt_iat - jwt_r_iat)
+    }
+
+
 def get_user_role(tenant_id, user_id):
     with pg_client.PostgresClient() as cur:
         cur.execute(
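refresh_jwt_iat_jti is what implements refresh-token rotation (the "RTR" fix in the commit message): every successful refresh increments jwt_refresh_jti, so a replayed older refresh token no longer matches refresh_auth_exists. The rotation semantics in a toy, in-memory form:

# In-memory stand-in for the users table column jwt_refresh_jti.
db = {"user-1": {"jwt_refresh_jti": 0}}

def rotate_jti(user_id: str) -> int:
    db[user_id]["jwt_refresh_jti"] += 1            # rotation: older tokens die here
    return db[user_id]["jwt_refresh_jti"]

def refresh_auth_exists(user_id: str, jwt_jti: int) -> bool:
    return db[user_id]["jwt_refresh_jti"] == jwt_jti

old_jti = db["user-1"]["jwt_refresh_jti"]          # jti embedded in the current cookie
new_jti = rotate_jti("user-1")                     # the user refreshes once
assert refresh_auth_exists("user-1", new_jti)      # the newly issued cookie works
assert not refresh_auth_exists("user-1", old_jti)  # a replayed old cookie is rejected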
@@ -19,9 +19,11 @@ change_password_link=/reset-password?invitation=%s&&pass=%s
 invitation_link=/api/users/invitation?token=%s
 js_cache_bucket=sessions-assets
 jwt_algorithm=HS512
-JWT_EXPIRATION=2592000
+JWT_EXPIRATION=120
+JWT_REFRESH_EXPIRATION=604800
 JWT_ISSUER=openreplay-oss
 jwt_secret="SET A RANDOM STRING HERE"
+JWT_REFRESH_SECRET="SET A RANDOM STRING HERE"
 ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s
 ASSIST_KEY=
 assist=/sockets-live
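With these defaults the access token now lives 120 seconds instead of 30 days, while the refresh token lives 7 days (604800 seconds). Both values are read as integer seconds via python-decouple, e.g.:

from decouple import config

JWT_EXPIRATION = config("JWT_EXPIRATION", cast=int)                  # 120 -> short-lived access token
JWT_REFRESH_EXPIRATION = config("JWT_REFRESH_EXPIRATION", cast=int)  # 604800 -> 7-day refresh token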
@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.31.0
-boto3==1.28.41
+boto3==1.28.42
 pyjwt==2.8.0
 psycopg2-binary==2.9.7
 elasticsearch==8.9.0
@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.31.0
-boto3==1.28.41
+boto3==1.28.42
 pyjwt==2.8.0
 psycopg2-binary==2.9.7
 elasticsearch==8.9.0
@@ -3,7 +3,7 @@ from typing import Optional, Union
 from decouple import config
 from fastapi import Body, Depends, BackgroundTasks
 from fastapi import HTTPException, status
-from starlette.responses import RedirectResponse, FileResponse
+from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Response

 import schemas
 from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, sessions_assignments, heatmaps, \
@@ -34,11 +34,17 @@ if not tenants.tenants_exists(use_pool=False):
     @public_app.post('/signup', tags=['signup'])
     @public_app.put('/signup', tags=['signup'])
     def signup_handler(data: schemas.UserSignupSchema = Body(...)):
-        return signup.create_tenant(data)
+        content = signup.create_tenant(data)
+        refresh_token = content.pop("refreshToken")
+        refresh_token_max_age = content.pop("refreshTokenMaxAge")
+        response = JSONResponse(content=content)
+        response.set_cookie(key="refreshToken", value=refresh_token, path="/api/refresh",
+                            max_age=refresh_token_max_age, secure=True, httponly=True)
+        return response


 @public_app.post('/login', tags=["authentication"])
-def login_user(data: schemas.UserLoginSchema = Body(...)):
+def login_user(response: JSONResponse, data: schemas.UserLoginSchema = Body(...)):
     if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,
@@ -58,21 +64,37 @@ def login_user(data: schemas.UserLoginSchema = Body(...)):
         )

     r["smtp"] = smtp.has_smtp()
+    refresh_token = r.pop("refreshToken")
+    refresh_token_max_age = r.pop("refreshTokenMaxAge")
     content = {
         'jwt': r.pop('jwt'),
         'data': {
             "user": r
         }
     }
-
-    return content
+    response = JSONResponse(content=content)
+    response.set_cookie(key="refreshToken", value=refresh_token, path="/api/refresh",
+                        max_age=refresh_token_max_age, secure=True, httponly=True)
+    return response


-@app.get('/logout', tags=["login", "logout"])
-def logout_user(context: schemas.CurrentContext = Depends(OR_context)):
+@app.get('/logout', tags=["login"])
+def logout_user(response: Response, context: schemas.CurrentContext = Depends(OR_context)):
+    users.logout(user_id=context.user_id)
+    response.delete_cookie(key="refreshToken", path="/api/refresh")
     return {"data": "success"}


+@app.get('/refresh', tags=["login"])
+def refresh_login(context: schemas.CurrentContext = Depends(OR_context)):
+    r = users.refresh(user_id=context.user_id)
+    content = {"jwt": r.get("jwt")}
+    response = JSONResponse(content=content)
+    response.set_cookie(key="refreshToken", value=r.get("refreshToken"), path="/api/refresh",
+                        max_age=r.pop("refreshTokenMaxAge"), secure=True, httponly=True)
+    return response
+
+
 @app.get('/account', tags=['accounts'])
 def get_account(context: schemas.CurrentContext = Depends(OR_context)):
     r = users.get(tenant_id=context.tenant_id, user_id=context.user_id)
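Scoping the cookie to path=/api/refresh means the browser only attaches the refresh token to the refresh endpoint, keeping it off every other API call. A minimal, stand-alone FastAPI sketch of the same set-cookie pattern (not the app above; values are placeholders):

from fastapi import FastAPI, Response
from fastapi.testclient import TestClient

app = FastAPI()

@app.get("/login")
def login(response: Response):
    response.set_cookie(key="refreshToken", value="opaque-token", path="/api/refresh",
                        max_age=604800, secure=True, httponly=True)
    return {"jwt": "access-token"}

client = TestClient(app)
cookie = client.get("/login").headers["set-cookie"]
assert "Path=/api/refresh" in cookie and "HttpOnly" in cookie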
@@ -102,7 +102,7 @@ def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int
 # @app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
 def try_card(projectId: int, data: schemas.CardSchema = Body(...),
              context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": custom_metrics.merged_live(project_id=projectId, data=data, user_id=context.user_id)}
+    return {"data": custom_metrics.get_chart(project_id=projectId, data=data, user_id=context.user_id)}


 @app.post('/{projectId}/cards/try/sessions', tags=["cards"])
@@ -110,21 +110,16 @@ def try_card(projectId: int, data: schemas.CardSchema = Body(...),
 # @app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"])
 def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(...),
                       context: schemas.CurrentContext = Depends(OR_context)):
-    data = custom_metrics.try_sessions(project_id=projectId, user_id=context.user_id, data=data)
+    data = custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, data=data)
     return {"data": data}


 @app.post('/{projectId}/cards/try/issues', tags=["cards"])
 # @app.post('/{projectId}/metrics/try/issues', tags=["dashboard"])
 # @app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"])
-def try_card_funnel_issues(projectId: int, data: schemas.CardSessionsSchema = Body(...),
-                           context: schemas.CurrentContext = Depends(OR_context)):
-    if len(data.series) == 0:
-        return {"data": []}
-    data.series[0].filter.startTimestamp = data.startTimestamp
-    data.series[0].filter.endTimestamp = data.endTimestamp
-    data = funnels.get_issues_on_the_fly_widget(project_id=projectId, data=data.series[0].filter)
-    return {"data": data}
+def try_card_issues(projectId: int, data: schemas.CardSchema = Body(...),
+                    context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": custom_metrics.get_issues(project_id=projectId, user_id=context.user_id, data=data)}


 @app.get('/{projectId}/cards', tags=["cards"])
@@ -180,7 +175,8 @@ def get_card(projectId: int, metric_id: Union[int, str], context: schemas.Curren
 def get_card_sessions(projectId: int, metric_id: int,
                       data: schemas.CardSessionsSchema = Body(...),
                       context: schemas.CurrentContext = Depends(OR_context)):
-    data = custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
+    data = custom_metrics.get_sessions_by_card_id(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
+                                                  data=data)
     if data is None:
         return {"errors": ["custom metric not found"]}
     return {"data": data}
@@ -111,6 +111,7 @@ class EditUserPasswordSchema(BaseModel):

 class CreateProjectSchema(BaseModel):
     name: str = Field(default="my first project")
+    platform: Literal["web", "ios"] = Field(default="web")

     _transform_name = field_validator('name', mode='before')(remove_whitespace)

@@ -458,12 +459,13 @@ class EventType(str, Enum):
     graphql = "graphql"
     state_action = "stateAction"
     error = "error"
-    click_ios = "clickIos"
+    click_ios = "tapIos"
     input_ios = "inputIos"
     view_ios = "viewIos"
     custom_ios = "customIos"
     request_ios = "requestIos"
     error_ios = "errorIos"
+    swipe_ios = "swipeIos"


 class PerformanceEventType(str, Enum):
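Note that only the wire value changes here: the member is still EventType.click_ios, so Python references keep working while serialized payloads now say "tapIos" (and "swipeIos" becomes a valid input). An illustration with an excerpt of the enum:

from enum import Enum

class EventType(str, Enum):  # excerpt of the schema enum
    click_ios = "tapIos"
    swipe_ios = "swipeIos"

assert EventType.click_ios.value == "tapIos"
assert EventType("tapIos") is EventType.click_ios  # lookups go by value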
@@ -549,6 +551,8 @@ class IssueType(str, Enum):
     custom = 'custom'
     js_exception = 'js_exception'
     mouse_thrashing = 'mouse_thrashing'
+    # IOS
+    tap_rage = 'tap_rage'


 class MetricFormatType(str, Enum):
@@ -926,9 +930,6 @@ class MobileSignPayloadSchema(BaseModel):


 class CardSeriesFilterSchema(SearchErrorsSchema):
-    # TODO: transform these if they are used by the UI
-    # startDate: Optional[int] = Field(default=None)
-    # endDate: Optional[int] = Field(default=None)
     sort: Optional[str] = Field(default=None)
     order: SortOrderType = Field(default=SortOrderType.desc)
     group_by_user: Literal[False] = False
@@ -1110,7 +1111,7 @@ class CardConfigSchema(BaseModel):
 class __CardSchema(CardSessionsSchema):
     name: Optional[str] = Field(default=None)
     is_public: bool = Field(default=True)
-    default_config: CardConfigSchema = Field(..., alias="config")
+    default_config: CardConfigSchema = Field(default=CardConfigSchema(), alias="config")
     thumbnail: Optional[str] = Field(default=None)
     metric_format: Optional[MetricFormatType] = Field(default=None)

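Replacing the required Field(..., alias="config") with a defaulted instance makes the config block optional in request bodies. A simplified stand-in model showing the effect (CardConfig here is hypothetical, not the real schema):

from typing import Optional
from pydantic import BaseModel, Field

class CardConfig(BaseModel):  # simplified stand-in for CardConfigSchema
    col: Optional[int] = None

class Card(BaseModel):
    config: CardConfig = Field(default=CardConfig())

card = Card()                     # no config supplied -> default applies
assert card.config == CardConfig()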
@@ -1318,7 +1319,6 @@ class CardPathAnalysis(__CardSchema):
     metric_value: List[ProductAnalyticsSelectedEventType] = Field(default=[ProductAnalyticsSelectedEventType.location])
     density: int = Field(default=4, ge=2, le=10)

-    # TODO: testing
     series: List[CardPathAnalysisSchema] = Field(default=[])

     @model_validator(mode="before")
@@ -1583,8 +1583,6 @@ class FeatureFlagConditionFilterSchema(BaseModel):
     type: FilterType = Field(...)
     value: List[str] = Field(default=[], min_length=1)
     operator: Union[SearchEventOperator, MathOperator] = Field(...)
-    source: Optional[str] = Field(default=None)
-    sourceOperator: Optional[Union[SearchEventOperator, MathOperator]] = Field(default=None)


 class FeatureFlagCondition(BaseModel):
@@ -1611,12 +1609,6 @@ class FeatureFlagStatus(BaseModel):
    is_active: bool = Field(...)


-class ModuleStatus(BaseModel):
-    module: Literal["assist", "notes", "bug-reports", "offline-recordings", "alerts"] = Field(...,
-                                                                                              description="Possible values: notes, bugs, live")
-    status: bool = Field(...)
-
-
class FeatureFlagSchema(BaseModel):
    payload: Optional[str] = Field(default=None)
    flag_key: str = Field(..., pattern=r'^[a-zA-Z0-9\-]+$')
@@ -1626,3 +1618,9 @@ class FeatureFlagSchema(BaseModel):
    is_active: Optional[bool] = Field(default=True)
    conditions: List[FeatureFlagCondition] = Field(default=[], min_length=1)
    variants: List[FeatureFlagVariant] = Field(default=[])
+
+
+class ModuleStatus(BaseModel):
+    module: Literal["assist", "notes", "bug-reports",
+                    "offline-recordings", "alerts"] = Field(..., description="Possible values: notes, bugs, live")
+    status: bool = Field(...)
@@ -6,7 +6,7 @@ name = "pypi"
[packages]
urllib3 = "==1.26.16"
requests = "==2.31.0"
-boto3 = "==1.28.40"
+boto3 = "==1.28.42"
pyjwt = "==2.8.0"
psycopg2-binary = "==2.9.7"
elasticsearch = "==8.9.0"
@@ -1,3 +1,4 @@
+import datetime
from typing import Optional

from fastapi import Request
@@ -9,49 +10,78 @@ from chalicelib.core import authorizers, users
import schemas


+def _get_current_auth_context(request: Request, jwt_payload: dict) -> schemas.CurrentContext:
+    user = users.get(user_id=jwt_payload.get("userId", -1), tenant_id=jwt_payload.get("tenantId", -1))
+    if user is None:
+        print("JWTAuth: User not found.")
+        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.")
+    request.state.authorizer_identity = "jwt"
+    if user["serviceAccount"]:
+        user["permissions"] = [p.value for p in schemas.ServicePermissions]
+    request.state.currentContext = schemas.CurrentContext(tenantId=jwt_payload.get("tenantId", -1),
+                                                          userId=jwt_payload.get("userId", -1),
+                                                          email=user["email"],
+                                                          permissions=user["permissions"],
+                                                          serviceAccount=user["serviceAccount"])
+    return request.state.currentContext


class JWTAuth(HTTPBearer):
    def __init__(self, auto_error: bool = True):
        super(JWTAuth, self).__init__(auto_error=auto_error)

    async def __call__(self, request: Request) -> Optional[schemas.CurrentContext]:
-        credentials: HTTPAuthorizationCredentials = await super(JWTAuth, self).__call__(request)
-        if credentials:
-            if not credentials.scheme == "Bearer":
-                raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.")
-            jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials)
-            auth_exists = jwt_payload is not None \
-                          and users.auth_exists(user_id=jwt_payload.get("userId", -1),
-                                                tenant_id=jwt_payload.get("tenantId", -1),
-                                                jwt_iat=jwt_payload.get("iat", 100),
-                                                jwt_aud=jwt_payload.get("aud", ""))
-            if jwt_payload is None \
-                    or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
-                    or not auth_exists:
-                if jwt_payload is not None:
-                    print(jwt_payload)
-                    if jwt_payload.get("iat") is None:
-                        print("JWTAuth: iat is None")
-                    if jwt_payload.get("aud") is None:
-                        print("JWTAuth: aud is None")
-                    if not auth_exists:
-                        print("JWTAuth: not users.auth_exists")
+        if request.url.path in ["/refresh", "/api/refresh"]:
+            refresh_token = request.cookies.get("refreshToken")
+            jwt_payload = authorizers.jwt_refresh_authorizer(scheme="Bearer", token=refresh_token)
+            if jwt_payload is None or jwt_payload.get("jti") is None:
+                raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
-            user = users.get(user_id=jwt_payload.get("userId", -1), tenant_id=jwt_payload.get("tenantId", -1))
-            if user is None:
-                print("JWTAuth: User not found.")
-                raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.")
-            jwt_payload["authorizer_identity"] = "jwt"
-            request.state.authorizer_identity = "jwt"
-            if user["serviceAccount"]:
-                user["permissions"] = [p.value for p in schemas_ee.ServicePermissions]
-            request.state.currentContext = schemas.CurrentContext(tenantId=jwt_payload.get("tenantId", -1),
-                                                                  userId=jwt_payload.get("userId", -1),
-                                                                  email=user["email"],
-                                                                  permissions=user["permissions"],
-                                                                  serviceAccount=user["serviceAccount"])
-            return request.state.currentContext
+            auth_exists = users.refresh_auth_exists(user_id=jwt_payload.get("userId", -1),
+                                                    tenant_id=jwt_payload.get("tenantId", -1),
+                                                    jwt_jti=jwt_payload["jti"])
+            if not auth_exists:
+                raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
+
+            credentials: HTTPAuthorizationCredentials = await super(JWTAuth, self).__call__(request)
+            if credentials:
+                if not credentials.scheme == "Bearer":
+                    raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
+                                        detail="Invalid authentication scheme.")
+                old_jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials,
+                                                             leeway=datetime.timedelta(days=3))
+                if old_jwt_payload is None \
+                        or old_jwt_payload.get("userId") is None \
+                        or old_jwt_payload.get("userId") != jwt_payload.get("userId"):
+                    raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
+
+                return _get_current_auth_context(request=request, jwt_payload=jwt_payload)
+
+            else:
+                print("JWTAuth: Invalid authorization code.")
+                raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authorization code.")
+        credentials: HTTPAuthorizationCredentials = await super(JWTAuth, self).__call__(request)
+        if credentials:
+            if not credentials.scheme == "Bearer":
+                raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
+                                    detail="Invalid authentication scheme.")
+            jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials)
+            auth_exists = jwt_payload is not None \
+                          and users.auth_exists(user_id=jwt_payload.get("userId", -1),
+                                                tenant_id=jwt_payload.get("tenantId", -1),
+                                                jwt_iat=jwt_payload.get("iat", 100),
+                                                jwt_aud=jwt_payload.get("aud", ""))
+            if jwt_payload is None \
+                    or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
+                    or not auth_exists:
+                if jwt_payload is not None:
+                    print(jwt_payload)
+                    if jwt_payload.get("iat") is None:
+                        print("JWTAuth: iat is None")
+                    if jwt_payload.get("aud") is None:
+                        print("JWTAuth: aud is None")
+                    if not auth_exists:
+                        print("JWTAuth: not users.auth_exists")

+                raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
+            return _get_current_auth_context(request=request, jwt_payload=jwt_payload)

+        print("JWTAuth: Invalid authorization code.")
+        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authorization code.")
@@ -7,7 +7,7 @@ from chalicelib.utils import helper
from chalicelib.utils.TimeUTC import TimeUTC


-def jwt_authorizer(scheme: str, token: str):
+def jwt_authorizer(scheme: str, token: str, leeway=0):
    if scheme.lower() != "bearer":
        return None
    try:
@@ -15,7 +15,8 @@ def jwt_authorizer(scheme: str, token: str):
            token,
            config("jwt_secret"),
            algorithms=config("jwt_algorithm"),
-            audience=[f"front:{helper.get_stage_name()}"]
+            audience=[f"front:{helper.get_stage_name()}"],
+            leeway=leeway
        )
    except jwt.ExpiredSignatureError:
        print("! JWT Expired signature")
@@ -27,6 +28,26 @@ def jwt_authorizer(scheme: str, token: str):
    return payload


+def jwt_refresh_authorizer(scheme: str, token: str):
+    if scheme.lower() != "bearer":
+        return None
+    try:
+        payload = jwt.decode(
+            token,
+            config("JWT_REFRESH_SECRET"),
+            algorithms=config("jwt_algorithm"),
+            audience=[f"front:{helper.get_stage_name()}"]
+        )
+    except jwt.ExpiredSignatureError:
+        print("! JWT-refresh Expired signature")
+        return None
+    except BaseException as e:
+        print("! JWT-refresh Base Exception")
+        print(e)
+        return None
+    return payload
+
+
def jwt_context(context):
    user = users.get(user_id=context["userId"], tenant_id=context["tenantId"])
    if user is None:
@@ -42,14 +63,15 @@ def get_jwt_exp(iat):
    return iat // 1000 + config("JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000


-def generate_jwt(id, tenant_id, iat, aud, exp=None):
+def generate_jwt(user_id, tenant_id, iat, aud, exp=None):
    token = jwt.encode(
        payload={
-            "userId": id,
+            "userId": user_id,
            "tenantId": tenant_id,
-            "exp": exp + TimeUTC.get_utc_offset() // 1000 if exp is not None else get_jwt_exp(iat),
+            "exp": exp + TimeUTC.get_utc_offset() // 1000 if exp is not None else iat + config("JWT_EXPIRATION",
+                                                                                               cast=int),
            "iss": config("JWT_ISSUER"),
-            "iat": iat // 1000,
+            "iat": iat,
            "aud": aud
        },
        key=config("jwt_secret"),
@@ -58,6 +80,23 @@ def generate_jwt(id, tenant_id, iat, aud, exp=None):
    return token


+def generate_jwt_refresh(user_id, tenant_id, iat, aud, jwt_jti):
+    token = jwt.encode(
+        payload={
+            "userId": user_id,
+            "tenantId": tenant_id,
+            "exp": iat + config("JWT_REFRESH_EXPIRATION", cast=int),
+            "iss": config("JWT_ISSUER"),
+            "iat": iat,
+            "aud": aud,
+            "jti": jwt_jti
+        },
+        key=config("JWT_REFRESH_SECRET"),
+        algorithm=config("jwt_algorithm")
+    )
+    return token
+
+
def api_key_authorizer(token):
    t = tenants.get_by_api_key(token)
    if t is not None:
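Note on the two helpers above: `generate_jwt` now issues a short-lived access token (`JWT_EXPIRATION`) while `generate_jwt_refresh` issues a long-lived refresh token (`JWT_REFRESH_EXPIRATION`) that carries a `jti` counter and is signed with a separate secret. A minimal, self-contained PyJWT sketch of that round-trip follows; the secrets, lifetimes, audience, and the `issue_pair`/`refresh_access` names are illustrative assumptions, not OpenReplay's API.

```python
# Minimal sketch of the access/refresh token pair introduced above (assumed constants, not project config).
import datetime
import jwt  # PyJWT, as pinned in the requirements files in this PR (pyjwt==2.8.0)

ACCESS_SECRET = "access-secret"    # stands in for jwt_secret
REFRESH_SECRET = "refresh-secret"  # stands in for JWT_REFRESH_SECRET
AUD = "front:default"              # stands in for f"front:{helper.get_stage_name()}"


def issue_pair(user_id: int, jti: int) -> tuple[str, str]:
    now = int(datetime.datetime.now(datetime.timezone.utc).timestamp())
    access = jwt.encode({"userId": user_id, "iat": now, "exp": now + 120, "aud": AUD},
                        key=ACCESS_SECRET, algorithm="HS512")
    refresh = jwt.encode({"userId": user_id, "iat": now, "exp": now + 604800, "aud": AUD, "jti": jti},
                         key=REFRESH_SECRET, algorithm="HS512")
    return access, refresh


def refresh_access(refresh_token: str, expired_access: str) -> str | None:
    # Strictly validate the refresh token; accept the old access token even if expired,
    # within a leeway window (mirrors jwt_refresh_authorizer + jwt_authorizer(leeway=...)).
    try:
        r = jwt.decode(refresh_token, REFRESH_SECRET, algorithms=["HS512"], audience=AUD)
        a = jwt.decode(expired_access, ACCESS_SECRET, algorithms=["HS512"], audience=AUD,
                       leeway=datetime.timedelta(days=3))
    except jwt.InvalidTokenError:
        return None
    # Both tokens must belong to the same user, and the refresh token must carry a jti.
    if r.get("jti") is None or r.get("userId") != a.get("userId"):
        return None
    now = int(datetime.datetime.now(datetime.timezone.utc).timestamp())
    return jwt.encode({"userId": r["userId"], "iat": now, "exp": now + 120, "aud": AUD},
                      key=ACCESS_SECRET, algorithm="HS512")
```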
@@ -5,7 +5,8 @@ from decouple import config
from fastapi import HTTPException, status

import schemas
-from chalicelib.core import funnels, issues, metrics, click_maps, sessions_insights, sessions_mobs, sessions_favorite
+from chalicelib.core import funnels, issues, metrics, click_maps, sessions_insights, sessions_mobs, sessions_favorite, \
+    product_analytics
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.storage import StorageClient, extra
@@ -129,14 +130,15 @@ def __get_insights_chart(project_id: int, data: schemas.CardInsights, user_id: i
                                     series=data.series))


-def __get_path_analysis_chart(project_id, data: schemas.CardSchema):
+def __get_path_analysis_chart(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
    if len(data.series) == 0:
-        data.series.append(schemas.CardSeriesSchema())
+        data.series.append(
+            schemas.CardPathAnalysisSchema(startTimestamp=data.startTimestamp, endTimestamp=data.endTimestamp))
    elif not isinstance(data.series[0].filter, schemas.PathAnalysisSchema):
        data.series[0].filter = schemas.PathAnalysisSchema()

-    return product_analytics.path_analysis(project_id=project_id,
-                                           data=schemas.PathAnalysisSchema(**data.series[0].filter.model_dump()))
+    return product_analytics.path_analysis(project_id=project_id, data=data.series[0].filter, density=data.density,
+                                           selected_event_type=data.metric_value, hide_minor_paths=data.hide_excess)


def __is_path_analysis(data: schemas.CardSchema):
@@ -216,7 +218,7 @@ def get_chart(project_id: int, data: schemas.CardSchema, user_id: int):
        schemas.MetricType.click_map: __get_click_map_chart,
        schemas.MetricType.funnel: __get_funnel_chart,
        schemas.MetricType.insights: __get_insights_chart,
-        schemas.MetricType.pathAnalysis: empty
+        schemas.MetricType.pathAnalysis: __get_path_analysis_chart
    }
    return supported.get(data.metric_type, empty)(project_id=project_id, data=data, user_id=user_id)
@@ -1,7 +1,6 @@
import ast
from typing import List, Union

import schemas
-import schemas
from chalicelib.core import events, metadata, projects, performance_event, metrics
from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
@@ -728,15 +727,20 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
            event_where.append(f"main.event_type='{__get_event_type(event_type)}'")
            events_conditions.append({"type": event_where[-1]})
            if not is_any:
                if is_not:
                    event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
                                                            value_key=e_k))
                    events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"})
                    events_conditions_not[-1]["condition"] = event_where[-1]
                else:
                    event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
                                                            value_key=e_k))
                    if event.operator == schemas.ClickEventExtraOperator._on_selector:
                        event_where.append(
                            _multiple_conditions(f"main.selector = %({e_k})s", event.value, value_key=e_k))
                    events_conditions[-1]["condition"] = event_where[-1]
            else:
                if is_not:
                    event_where.append(_multiple_conditions(f"sub.{_column} {op} %({e_k})s", event.value,
                                                            value_key=e_k))
                    events_conditions_not.append({"type": f"sub.event_type='{__get_event_type(event_type)}'"})
                    events_conditions_not[-1]["condition"] = event_where[-1]
                else:
                    event_where.append(_multiple_conditions(f"main.{_column} {op} %({e_k})s", event.value,
                                                            value_key=e_k))
                    events_conditions[-1]["condition"] = event_where[-1]

        elif event_type == events.EventType.INPUT.ui_type:
            event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main "
@@ -117,6 +117,8 @@ def create_tenant(data: schemas.UserSignupSchema):
    }
    return {
        'jwt': r.pop('jwt'),
+        'refreshToken': r.pop('refreshToken'),
+        'refreshTokenMaxAge': r.pop('refreshTokenMaxAge'),
        'data': {
            "user": r,
            "client": c,
@@ -632,7 +632,7 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
        cur.execute(
            cur.mogrify(
                f"""SELECT user_id,
-                          jwt_iat,
+                          EXTRACT(epoch FROM jwt_iat)::BIGINT AS jwt_iat,
                           changed_at,
                           service_account,
                           basic_authentication.user_id IS NOT NULL AS has_basic_auth
@@ -648,22 +648,57 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
    return r is not None \
           and (r["service_account"] and not r["has_basic_auth"]
                or r.get("jwt_iat") is not None \
-               and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1))
+               and (abs(jwt_iat - r["jwt_iat"]) <= 1))


-def change_jwt_iat(user_id):
+def refresh_auth_exists(user_id, tenant_id, jwt_iat, jwt_aud, jwt_jti=None):
    with pg_client.PostgresClient() as cur:
-        query = cur.mogrify(
-            f"""UPDATE public.users
-                SET jwt_iat = timezone('utc'::text, now())
-                WHERE user_id = %(user_id)s
-                RETURNING jwt_iat;""",
-            {"user_id": user_id})
+        cur.execute(
+            cur.mogrify(f"""SELECT user_id
+                            FROM public.users
+                            WHERE user_id = %(userId)s
+                              AND tenant_id= %(tenant_id)s
+                              AND deleted_at IS NULL
+                              AND jwt_refresh_jti = %(jwt_jti)s
+                            LIMIT 1;""",
+                        {"userId": user_id, "tenant_id": tenant_id, "jwt_jti": jwt_jti})
+        )
+        r = cur.fetchone()
+    return r is not None
+
+
+def change_jwt_iat_jti(user_id):
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(f"""UPDATE public.users
+                                SET jwt_iat = timezone('utc'::text, now()),
+                                    jwt_refresh_jti = 0,
+                                    jwt_refresh_iat = timezone('utc'::text, now())
+                                WHERE user_id = %(user_id)s
+                                RETURNING EXTRACT (epoch FROM jwt_iat)::BIGINT AS jwt_iat,
+                                          jwt_refresh_jti,
+                                          EXTRACT (epoch FROM jwt_refresh_iat)::BIGINT AS jwt_refresh_iat;""",
+                            {"user_id": user_id})
        cur.execute(query)
-        return cur.fetchone().get("jwt_iat")
+        row = cur.fetchone()
+        return row.get("jwt_iat"), row.get("jwt_refresh_jti"), row.get("jwt_refresh_iat")


-def authenticate(email, password, for_change_password=False) -> dict | None:
+def refresh_jwt_iat_jti(user_id):
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(f"""UPDATE public.users
+                                SET jwt_iat = timezone('utc'::text, now()),
+                                    jwt_refresh_jti = jwt_refresh_jti + 1
+                                WHERE user_id = %(user_id)s
+                                RETURNING EXTRACT (epoch FROM jwt_iat)::BIGINT AS jwt_iat,
+                                          jwt_refresh_jti,
+                                          EXTRACT (epoch FROM jwt_refresh_iat)::BIGINT AS jwt_refresh_iat""",
+                            {"user_id": user_id})
+        cur.execute(query)
+        row = cur.fetchone()
+        return row.get("jwt_iat"), row.get("jwt_refresh_jti"), row.get("jwt_refresh_iat")
+
+
+def authenticate(email, password, for_change_password=False) -> dict | bool | None:
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            f"""SELECT
@@ -713,11 +748,16 @@ def authenticate(email, password, for_change_password=False) -> dict | None:
    elif config("enforce_SSO", cast=bool, default=False) and helper.is_saml2_available():
        return {"errors": ["must sign-in with SSO, enforced by admin"]}

-    jwt_iat = change_jwt_iat(r['userId'])
-    iat = TimeUTC.datetime_to_timestamp(jwt_iat)
+    jwt_iat, jwt_r_jti, jwt_r_iat = change_jwt_iat_jti(user_id=r['userId'])
+    # jwt_iat = TimeUTC.datetime_to_timestamp(jwt_iat)
+    # jwt_r_iat = TimeUTC.datetime_to_timestamp(jwt_r_iat)
    return {
-        "jwt": authorizers.generate_jwt(r['userId'], r['tenantId'], iat=iat,
+        "jwt": authorizers.generate_jwt(user_id=r['userId'], tenant_id=r['tenantId'], iat=jwt_iat,
                                        aud=f"front:{helper.get_stage_name()}"),
+        "refreshToken": authorizers.generate_jwt_refresh(user_id=r['userId'], tenant_id=r['tenantId'],
+                                                         iat=jwt_r_iat, aud=f"front:{helper.get_stage_name()}",
+                                                         jwt_jti=jwt_r_jti),
+        "refreshTokenMaxAge": config("JWT_REFRESH_EXPIRATION", cast=int),
        "email": email,
        **r
    }
@@ -791,6 +831,27 @@ def __hard_delete_user(user_id):
        cur.execute(query)


+def logout(user_id: int):
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(
+            """UPDATE public.users
+               SET jwt_iat = NULL, jwt_refresh_jti = NULL, jwt_refresh_iat = NULL
+               WHERE user_id = %(user_id)s;""",
+            {"user_id": user_id})
+        cur.execute(query)
+
+
+def refresh(user_id: int, tenant_id: int) -> dict:
+    jwt_iat, jwt_r_jti, jwt_r_iat = refresh_jwt_iat_jti(user_id=user_id)
+    return {
+        "jwt": authorizers.generate_jwt(user_id=user_id, tenant_id=tenant_id, iat=jwt_iat,
+                                        aud=f"front:{helper.get_stage_name()}"),
+        "refreshToken": authorizers.generate_jwt_refresh(user_id=user_id, tenant_id=tenant_id,
+                                                         iat=jwt_r_iat, aud=f"front:{helper.get_stage_name()}",
+                                                         jwt_jti=jwt_r_jti),
+        "refreshTokenMaxAge": config("JWT_REFRESH_EXPIRATION", cast=int) - (jwt_iat - jwt_r_iat)
+    }
+
+
def authenticate_sso(email, internal_id, exp=None):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
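The three users-table helpers above hinge on a single integer jti counter per user. A tiny in-memory sketch of that bookkeeping, with a plain dict standing in for the `jwt_refresh_jti` / `jwt_refresh_iat` columns (function names here are illustrative, not the module's API):

```python
# In-memory sketch of the jti rotation performed by change_jwt_iat_jti / refresh_jwt_iat_jti /
# refresh_auth_exists above; a dict stands in for public.users.
import time

_users: dict[int, dict] = {}


def on_login(user_id: int) -> tuple[int, int, int]:
    # change_jwt_iat_jti: reset the counter and stamp both iats.
    now = int(time.time())
    _users[user_id] = {"jwt_iat": now, "jwt_refresh_jti": 0, "jwt_refresh_iat": now}
    u = _users[user_id]
    return u["jwt_iat"], u["jwt_refresh_jti"], u["jwt_refresh_iat"]


def on_refresh(user_id: int) -> tuple[int, int, int]:
    # refresh_jwt_iat_jti: bump the counter so previously issued refresh tokens stop matching.
    u = _users[user_id]
    u["jwt_iat"] = int(time.time())
    u["jwt_refresh_jti"] += 1
    return u["jwt_iat"], u["jwt_refresh_jti"], u["jwt_refresh_iat"]


def refresh_token_valid(user_id: int, jti: int) -> bool:
    # refresh_auth_exists: a presented refresh token is valid only if its jti matches the stored one.
    u = _users.get(user_id)
    return u is not None and u["jwt_refresh_jti"] == jti
```

Note that `refresh_jwt_iat_jti` deliberately leaves `jwt_refresh_iat` untouched, which appears to be why `refresh()` shortens `refreshTokenMaxAge` by `(jwt_iat - jwt_r_iat)`: rotated refresh tokens never outlive the window opened at login.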
@@ -29,9 +29,11 @@ idp_x509cert=
invitation_link=/api/users/invitation?token=%s
js_cache_bucket=sessions-assets
jwt_algorithm=HS512
-JWT_EXPIRATION=2592000
-JWT_ISSUER=openreplay-ee
+JWT_EXPIRATION=120
+JWT_REFRESH_EXPIRATION=604800
+JWT_ISSUER=openreplay-oss
jwt_secret="SET A RANDOM STRING HERE"
+JWT_REFRESH_SECRET="SET A RANDOM STRING HERE"
ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s
ASSIST_KEY=
assist=/sockets-live
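For context, the new variables above are read with python-decouple at the call sites shown earlier in this diff; a minimal sketch follows (the values in the comments mirror this file's defaults, actual deployments override them):

```python
# Sketch: how the new JWT settings are consumed (python-decouple), mirroring config() calls in this PR.
from decouple import config

JWT_EXPIRATION = config("JWT_EXPIRATION", cast=int)                  # 120 -> 2-minute access tokens
JWT_REFRESH_EXPIRATION = config("JWT_REFRESH_EXPIRATION", cast=int)  # 604800 -> 7-day refresh tokens
JWT_REFRESH_SECRET = config("JWT_REFRESH_SECRET")                    # keep distinct from jwt_secret
```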
@@ -58,10 +58,10 @@ class ORRoute(APIRoute):
def __check(security_scopes: SecurityScopes, context: schemas.CurrentContext = Depends(OR_context)):
    s_p = 0
    for scope in security_scopes.scopes:
-        if isinstance(scope, schemas_ee.ServicePermissions):
+        if isinstance(scope, schemas.ServicePermissions):
            s_p += 1
-        if context.service_account and not isinstance(scope, schemas_ee.ServicePermissions) \
-                or not context.service_account and not isinstance(scope, schemas_ee.Permissions):
+        if context.service_account and not isinstance(scope, schemas.ServicePermissions) \
+                or not context.service_account and not isinstance(scope, schemas.Permissions):
            continue
        if scope not in context.permissions:
            raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
-boto3==1.28.41
+boto3==1.28.42
pyjwt==2.8.0
psycopg2-binary==2.9.7
elasticsearch==8.9.0
@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
-boto3==1.28.41
+boto3==1.28.42
pyjwt==2.8.0
psycopg2-binary==2.9.7
elasticsearch==8.9.0
@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
-boto3==1.28.41
+boto3==1.28.42
pyjwt==2.8.0
psycopg2-binary==2.9.7
elasticsearch==8.9.0
@@ -3,7 +3,7 @@ from typing import Optional, Union
from decouple import config
from fastapi import Body, Depends, BackgroundTasks, Request
from fastapi import HTTPException, status
-from starlette.responses import RedirectResponse, FileResponse
+from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Response

import schemas
from chalicelib.core import sessions, assist, heatmaps, sessions_favorite, sessions_assignments, errors, errors_viewed, \
@@ -39,11 +39,17 @@ if config("MULTI_TENANTS", cast=bool, default=False) or not tenants.tenants_exis
    @public_app.post('/signup', tags=['signup'])
    @public_app.put('/signup', tags=['signup'])
    def signup_handler(data: schemas.UserSignupSchema = Body(...)):
-        return signup.create_tenant(data)
+        content = signup.create_tenant(data)
+        refresh_token = content.pop("refreshToken")
+        refresh_token_max_age = content.pop("refreshTokenMaxAge")
+        response = JSONResponse(content=content)
+        response.set_cookie(key="refreshToken", value=refresh_token, path="/api/refresh",
+                            max_age=refresh_token_max_age, secure=True, httponly=True)
+        return response


@public_app.post('/login', tags=["authentication"])
-def login_user(data: schemas.UserLoginSchema = Body(...)):
+def login_user(response: JSONResponse, data: schemas.UserLoginSchema = Body(...)):
    if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
@@ -63,21 +69,37 @@ def login_user(data: schemas.UserLoginSchema = Body(...)):
        )

    r["smtp"] = smtp.has_smtp()
+    refresh_token = r.pop("refreshToken")
+    refresh_token_max_age = r.pop("refreshTokenMaxAge")
    content = {
        'jwt': r.pop('jwt'),
        'data': {
            "user": r
        }
    }
-    return content
+    response = JSONResponse(content=content)
+    response.set_cookie(key="refreshToken", value=refresh_token, path="/api/refresh",
+                        max_age=refresh_token_max_age, secure=True, httponly=True)
+    return response


-@app.get('/logout', tags=["login", "logout"])
-def logout_user(context: schemas.CurrentContext = Depends(OR_context)):
+@app.get('/logout', tags=["login"])
+def logout_user(response: Response, context: schemas.CurrentContext = Depends(OR_context)):
+    users.logout(user_id=context.user_id)
+    response.delete_cookie(key="refreshToken", path="/api/refresh")
    return {"data": "success"}


+@app.get('/refresh', tags=["login"])
+def refresh_login(context: schemas.CurrentContext = Depends(OR_context)):
+    r = users.refresh(user_id=context.user_id, tenant_id=context.tenant_id)
+    content = {"jwt": r.get("jwt")}
+    response = JSONResponse(content=content)
+    response.set_cookie(key="refreshToken", value=r.get("refreshToken"), path="/api/refresh",
+                        max_age=r.pop("refreshTokenMaxAge"), secure=True, httponly=True)
+    return response
+
+
@app.get('/account', tags=['accounts'])
def get_account(context: schemas.CurrentContext = Depends(OR_context)):
    r = users.get(tenant_id=context.tenant_id, user_id=context.user_id)
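End to end, the routes above give the client this contract: /login and /signup return the access token in the JSON body and set an httponly refreshToken cookie scoped to /api/refresh; /refresh exchanges that cookie (plus the expired bearer, accepted with server-side leeway) for a fresh pair; /logout clears both the server-side state and the cookie. A hypothetical client-side walkthrough with requests (the host and credentials are invented):

```python
# Hypothetical client walkthrough of the new auth flow (invented host and credentials).
import requests

BASE = "https://openreplay.example.com/api"
s = requests.Session()  # keeps the httponly refreshToken cookie between calls

# 1) Login: access token in the body, refresh token in a cookie scoped to /api/refresh.
r = s.post(f"{BASE}/login", json={"email": "user@example.com", "password": "secret"})
jwt_token = r.json()["jwt"]

# 2) Authenticated calls carry the short-lived bearer token.
s.get(f"{BASE}/account", headers={"Authorization": f"Bearer {jwt_token}"})

# 3) When it expires, /refresh accepts the expired bearer plus the cookie,
#    returns a new jwt, and rotates the refreshToken cookie.
r = s.get(f"{BASE}/refresh", headers={"Authorization": f"Bearer {jwt_token}"})
jwt_token = r.json()["jwt"]

# 4) Logout invalidates the jti server-side and deletes the cookie.
s.get(f"{BASE}/logout", headers={"Authorization": f"Bearer {jwt_token}"})
```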
@@ -3,5 +3,8 @@ CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.15.0-ee';
ALTER TABLE experimental.events
    ADD COLUMN IF NOT EXISTS transfer_size Nullable(UInt32);

+ALTER TABLE experimental.events
+    ADD COLUMN IF NOT EXISTS selector Nullable(String);
+
ALTER TABLE experimental.sessions
    ADD COLUMN IF NOT EXISTS timezone LowCardinality(Nullable(String));

@@ -77,9 +77,10 @@ CREATE TABLE IF NOT EXISTS experimental.events
    response_body Nullable(String),
    issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20,'app_crash'=21)),
    issue_id Nullable(String),
    error_tags_keys Array(String),
    error_tags_values Array(Nullable(String)),
+    transfer_size Nullable(UInt32),
+    selector Nullable(String),
    message_id UInt64 DEFAULT 0,
    _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)

@@ -152,7 +153,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
    metadata_8 Nullable(String),
    metadata_9 Nullable(String),
    metadata_10 Nullable(String),
    issue_types Array(LowCardinality(String)),
    referrer Nullable(String),
    base_referrer Nullable(String) MATERIALIZED lower(concat(domain(referrer), path(referrer))),
    issue_score Nullable(UInt32),

@@ -203,7 +204,7 @@ CREATE TABLE IF NOT EXISTS experimental.issues
    issue_id String,
    type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20,'app_crash'=21),
    context_string String,
    context_keys Array(String),
    context_values Array(Nullable(String)),
    _timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)

@@ -276,6 +277,7 @@ SELECT session_id,
       error_tags_keys,
       error_tags_values,
       transfer_size,
       selector,
       message_id,
       _timestamp
FROM experimental.events
@@ -24,6 +24,94 @@ ALTER TABLE IF EXISTS events_common.requests
ALTER TABLE IF EXISTS public.sessions
    ADD COLUMN IF NOT EXISTS timezone text NULL;

+ALTER TABLE IF EXISTS public.projects
+    ADD COLUMN IF NOT EXISTS platform public.platform NOT NULL DEFAULT 'web';
+
+
+CREATE TABLE IF NOT EXISTS public.crashes_ios
+(
+    crash_ios_id text NOT NULL PRIMARY KEY,
+    project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+    name text NOT NULL,
+    reason text NOT NULL,
+    stacktrace text NOT NULL
+);
+CREATE INDEX IF NOT EXISTS crashes_ios_project_id_crash_ios_id_idx ON public.crashes_ios (project_id, crash_ios_id);
+CREATE INDEX IF NOT EXISTS crashes_ios_project_id_idx ON public.crashes_ios (project_id);
+
+CREATE TABLE IF NOT EXISTS events_common.crashes
+(
+    session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+    timestamp bigint NOT NULL,
+    seq_index integer NOT NULL,
+    crash_ios_id text NULL REFERENCES public.crashes_ios (crash_ios_id) ON DELETE CASCADE,
+    PRIMARY KEY (session_id, timestamp, seq_index)
+);
+CREATE INDEX IF NOT EXISTS crashes_crash_ios_id_timestamp_idx ON events_common.crashes (crash_ios_id, timestamp);
+CREATE INDEX IF NOT EXISTS crashes_timestamp_idx ON events_common.crashes (timestamp);
+
+
+CREATE SCHEMA IF NOT EXISTS events_ios;
+
+CREATE TABLE IF NOT EXISTS events_ios.views
+(
+    session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+    timestamp bigint NOT NULL,
+    seq_index integer NOT NULL,
+    name text NOT NULL,
+    PRIMARY KEY (session_id, timestamp, seq_index)
+);
+
+CREATE TABLE IF NOT EXISTS events_ios.taps
+(
+    session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+    timestamp bigint NOT NULL,
+    seq_index integer NOT NULL,
+    label text NOT NULL,
+    PRIMARY KEY (session_id, timestamp, seq_index)
+);
+CREATE INDEX IF NOT EXISTS taps_session_id_idx ON events_ios.taps (session_id);
+CREATE INDEX IF NOT EXISTS taps_label_idx ON events_ios.taps (label);
+CREATE INDEX IF NOT EXISTS taps_label_gin_idx ON events_ios.taps USING GIN (label gin_trgm_ops);
+CREATE INDEX IF NOT EXISTS taps_timestamp_idx ON events_ios.taps (timestamp);
+CREATE INDEX IF NOT EXISTS taps_label_session_id_timestamp_idx ON events_ios.taps (label, session_id, timestamp);
+CREATE INDEX IF NOT EXISTS taps_session_id_timestamp_idx ON events_ios.taps (session_id, timestamp);
+
+
+CREATE TABLE IF NOT EXISTS events_ios.inputs
+(
+    session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+    timestamp bigint NOT NULL,
+    seq_index integer NOT NULL,
+    label text NOT NULL,
+    PRIMARY KEY (session_id, timestamp, seq_index)
+);
+CREATE INDEX IF NOT EXISTS inputs_session_id_idx ON events_ios.inputs (session_id);
+CREATE INDEX IF NOT EXISTS inputs_label_gin_idx ON events_ios.inputs USING GIN (label gin_trgm_ops);
+CREATE INDEX IF NOT EXISTS inputs_timestamp_idx ON events_ios.inputs (timestamp);
+CREATE INDEX IF NOT EXISTS inputs_label_session_id_timestamp_idx ON events_ios.inputs (label, session_id, timestamp);
+
+
+CREATE TABLE IF NOT EXISTS events_ios.swipes
+(
+    session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+    timestamp bigint NOT NULL,
+    seq_index integer NOT NULL,
+    label text NOT NULL,
+    direction text NOT NULL,
+    PRIMARY KEY (session_id, timestamp, seq_index)
+);
+CREATE INDEX IF NOT EXISTS swipes_session_id_idx ON events_ios.swipes (session_id);
+CREATE INDEX IF NOT EXISTS swipes_label_gin_idx ON events_ios.swipes USING GIN (label gin_trgm_ops);
+CREATE INDEX IF NOT EXISTS swipes_timestamp_idx ON events_ios.swipes (timestamp);
+CREATE INDEX IF NOT EXISTS swipes_label_session_id_timestamp_idx ON events_ios.swipes (label, session_id, timestamp);
+
+ALTER TYPE issue_type ADD VALUE IF NOT EXISTS 'tap_rage';
+
+ALTER TABLE IF EXISTS public.users
+    ADD COLUMN IF NOT EXISTS jwt_refresh_jti integer NULL DEFAULT NULL,
+    ADD COLUMN IF NOT EXISTS jwt_refresh_iat timestamp without time zone NULL DEFAULT NULL;

COMMIT;

\elif :is_next
@@ -1,13 +1,15 @@
BEGIN;
-- Schemas and functions definitions:
CREATE SCHEMA IF NOT EXISTS events_common;
CREATE SCHEMA IF NOT EXISTS events;
+CREATE SCHEMA IF NOT EXISTS events_ios;
CREATE EXTENSION IF NOT EXISTS pg_trgm;
CREATE EXTENSION IF NOT EXISTS pgcrypto;

CREATE OR REPLACE FUNCTION openreplay_version()
    RETURNS text AS
$$
-SELECT 'v1.14.0-ee'
+SELECT 'v1.15.0-ee'
$$ LANGUAGE sql IMMUTABLE;


@@ -30,7 +32,6 @@ end;
$$ LANGUAGE plpgsql;


CREATE OR REPLACE FUNCTION events.funnel(steps integer[], m integer) RETURNS boolean AS
$$
DECLARE

@@ -55,7 +56,6 @@ END;
$$ LANGUAGE plpgsql IMMUTABLE;


CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS
$$
BEGIN

@@ -71,7 +71,6 @@ END;
$$ LANGUAGE plpgsql;


CREATE OR REPLACE FUNCTION notify_alert() RETURNS trigger AS
$$
DECLARE

@@ -96,7 +95,7 @@ BEGIN
END;
$$ LANGUAGE plpgsql;

-- All tables and types:

DO
$$
@@ -136,7 +135,8 @@ $$
            ('frontend_signals'),
            ('feature_flags'),
            ('feature_flags_conditions'),
-            ('sessions_feature_flags'))
+            ('sessions_feature_flags'),
+            ('crashes_ios'))
           select bool_and(exists(select *
                                  from information_schema.tables t
                                  where table_schema = 'public'

@@ -196,6 +196,8 @@ $$
        deleted_at timestamp without time zone NULL DEFAULT NULL,
        api_key text UNIQUE DEFAULT generate_api_key(20) NOT NULL,
        jwt_iat timestamp without time zone NULL DEFAULT NULL,
+        jwt_refresh_jti integer NULL DEFAULT NULL,
+        jwt_refresh_iat timestamp without time zone NULL DEFAULT NULL,
        data jsonb NOT NULL DEFAULT'{}'::jsonb,
        weekly_report boolean NOT NULL DEFAULT TRUE,
        origin text NULL DEFAULT NULL,

@@ -411,6 +413,7 @@ $$
                   WHERE typ.typname = 'issue_type') THEN
            CREATE TYPE issue_type AS ENUM (
                'click_rage',
+                'tap_rage',
                'dead_click',
                'excessive_scrolling',
                'bad_request',

@@ -543,11 +546,11 @@ $$
        watchdogs_score bigint NOT NULL DEFAULT 0,
        issue_score bigint NOT NULL DEFAULT 0,
        issue_types issue_type[] NOT NULL DEFAULT '{}'::issue_type[],
-        utm_source text DEFAULT NULL,
-        utm_medium text DEFAULT NULL,
-        utm_campaign text DEFAULT NULL,
-        referrer text DEFAULT NULL,
-        base_referrer text DEFAULT NULL,
+        utm_source text NULL DEFAULT NULL,
+        utm_medium text NULL DEFAULT NULL,
+        utm_campaign text NULL DEFAULT NULL,
+        referrer text NULL DEFAULT NULL,
+        base_referrer text NULL DEFAULT NULL,
        file_key bytea DEFAULT NULL,
        metadata_1 text DEFAULT NULL,
        metadata_2 text DEFAULT NULL,

@@ -808,6 +811,7 @@ $$
        config jsonb NOT NULL DEFAULT '{}'::jsonb
    );


    CREATE TABLE IF NOT EXISTS searches
    (
        search_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
@@ -906,9 +910,9 @@ $$
    (
        feature_flag_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
        project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
        name text NOT NULL,
        flag_key text NOT NULL,
-        description text NOT NULL,
+        description text DEFAULT NULL,
        payload jsonb DEFAULT NULL,
        flag_type text NOT NULL,
        is_persist boolean NOT NULL DEFAULT FALSE,
        is_active boolean NOT NULL DEFAULT FALSE,

@@ -921,6 +925,9 @@ $$
    CREATE INDEX IF NOT EXISTS idx_feature_flags_project_id ON public.feature_flags (project_id);

+    ALTER TABLE feature_flags
+        ADD CONSTRAINT unique_project_flag_deleted UNIQUE (project_id, flag_key, deleted_at);
+
    CREATE TABLE IF NOT EXISTS public.feature_flags_conditions
    (
        condition_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,

@@ -930,6 +937,16 @@ $$
        filters jsonb NOT NULL DEFAULT '[]'::jsonb
    );

+    CREATE TABLE IF NOT EXISTS public.feature_flags_variants
+    (
+        variant_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+        feature_flag_id integer NOT NULL REFERENCES feature_flags (feature_flag_id) ON DELETE CASCADE,
+        value text NOT NULL,
+        description text DEFAULT NULL,
+        payload jsonb DEFAULT NULL,
+        rollout_percentage integer DEFAULT 0
+    );
+
    CREATE TABLE IF NOT EXISTS public.sessions_feature_flags
    (
        session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,

@@ -937,6 +954,17 @@ $$
        condition_id integer NULL REFERENCES feature_flags_conditions (condition_id) ON DELETE SET NULL
    );

+    CREATE TABLE IF NOT EXISTS public.crashes_ios
+    (
+        crash_ios_id text NOT NULL PRIMARY KEY,
+        project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+        name text NOT NULL,
+        reason text NOT NULL,
+        stacktrace text NOT NULL
+    );
+    CREATE INDEX IF NOT EXISTS crashes_ios_project_id_crash_id_idx ON public.crashes_ios (project_id, crash_ios_id);
+    CREATE INDEX IF NOT EXISTS crashes_ios_project_id_idx ON public.crashes_ios (project_id);
+
    RAISE NOTICE 'Created missing public schema tables';
END IF;
END;
@@ -1212,7 +1240,8 @@ $$
BEGIN
    IF (with to_check (name) as (values ('customs'),
                                        ('issues'),
-                                        ('requests'))
+                                        ('requests'),
+                                        ('crashes'))
        select bool_and(exists(select *
                               from information_schema.tables t
                               where table_schema = 'events_common'

@@ -1288,6 +1317,89 @@ $$
        CREATE INDEX IF NOT EXISTS requests_path_nn_gin_idx ON events_common.requests USING GIN (path gin_trgm_ops) WHERE path IS NOT NULL;
        CREATE INDEX IF NOT EXISTS requests_query_nn_gin_idx ON events_common.requests USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL;

+        CREATE TABLE IF NOT EXISTS events_common.crashes
+        (
+            session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+            timestamp bigint NOT NULL,
+            seq_index integer NOT NULL,
+            crash_ios_id text NULL REFERENCES public.crashes_ios (crash_ios_id) ON DELETE CASCADE,
+            PRIMARY KEY (session_id, timestamp, seq_index)
+        );
+        CREATE INDEX IF NOT EXISTS crashes_crash_ios_id_timestamp_idx ON events_common.crashes (crash_ios_id, timestamp);
+        CREATE INDEX IF NOT EXISTS crashes_timestamp_idx ON events_common.crashes (timestamp);
    END IF;
END;
$$
LANGUAGE plpgsql;

+
+DO
+$$
+BEGIN
+    IF (with to_check (name) as (values ('views'),
+                                        ('taps'),
+                                        ('inputs'),
+                                        ('swipes'))
+        select bool_and(exists(select *
+                               from information_schema.tables t
+                               where table_schema = 'events_ios'
+                                 AND table_name = to_check.name)) as all_present
+        from to_check) THEN
+        raise notice 'All events_common schema tables exists';
+    ELSE
+        CREATE TABLE IF NOT EXISTS events_ios.views
+        (
+            session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+            timestamp bigint NOT NULL,
+            seq_index integer NOT NULL,
+            name text NOT NULL,
+            PRIMARY KEY (session_id, timestamp, seq_index)
+        );
+
+        CREATE TABLE IF NOT EXISTS events_ios.taps
+        (
+            session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+            timestamp bigint NOT NULL,
+            seq_index integer NOT NULL,
+            label text NOT NULL,
+            PRIMARY KEY (session_id, timestamp, seq_index)
+        );
+        CREATE INDEX IF NOT EXISTS taps_session_id_idx ON events_ios.taps (session_id);
+        CREATE INDEX IF NOT EXISTS taps_label_idx ON events_ios.taps (label);
+        CREATE INDEX IF NOT EXISTS taps_label_gin_idx ON events_ios.taps USING GIN (label gin_trgm_ops);
+        CREATE INDEX IF NOT EXISTS taps_timestamp_idx ON events_ios.taps (timestamp);
+        CREATE INDEX IF NOT EXISTS taps_label_session_id_timestamp_idx ON events_ios.taps (label, session_id, timestamp);
+        CREATE INDEX IF NOT EXISTS taps_session_id_timestamp_idx ON events_ios.taps (session_id, timestamp);
+
+
+        CREATE TABLE IF NOT EXISTS events_ios.inputs
+        (
+            session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+            timestamp bigint NOT NULL,
+            seq_index integer NOT NULL,
+            label text NOT NULL,
+            PRIMARY KEY (session_id, timestamp, seq_index)
+        );
+        CREATE INDEX IF NOT EXISTS inputs_session_id_idx ON events_ios.inputs (session_id);
+        CREATE INDEX IF NOT EXISTS inputs_label_gin_idx ON events_ios.inputs USING GIN (label gin_trgm_ops);
+        CREATE INDEX IF NOT EXISTS inputs_timestamp_idx ON events_ios.inputs (timestamp);
+        CREATE INDEX IF NOT EXISTS inputs_label_session_id_timestamp_idx ON events_ios.inputs (label, session_id, timestamp);
+
+        CREATE TABLE IF NOT EXISTS events_ios.swipes
+        (
+            session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+            timestamp bigint NOT NULL,
+            seq_index integer NOT NULL,
+            label text NOT NULL,
+            direction text NOT NULL,
+            PRIMARY KEY (session_id, timestamp, seq_index)
+        );
+        CREATE INDEX IF NOT EXISTS swipes_session_id_idx ON events_ios.swipes (session_id);
+        CREATE INDEX IF NOT EXISTS swipes_label_gin_idx ON events_ios.swipes USING GIN (label gin_trgm_ops);
+        CREATE INDEX IF NOT EXISTS swipes_timestamp_idx ON events_ios.swipes (timestamp);
+        CREATE INDEX IF NOT EXISTS swipes_label_session_id_timestamp_idx ON events_ios.swipes (label, session_id, timestamp);
+
+    END IF;
+END;
+$$
@@ -24,6 +24,93 @@ ALTER TABLE IF EXISTS events_common.requests
ALTER TABLE IF EXISTS public.sessions
    ADD COLUMN IF NOT EXISTS timezone text NULL;

+ALTER TABLE IF EXISTS public.projects
+    ADD COLUMN IF NOT EXISTS platform public.platform NOT NULL DEFAULT 'web';
+
+CREATE TABLE IF NOT EXISTS public.crashes_ios
+(
+    crash_ios_id text NOT NULL PRIMARY KEY,
+    project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+    name text NOT NULL,
+    reason text NOT NULL,
+    stacktrace text NOT NULL
+);
+CREATE INDEX IF NOT EXISTS crashes_ios_project_id_crash_ios_id_idx ON public.crashes_ios (project_id, crash_ios_id);
+CREATE INDEX IF NOT EXISTS crashes_ios_project_id_idx ON public.crashes_ios (project_id);
+
+CREATE TABLE IF NOT EXISTS events_common.crashes
+(
+    session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+    timestamp bigint NOT NULL,
+    seq_index integer NOT NULL,
+    crash_ios_id text NULL REFERENCES public.crashes_ios (crash_ios_id) ON DELETE CASCADE,
+    PRIMARY KEY (session_id, timestamp, seq_index)
+);
+CREATE INDEX IF NOT EXISTS crashes_crash_ios_id_timestamp_idx ON events_common.crashes (crash_ios_id, timestamp);
+CREATE INDEX IF NOT EXISTS crashes_timestamp_idx ON events_common.crashes (timestamp);
+
+
+CREATE SCHEMA IF NOT EXISTS events_ios;
+
+CREATE TABLE IF NOT EXISTS events_ios.views
+(
+    session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+    timestamp bigint NOT NULL,
+    seq_index integer NOT NULL,
+    name text NOT NULL,
+    PRIMARY KEY (session_id, timestamp, seq_index)
+);
+
+CREATE TABLE IF NOT EXISTS events_ios.taps
+(
+    session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+    timestamp bigint NOT NULL,
+    seq_index integer NOT NULL,
+    label text NOT NULL,
+    PRIMARY KEY (session_id, timestamp, seq_index)
+);
+CREATE INDEX IF NOT EXISTS taps_session_id_idx ON events_ios.taps (session_id);
+CREATE INDEX IF NOT EXISTS taps_label_idx ON events_ios.taps (label);
+CREATE INDEX IF NOT EXISTS taps_label_gin_idx ON events_ios.taps USING GIN (label gin_trgm_ops);
+CREATE INDEX IF NOT EXISTS taps_timestamp_idx ON events_ios.taps (timestamp);
+CREATE INDEX IF NOT EXISTS taps_label_session_id_timestamp_idx ON events_ios.taps (label, session_id, timestamp);
+CREATE INDEX IF NOT EXISTS taps_session_id_timestamp_idx ON events_ios.taps (session_id, timestamp);
+
+
+CREATE TABLE IF NOT EXISTS events_ios.inputs
+(
+    session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+    timestamp bigint NOT NULL,
+    seq_index integer NOT NULL,
+    label text NOT NULL,
+    PRIMARY KEY (session_id, timestamp, seq_index)
+);
+CREATE INDEX IF NOT EXISTS inputs_session_id_idx ON events_ios.inputs (session_id);
+CREATE INDEX IF NOT EXISTS inputs_label_gin_idx ON events_ios.inputs USING GIN (label gin_trgm_ops);
+CREATE INDEX IF NOT EXISTS inputs_timestamp_idx ON events_ios.inputs (timestamp);
+CREATE INDEX IF NOT EXISTS inputs_label_session_id_timestamp_idx ON events_ios.inputs (label, session_id, timestamp);
+
+
+CREATE TABLE IF NOT EXISTS events_ios.swipes
+(
+    session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+    timestamp bigint NOT NULL,
+    seq_index integer NOT NULL,
+    label text NOT NULL,
+    direction text NOT NULL,
+    PRIMARY KEY (session_id, timestamp, seq_index)
+);
+CREATE INDEX IF NOT EXISTS swipes_session_id_idx ON events_ios.swipes (session_id);
+CREATE INDEX IF NOT EXISTS swipes_label_gin_idx ON events_ios.swipes USING GIN (label gin_trgm_ops);
+CREATE INDEX IF NOT EXISTS swipes_timestamp_idx ON events_ios.swipes (timestamp);
+CREATE INDEX IF NOT EXISTS swipes_label_session_id_timestamp_idx ON events_ios.swipes (label, session_id, timestamp);
+
+ALTER TYPE issue_type ADD VALUE IF NOT EXISTS 'tap_rage';
+
+ALTER TABLE IF EXISTS public.users
+    ADD COLUMN IF NOT EXISTS jwt_refresh_jti integer NULL DEFAULT NULL,
+    ADD COLUMN IF NOT EXISTS jwt_refresh_iat timestamp without time zone NULL DEFAULT NULL;

COMMIT;

\elif :is_next
@@ -2,6 +2,7 @@ BEGIN;
-- Schemas and functions definitions:
CREATE SCHEMA IF NOT EXISTS events_common;
CREATE SCHEMA IF NOT EXISTS events;
+CREATE SCHEMA IF NOT EXISTS events_ios;
CREATE EXTENSION IF NOT EXISTS pg_trgm;
CREATE EXTENSION IF NOT EXISTS pgcrypto;
@@ -129,16 +130,18 @@ $$
    CREATE TABLE users
    (
        user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
        email text NOT NULL UNIQUE,
        role user_role NOT NULL DEFAULT 'member',
        name text NOT NULL,
        created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
        deleted_at timestamp without time zone NULL DEFAULT NULL,
        api_key text UNIQUE DEFAULT generate_api_key(20) NOT NULL,
        jwt_iat timestamp without time zone NULL DEFAULT NULL,
+        jwt_refresh_jti integer NULL DEFAULT NULL,
+        jwt_refresh_iat timestamp without time zone NULL DEFAULT NULL,
        data jsonb NOT NULL DEFAULT '{}'::jsonb,
        weekly_report boolean NOT NULL DEFAULT TRUE
    );

    CREATE TABLE basic_authentication
@@ -300,6 +303,7 @@ $$
    CREATE TYPE issue_type AS ENUM (
        'click_rage',
+        'tap_rage',
        'dead_click',
        'excessive_scrolling',
        'bad_request',
@@ -980,7 +984,7 @@ $$
    CREATE INDEX projects_stats_project_id_idx ON public.projects_stats (project_id);

-    CREATE TABLE IF NOT EXISTS public.feature_flags
+    CREATE TABLE public.feature_flags
    (
        feature_flag_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
        project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
@@ -997,12 +1001,12 @@ $$
        deleted_at timestamp without time zone NULL DEFAULT NULL
    );

-    CREATE INDEX IF NOT EXISTS idx_feature_flags_project_id ON public.feature_flags (project_id);
+    CREATE INDEX idx_feature_flags_project_id ON public.feature_flags (project_id);

    ALTER TABLE feature_flags
        ADD CONSTRAINT unique_project_flag_deleted UNIQUE (project_id, flag_key, deleted_at);

-    CREATE TABLE IF NOT EXISTS public.feature_flags_conditions
+    CREATE TABLE public.feature_flags_conditions
    (
        condition_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
        feature_flag_id integer NOT NULL REFERENCES feature_flags (feature_flag_id) ON DELETE CASCADE,

@@ -1011,7 +1015,7 @@ $$
        filters jsonb NOT NULL DEFAULT '[]'::jsonb
    );

-    CREATE TABLE IF NOT EXISTS public.feature_flags_variants
+    CREATE TABLE public.feature_flags_variants
    (
        variant_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
        feature_flag_id integer NOT NULL REFERENCES feature_flags (feature_flag_id) ON DELETE CASCADE,
@@ -1021,13 +1025,90 @@ $$
        rollout_percentage integer DEFAULT 0
    );

-    CREATE TABLE IF NOT EXISTS public.sessions_feature_flags
+    CREATE TABLE public.sessions_feature_flags
    (
        session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
        feature_flag_id integer NOT NULL REFERENCES feature_flags (feature_flag_id) ON DELETE CASCADE,
        condition_id integer NULL REFERENCES feature_flags_conditions (condition_id) ON DELETE SET NULL
    );


+    CREATE TABLE events_ios.views
+    (
+        session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+        timestamp bigint NOT NULL,
+        seq_index integer NOT NULL,
+        name text NOT NULL,
+        PRIMARY KEY (session_id, timestamp, seq_index)
+    );
+
+    CREATE TABLE events_ios.taps
+    (
+        session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+        timestamp bigint NOT NULL,
+        seq_index integer NOT NULL,
+        label text NOT NULL,
+        PRIMARY KEY (session_id, timestamp, seq_index)
+    );
+    CREATE INDEX taps_session_id_idx ON events_ios.taps (session_id);
+    CREATE INDEX taps_label_idx ON events_ios.taps (label);
+    CREATE INDEX taps_label_gin_idx ON events_ios.taps USING GIN (label gin_trgm_ops);
+    CREATE INDEX taps_timestamp_idx ON events_ios.taps (timestamp);
+    CREATE INDEX taps_label_session_id_timestamp_idx ON events_ios.taps (label, session_id, timestamp);
+    CREATE INDEX taps_session_id_timestamp_idx ON events_ios.taps (session_id, timestamp);
+
+
+    CREATE TABLE events_ios.inputs
+    (
+        session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+        timestamp bigint NOT NULL,
+        seq_index integer NOT NULL,
+        label text NOT NULL,
+        PRIMARY KEY (session_id, timestamp, seq_index)
+    );
+    CREATE INDEX inputs_session_id_idx ON events_ios.inputs (session_id);
+    CREATE INDEX inputs_label_gin_idx ON events_ios.inputs USING GIN (label gin_trgm_ops);
+    CREATE INDEX inputs_timestamp_idx ON events_ios.inputs (timestamp);
+    CREATE INDEX inputs_label_session_id_timestamp_idx ON events_ios.inputs (label, session_id, timestamp);
+
+
+    CREATE TABLE public.crashes_ios
+    (
+        crash_ios_id text NOT NULL PRIMARY KEY,
+        project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+        name text NOT NULL,
+        reason text NOT NULL,
+        stacktrace text NOT NULL
+    );
+    CREATE INDEX crashes_ios_project_id_crash_ios_id_idx ON public.crashes_ios (project_id, crash_ios_id);
+    CREATE INDEX crashes_ios_project_id_idx ON public.crashes_ios (project_id);
+
+    CREATE TABLE events_common.crashes
+    (
+        session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+        timestamp bigint NOT NULL,
+        seq_index integer NOT NULL,
+        crash_ios_id text NULL REFERENCES public.crashes_ios (crash_ios_id) ON DELETE CASCADE,
+        PRIMARY KEY (session_id, timestamp, seq_index)
+    );
+    CREATE INDEX crashes_crash_ios_id_timestamp_idx ON events_common.crashes (crash_ios_id, timestamp);
+    CREATE INDEX crashes_timestamp_idx ON events_common.crashes (timestamp);
+
+
+    CREATE TABLE events_ios.swipes
+    (
+        session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+        timestamp bigint NOT NULL,
+        seq_index integer NOT NULL,
+        label text NOT NULL,
+        direction text NOT NULL,
+        PRIMARY KEY (session_id, timestamp, seq_index)
+    );
+    CREATE INDEX swipes_session_id_idx ON events_ios.swipes (session_id);
+    CREATE INDEX swipes_label_gin_idx ON events_ios.swipes USING GIN (label gin_trgm_ops);
+    CREATE INDEX swipes_timestamp_idx ON events_ios.swipes (timestamp);
+    CREATE INDEX swipes_label_session_id_timestamp_idx ON events_ios.swipes (label, session_id, timestamp);
+
    raise notice 'DB created';
END IF;
END;