Api v1.15.0 (#1464)

* feat(DB): rearranged queries
feat(DB): ready for v1.15.0

* refactor(chalice): upgraded dependencies
refactor(crons): upgraded dependencies
refactor(alerts): upgraded dependencies

* fix(chalice): return error when updating a nonexistent webhook

* feat(chalice): fixed delete webhook response

* feat(chalice): limit webhooks name length

* feat(chalice): upgraded dependencies
feat(alerts): upgraded dependencies
feat(crons): upgraded dependencies

* fix(chalice): remove urllib3 dependency

* feat(chalice): move FOSS to pydantic v2

* fix(chalice): freeze urllib3 to not have conflicts between boto3 and requests

* feat(chalice): refactoring schema in progress

* feat(chalice): refactoring schema in progress

* feat(chalice): refactoring schema in progress

* feat(chalice): refactoring schema in progress
feat(chalice): upgraded dependencies

* feat(chalice): refactored schema

* fix(chalice): pull rebase dev

* feat(DB): transfer size support

* feat(chalice): support service account

* feat(chalice): support service account

* fix(chalice): fixed refactored PayloadSchema name

* feat(chalice): path analysis

* feat(chalice): support service account 1/2

* feat(DB): timezone support

* feat(chalice): upgraded dependencies
feat(alerts): upgraded dependencies
feat(crons): upgraded dependencies
feat(assist): upgraded dependencies
feat(sourcemaps): upgraded dependencies

* feat(chalice): path analysis schema changes

* feat(chalice): path analysis query change

* feat(chalice): path analysis query change

* feat(chalice): ios replay support

* feat(chalice): ios replay support

* feat(chalice): path analysis changes

* feat(chalice): upgraded dependencies

* feat(chalice): simple hide minor paths

* feat(chalice): path analysis density

* feat(chalice): session's replay ios events

* feat(chalice): fixed typo

* feat(chalice): support project's platform

* feat(DB): support project's platform

* feat(chalice): path analysis EE in progress

* feat(chalice): project's platform API

* feat(chalice): fixed create project

* feat(chalice): EE path analysis in progress

* feat(chalice): EE path analysis
refactor(chalice): support specific database name for clickhouse-client

* feat(chalice): upgraded dependencies
feat(chalice): path analysis specific event type for startPoint
feat(chalice): path analysis specific event type for endPoint
feat(chalice): path analysis specific event type for exclude

* refactor(chalice): changed iOS click event type
Authored by Kraiem Taha Yassine on 2023-09-06 17:06:33 +01:00, committed via GitHub
parent 0ece13064a
commit a34179365e
99 changed files with 4999 additions and 4049 deletions

==== changed file ====

@@ -5,18 +5,18 @@ name = "pypi"
 [packages]
 requests = "==2.31.0"
-urllib3 = "==1.26.16"
-boto3 = "==1.26.148"
-pyjwt = "==2.7.0"
-psycopg2-binary = "==2.9.6"
-elasticsearch = "==8.8.0"
-jira = "==3.5.1"
-fastapi = "==0.96.0"
-uvicorn = {version = "==0.22.0", extras = ["standard"]}
+boto3 = "==1.28.40"
+pyjwt = "==2.8.0"
+psycopg2-binary = "==2.9.7"
+elasticsearch = "==8.9.0"
+jira = "==3.5.2"
+fastapi = "==0.103.1"
 python-decouple = "==3.8"
-pydantic = {version = "==1.10.8", extras = ["email"]}
-apscheduler = "==3.10.1"
-redis = "==4.5.5"
+apscheduler = "==3.10.4"
+redis = "==5.0.0"
+urllib3 = "==1.26.16"
+uvicorn = {version = "==0.23.2", extras = ["standard"]}
+pydantic = {version = "==2.3.0", extras = ["email"]}

 [dev-packages]

==== changed file ====

@@ -18,7 +18,7 @@ class JWTAuth(HTTPBearer):
         if credentials:
             if not credentials.scheme == "Bearer":
                 raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.")
-            jwt_payload = authorizers.jwt_authorizer(credentials.scheme + " " + credentials.credentials)
+            jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials)
             auth_exists = jwt_payload is not None \
                           and users.auth_exists(user_id=jwt_payload.get("userId", -1),
                                                 tenant_id=jwt_payload.get("tenantId", -1),
@@ -27,18 +27,13 @@ class JWTAuth(HTTPBearer):
             if jwt_payload is None \
                     or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
                     or not auth_exists:
-                print("JWTAuth: Token issue")
                 if jwt_payload is not None:
                     print(jwt_payload)
-                    print(f"JWTAuth: user_id={jwt_payload.get('userId')} tenant_id={jwt_payload.get('tenantId')}")
-                if jwt_payload is None:
-                    print("JWTAuth: jwt_payload is None")
-                    print(credentials.scheme + " " + credentials.credentials)
-                if jwt_payload is not None and jwt_payload.get("iat") is None:
-                    print("JWTAuth: iat is None")
-                if jwt_payload is not None and jwt_payload.get("aud") is None:
-                    print("JWTAuth: aud is None")
-                if jwt_payload is not None and not auth_exists:
+                    if jwt_payload.get("iat") is None:
+                        print("JWTAuth: iat is None")
+                    if jwt_payload.get("aud") is None:
+                        print("JWTAuth: aud is None")
+                if not auth_exists:
                     print("JWTAuth: not users.auth_exists")
                 raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
@@ -47,10 +42,9 @@ class JWTAuth(HTTPBearer):
                 print("JWTAuth: User not found.")
                 raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.")
             jwt_payload["authorizer_identity"] = "jwt"
-            print(jwt_payload)
             request.state.authorizer_identity = "jwt"
-            request.state.currentContext = schemas.CurrentContext(tenant_id=jwt_payload.get("tenantId", -1),
-                                                                  user_id=jwt_payload.get("userId", -1),
+            request.state.currentContext = schemas.CurrentContext(tenantId=jwt_payload.get("tenantId", -1),
+                                                                  userId=jwt_payload.get("userId", -1),
                                                                   email=user["email"])
             return request.state.currentContext
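For orientation, a minimal sketch of how an HTTPBearer subclass like JWTAuth is wired into a route as a FastAPI dependency. The route and return value here are illustrative, not the project's actual wiring:

# Minimal sketch: a custom HTTPBearer dependency that checks the scheme and
# hands the decoded context to the route. Real code would verify the JWT here.
from fastapi import Depends, FastAPI, HTTPException, Request, status
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer

app = FastAPI()

class JWTAuth(HTTPBearer):
    async def __call__(self, request: Request):
        credentials: HTTPAuthorizationCredentials = await super().__call__(request)
        if credentials.scheme != "Bearer":
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
                                detail="Invalid authentication scheme.")
        # Placeholder for jwt_authorizer(scheme=..., token=...) validation.
        return {"token": credentials.credentials}

@app.get("/me")
def me(context: dict = Depends(JWTAuth())):
    return context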

==== changed file ====

@@ -55,7 +55,7 @@ def __process_circular(alert):

 def create(project_id, data: schemas.AlertSchema):
-    data = data.dict()
+    data = data.model_dump()
     data["query"] = json.dumps(data["query"])
     data["options"] = json.dumps(data["options"])
@@ -72,7 +72,7 @@ def create(project_id, data: schemas.AlertSchema):

 def update(id, data: schemas.AlertSchema):
-    data = data.dict()
+    data = data.model_dump()
     data["query"] = json.dumps(data["query"])
     data["options"] = json.dumps(data["options"])

==== changed file ====

@@ -6,13 +6,12 @@ from chalicelib.core import tenants
 from chalicelib.core import users

-def jwt_authorizer(token):
-    token = token.split(" ")
-    if len(token) != 2 or token[0].lower() != "bearer":
+def jwt_authorizer(scheme: str, token: str):
+    if scheme.lower() != "bearer":
         return None
     try:
         payload = jwt.decode(
-            token[1],
+            token,
             config("jwt_secret"),
             algorithms=config("jwt_algorithm"),
             audience=[f"front:{helper.get_stage_name()}"]
@@ -22,6 +21,7 @@ def jwt_authorizer(token):
         return None
     except BaseException as e:
         print("! JWT Base Exception")
+        print(e)
         return None
     return payload
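A self-contained sketch of the new keyword-based authorizer contract with PyJWT; the secret, algorithm, and audience values are placeholders, not the project's configuration:

# Sketch: decode a bearer token with PyJWT under the new (scheme, token) contract.
import jwt  # PyJWT

JWT_SECRET = "change-me"       # placeholder
JWT_ALGORITHM = "HS256"        # placeholder
AUDIENCE = "front:default"     # placeholder

def jwt_authorizer(scheme: str, token: str):
    if scheme.lower() != "bearer":
        return None
    try:
        return jwt.decode(token, JWT_SECRET, algorithms=[JWT_ALGORITHM], audience=[AUDIENCE])
    except jwt.ExpiredSignatureError:
        return None
    except jwt.InvalidTokenError as e:
        print("! JWT decode failed:", e)
        return None

encoded = jwt.encode({"userId": 1, "aud": AUDIENCE}, JWT_SECRET, algorithm=JWT_ALGORITHM)
print(jwt_authorizer(scheme="Bearer", token=encoded))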

==== changed file ====

@@ -27,7 +27,7 @@ COALESCE((SELECT TRUE
                    AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """

-def search_short_session(data: schemas.FlatClickMapSessionsSearch, project_id, user_id, include_mobs: bool = True):
+def search_short_session(data: schemas.ClickMapSessionsSearch, project_id, user_id, include_mobs: bool = True):
     no_platform = True
     for f in data.filters:
         if f.type == schemas.FilterType.platform:
@@ -62,7 +62,7 @@ def search_short_session(data: schemas.FlatClickMapSessionsSearch, project_id, u
         print("--------- CLICK MAP SHORT SESSION SEARCH QUERY EXCEPTION -----------")
         print(main_query.decode('UTF-8'))
         print("--------- PAYLOAD -----------")
-        print(data.json())
+        print(data.model_dump_json())
         print("--------------------")
         raise err

==== changed file ====

@@ -13,25 +13,24 @@ from chalicelib.utils.storage import StorageClient
 PIE_CHART_GROUP = 5

+# TODO: refactor this to split
+# timeseries /
+# table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs
 def __try_live(project_id, data: schemas.CardSchema):
     results = []
     for i, s in enumerate(data.series):
-        s.filter.startDate = data.startTimestamp
-        s.filter.endDate = data.endTimestamp
         results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
                                                view_type=data.view_type, metric_type=data.metric_type,
                                                metric_of=data.metric_of, metric_value=data.metric_value))
         if data.view_type == schemas.MetricTimeseriesViewType.progress:
             r = {"count": results[-1]}
-            diff = s.filter.endDate - s.filter.startDate
-            s.filter.endDate = s.filter.startDate
-            s.filter.startDate = s.filter.endDate - diff
+            diff = s.filter.endTimestamp - s.filter.startTimestamp
+            s.filter.endTimestamp = s.filter.startTimestamp
+            s.filter.startTimestamp = s.filter.endTimestamp - diff
             r["previousCount"] = sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
                                                          view_type=data.view_type, metric_type=data.metric_type,
                                                          metric_of=data.metric_of, metric_value=data.metric_value)
             r["countProgress"] = helper.__progress(old_val=r["previousCount"], new_val=r["count"])
-            # r["countProgress"] = ((r["count"] - r["previousCount"]) / r["previousCount"]) * 100 \
-            #     if r["previousCount"] > 0 else 0
             r["seriesName"] = s.name if s.name else i + 1
             r["seriesId"] = s.series_id if s.series_id else None
             results[-1] = r
@@ -50,14 +49,12 @@ def __is_funnel_chart(data: schemas.CardSchema):
     return data.metric_type == schemas.MetricType.funnel

-def __get_funnel_chart(project_id, data: schemas.CardSchema):
+def __get_funnel_chart(project_id: int, data: schemas.CardFunnel, user_id: int = None):
     if len(data.series) == 0:
         return {
             "stages": [],
             "totalDropDueToIssues": 0
         }
-    data.series[0].filter.startDate = data.startTimestamp
-    data.series[0].filter.endDate = data.endTimestamp
     return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)
@@ -72,10 +69,6 @@ def __get_errors_list(project_id, user_id, data: schemas.CardSchema):
             "total": 0,
             "errors": []
         }
-    data.series[0].filter.startDate = data.startTimestamp
-    data.series[0].filter.endDate = data.endTimestamp
-    data.series[0].filter.page = data.page
-    data.series[0].filter.limit = data.limit
     return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id)
@@ -91,10 +84,6 @@ def __get_sessions_list(project_id, user_id, data: schemas.CardSchema):
             "total": 0,
             "sessions": []
         }
-    data.series[0].filter.startDate = data.startTimestamp
-    data.series[0].filter.endDate = data.endTimestamp
-    data.series[0].filter.page = data.page
-    data.series[0].filter.limit = data.limit
     return sessions.search_sessions(data=data.series[0].filter, project_id=project_id, user_id=user_id)
@@ -106,48 +95,33 @@ def __is_click_map(data: schemas.CardSchema):
     return data.metric_type == schemas.MetricType.click_map

-def __get_click_map_chart(project_id, user_id, data: schemas.CardSchema, include_mobs: bool = True):
+def __get_click_map_chart(project_id, user_id, data: schemas.CardClickMap, include_mobs: bool = True):
     if len(data.series) == 0:
         return None
-    data.series[0].filter.startDate = data.startTimestamp
-    data.series[0].filter.endDate = data.endTimestamp
     return click_maps.search_short_session(project_id=project_id, user_id=user_id,
-                                           data=schemas.FlatClickMapSessionsSearch(**data.series[0].filter.dict()),
+                                           data=schemas.ClickMapSessionsSearch(
+                                               **data.series[0].filter.model_dump()),
                                            include_mobs=include_mobs)

-def __get_path_analysis_chart(project_id, data: schemas.CardSchema):
+def __get_path_analysis_chart(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
     if len(data.series) == 0:
-        data.series.append(schemas.CardSeriesSchema())
+        data.series.append(
+            schemas.CardPathAnalysisSchema(startTimestamp=data.startTimestamp, endTimestamp=data.endTimestamp))
     elif not isinstance(data.series[0].filter, schemas.PathAnalysisSchema):
         data.series[0].filter = schemas.PathAnalysisSchema()
-    data.series[0].filter.startTimestamp = data.startTimestamp
-    data.series[0].filter.endTimestamp = data.endTimestamp
-    return product_analytics.path_analysis(project_id=project_id,
-                                           data=schemas.PathAnalysisSchema(**data.series[0].filter.dict()))
+    return product_analytics.path_analysis(project_id=project_id, data=data.series[0].filter, density=data.density,
+                                           selected_event_type=data.metric_value, hide_minor_paths=data.hide_excess)

 def __is_path_analysis(data: schemas.CardSchema):
     return data.metric_type == schemas.MetricType.pathAnalysis

-def merged_live(project_id, data: schemas.CardSchema, user_id=None):
-    if data.is_template:
-        return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.dict())
-    elif __is_funnel_chart(data):
-        return __get_funnel_chart(project_id=project_id, data=data)
-    elif __is_errors_list(data):
-        return __get_errors_list(project_id=project_id, user_id=user_id, data=data)
-    elif __is_sessions_list(data):
-        return __get_sessions_list(project_id=project_id, user_id=user_id, data=data)
-    elif __is_click_map(data):
-        return __get_click_map_chart(project_id=project_id, user_id=user_id, data=data)
-    elif __is_path_analysis(data):
-        return __get_path_analysis_chart(project_id=project_id, data=data)
-    elif len(data.series) == 0:
-        return []
+def __get_timeseries_chart(project_id: int, data: schemas.CardTimeSeries, user_id: int = None):
     series_charts = __try_live(project_id=project_id, data=data)
-    if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table:
+    if data.view_type == schemas.MetricTimeseriesViewType.progress:
         return series_charts
     results = [{}] * len(series_charts[0])
     for i in range(len(results)):
@@ -157,26 +131,131 @@ def merged_live(project_id, data: schemas.CardSchema, user_id=None):
     return results

+def empty(**args):
+    raise Exception("not supported")
+
+def __get_table_of_user_ids(project_id: int, data: schemas.CardTable, user_id: int = None):
+    series_charts = __try_live(project_id=project_id, data=data)
+    return series_charts
+
+def __get_table_of_sessions(project_id: int, data: schemas.CardTable, user_id):
+    return __get_sessions_list(project_id=project_id, user_id=user_id, data=data)
+
+def __get_table_of_errors(project_id: int, data: schemas.CardTable, user_id: int):
+    return __get_errors_list(project_id=project_id, user_id=user_id, data=data)
+
+def __get_table_of_issues(project_id: int, data: schemas.CardTable, user_id: int = None):
+    return __try_live(project_id=project_id, data=data)
+
+def __get_table_of_browsers(project_id: int, data: schemas.CardTable, user_id: int = None):
+    return __try_live(project_id=project_id, data=data)
+
+def __get_table_of_devises(project_id: int, data: schemas.CardTable, user_id: int = None):
+    return __try_live(project_id=project_id, data=data)
+
+def __get_table_of_countries(project_id: int, data: schemas.CardTable, user_id: int = None):
+    return __try_live(project_id=project_id, data=data)
+
+def __get_table_of_urls(project_id: int, data: schemas.CardTable, user_id: int = None):
+    return __try_live(project_id=project_id, data=data)
+
+def __get_table_chart(project_id: int, data: schemas.CardTable, user_id: int):
+    supported = {
+        schemas.MetricOfTable.sessions: __get_table_of_sessions,
+        schemas.MetricOfTable.errors: __get_table_of_errors,
+        schemas.MetricOfTable.user_id: __get_table_of_user_ids,
+        schemas.MetricOfTable.issues: __get_table_of_issues,
+        schemas.MetricOfTable.user_browser: __get_table_of_browsers,
+        schemas.MetricOfTable.user_device: __get_table_of_devises,
+        schemas.MetricOfTable.user_country: __get_table_of_countries,
+        schemas.MetricOfTable.visited_url: __get_table_of_urls,
+    }
+    return supported.get(data.metric_of, empty)(project_id=project_id, data=data, user_id=user_id)
+
+def get_chart(project_id: int, data: schemas.CardSchema, user_id: int):
+    if data.is_template:
+        return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.model_dump())
+    supported = {
+        schemas.MetricType.timeseries: __get_timeseries_chart,
+        schemas.MetricType.table: __get_table_chart,
+        schemas.MetricType.click_map: __get_click_map_chart,
+        schemas.MetricType.funnel: __get_funnel_chart,
+        schemas.MetricType.insights: empty,
+        schemas.MetricType.pathAnalysis: __get_path_analysis_chart
+    }
+    return supported.get(data.metric_type, empty)(project_id=project_id, data=data, user_id=user_id)
+
+def merged_live(project_id, data: schemas.CardSchema, user_id=None):
+    return get_chart(project_id=project_id, data=data, user_id=user_id)
+    print("---1")
+    if data.is_template:
+        print("---2")
+        return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.model_dump())
+    elif __is_funnel_chart(data):
+        print("---3")
+        return __get_funnel_chart(project_id=project_id, data=data)
+    elif __is_errors_list(data):
+        print("---4")
+        return __get_errors_list(project_id=project_id, user_id=user_id, data=data)
+    elif __is_sessions_list(data):
+        print("---5")
+        return __get_sessions_list(project_id=project_id, user_id=user_id, data=data)
+    elif __is_click_map(data):
+        print("---6")
+        return __get_click_map_chart(project_id=project_id, user_id=user_id, data=data)
+    elif __is_path_analysis(data):
+        print("---7")
+        return __get_path_analysis_chart(project_id=project_id, data=data)
+    elif len(data.series) == 0:
+        print("---8")
+        return []
+    series_charts = __try_live(project_id=project_id, data=data)
+    print("---9")
+    if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table:
+        print("---10")
+        return series_charts
+    results = [{}] * len(series_charts[0])
+    print("---11")
+    for i in range(len(results)):
+        for j, series_chart in enumerate(series_charts):
+            results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"],
+                          data.series[j].name if data.series[j].name else j + 1: series_chart[i]["count"]}
+    return results

 def __merge_metric_with_data(metric: schemas.CardSchema,
-                             data: schemas.CardChartSchema) -> schemas.CardSchema:
+                             data: schemas.CardSessionsSchema) -> schemas.CardSchema:
     if data.series is not None and len(data.series) > 0:
         metric.series = data.series
-    metric: schemas.CardSchema = schemas.CardSchema(
-        **{**data.dict(by_alias=True), **metric.dict(by_alias=True)})
+    # TODO: try to refactor this
+    metric: schemas.CardSchema = schemas.CardSchema(**{**data.model_dump(by_alias=True),
+                                                       **metric.model_dump(by_alias=True)})
     if len(data.filters) > 0 or len(data.events) > 0:
         for s in metric.series:
             if len(data.filters) > 0:
                 s.filter.filters += data.filters
             if len(data.events) > 0:
                 s.filter.events += data.events
-    metric.limit = data.limit
-    metric.page = data.page
-    metric.startTimestamp = data.startTimestamp
-    metric.endTimestamp = data.endTimestamp
+    # metric.limit = data.limit
+    # metric.page = data.page
+    # metric.startTimestamp = data.startTimestamp
+    # metric.endTimestamp = data.endTimestamp
     return metric

-def make_chart(project_id, user_id, data: schemas.CardChartSchema, metric: schemas.CardSchema):
+def make_chart(project_id, user_id, data: schemas.CardSessionsSchema, metric: schemas.CardSchema):
     if metric is None:
         return None
     metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
@@ -198,10 +277,10 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CardSessionsSchem
     # if __is_click_map(metric) and raw_metric.get("data") is not None:
     #     is_click_map = True
     for s in metric.series:
-        s.filter.startDate = data.startTimestamp
-        s.filter.endDate = data.endTimestamp
-        s.filter.limit = data.limit
-        s.filter.page = data.page
+        # s.filter.startTimestamp = data.startTimestamp
+        # s.filter.endTimestamp = data.endTimestamp
+        # s.filter.limit = data.limit
+        # s.filter.page = data.page
         # if is_click_map:
         #     results.append(
         #         {"seriesId": s.series_id, "seriesName": s.name, "total": 1, "sessions": [raw_metric["data"]]})
@@ -221,10 +300,6 @@ def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CardSessions
     if metric is None:
         return None
     for s in metric.series:
-        s.filter.startDate = data.startTimestamp
-        s.filter.endDate = data.endTimestamp
-        s.filter.limit = data.limit
-        s.filter.page = data.page
         return {"seriesId": s.series_id, "seriesName": s.name,
                 **funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}
@@ -238,23 +313,15 @@ def get_errors_list(project_id, user_id, metric_id, data: schemas.CardSessionsSc
     if metric is None:
         return None
     for s in metric.series:
-        s.filter.startDate = data.startTimestamp
-        s.filter.endDate = data.endTimestamp
-        s.filter.limit = data.limit
-        s.filter.page = data.page
         return {"seriesId": s.series_id, "seriesName": s.name,
                 **errors.search(data=s.filter, project_id=project_id, user_id=user_id)}

 def try_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
     results = []
-    if data.series is None:
+    if len(data.series) == 0:
         return results
     for s in data.series:
-        s.filter.startDate = data.startTimestamp
-        s.filter.endDate = data.endTimestamp
-        s.filter.limit = data.limit
-        s.filter.page = data.page
         if len(data.filters) > 0:
             s.filter.filters += data.filters
         if len(data.events) > 0:
@@ -265,7 +332,7 @@ def try_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
     return results

-def create(project_id, user_id, data: schemas.CardSchema, dashboard=False):
+def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
     with pg_client.PostgresClient() as cur:
         session_data = None
         if __is_click_map(data):
@@ -275,13 +342,13 @@ def create(project_id, user_id, data: schemas.CardSchema, dashboard=False):
             session_data = json.dumps(session_data)
     _data = {"session_data": session_data}
     for i, s in enumerate(data.series):
-        for k in s.dict().keys():
+        for k in s.model_dump().keys():
             _data[f"{k}_{i}"] = s.__getattribute__(k)
         _data[f"index_{i}"] = i
         _data[f"filter_{i}"] = s.filter.json()
     series_len = len(data.series)
-    params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data}
-    params["default_config"] = json.dumps(data.default_config.dict())
+    params = {"user_id": user_id, "project_id": project_id, **data.model_dump(), **_data}
+    params["default_config"] = json.dumps(data.default_config.model_dump())
     query = """INSERT INTO metrics (project_id, user_id, name, is_public,
                                     view_type, metric_type, metric_of, metric_value,
                                     metric_format, default_config, thumbnail, data)
@@ -307,7 +374,7 @@ def create(project_id, user_id, data: schemas.CardSchema, dashboard=False):
     return {"data": get_card(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)}

-def update(metric_id, user_id, project_id, data: schemas.UpdateCardSchema):
+def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
     metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
     if metric is None:
         return None
@@ -320,7 +387,7 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCardSchema):
               "user_id": user_id, "project_id": project_id, "view_type": data.view_type,
               "metric_type": data.metric_type, "metric_of": data.metric_of,
               "metric_value": data.metric_value, "metric_format": data.metric_format,
-              "config": json.dumps(data.default_config.dict()), "thumbnail": data.thumbnail}
+              "config": json.dumps(data.default_config.model_dump()), "thumbnail": data.thumbnail}
     for i, s in enumerate(data.series):
         prefix = "u_"
         if s.index is None:
@@ -331,7 +398,7 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCardSchema):
         else:
             u_series.append({"i": i, "s": s})
             u_series_ids.append(s.series_id)
-        ns = s.dict()
+        ns = s.model_dump()
         for k in ns.keys():
             if k == "filter":
                 ns[k] = json.dumps(ns[k])
@@ -453,7 +520,7 @@ def get_all(project_id, user_id):
     return result

-def delete(project_id, metric_id, user_id):
+def delete_card(project_id, metric_id, user_id):
     with pg_client.PostgresClient() as cur:
         cur.execute(
             cur.mogrify("""\
@@ -562,8 +629,8 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
     if metric is None:
         return None
     for s in metric.series:
-        s.filter.startDate = data.startTimestamp
-        s.filter.endDate = data.endTimestamp
+        s.filter.startTimestamp = data.startTimestamp
+        s.filter.endTimestamp = data.endTimestamp
         s.filter.limit = data.limit
         s.filter.page = data.page
         issues_list = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {})
@@ -589,13 +656,15 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
             "issue": issue}

-def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChartSchema):
+def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
     raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, include_data=True)
     if raw_metric is None:
         raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="card not found")
+    raw_metric["startTimestamp"] = data.startTimestamp
+    raw_metric["endTimestamp"] = data.endTimestamp
     metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)
     if metric.is_template:
-        return get_predefined_metric(key=metric.metric_of, project_id=project_id, data=data.dict())
+        return get_predefined_metric(key=metric.metric_of, project_id=project_id, data=data.model_dump())
     elif __is_click_map(metric):
         if raw_metric["data"]:
             keys = sessions_mobs. \
@@ -615,53 +684,52 @@ def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChart
     return make_chart(project_id=project_id, user_id=user_id, data=data, metric=metric)

-PREDEFINED = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions,
-              schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
-              schemas.MetricOfWebVitals.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
-              schemas.MetricOfWebVitals.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
-              schemas.MetricOfWebVitals.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
-              schemas.MetricOfWebVitals.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
-              schemas.MetricOfWebVitals.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
-              schemas.MetricOfWebVitals.avg_session_duration: metrics.get_user_activity_avg_session_duration,
-              schemas.MetricOfWebVitals.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
-              schemas.MetricOfWebVitals.avg_pages_response_time: metrics.get_pages_response_time,
-              schemas.MetricOfWebVitals.avg_response_time: metrics.get_top_metrics_avg_response_time,
-              schemas.MetricOfWebVitals.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
-              schemas.MetricOfWebVitals.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
-              schemas.MetricOfWebVitals.avg_till_first_byte: metrics.get_top_metrics_avg_till_first_bit,
-              schemas.MetricOfWebVitals.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
-              schemas.MetricOfWebVitals.count_requests: metrics.get_top_metrics_count_requests,
-              schemas.MetricOfWebVitals.avg_time_to_render: metrics.get_time_to_render,
-              schemas.MetricOfWebVitals.avg_used_js_heap_size: metrics.get_memory_consumption,
-              schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu,
-              schemas.MetricOfWebVitals.avg_fps: metrics.get_avg_fps,
-              schemas.MetricOfErrors.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
-              schemas.MetricOfErrors.domains_errors_4xx: metrics.get_domains_errors_4xx,
-              schemas.MetricOfErrors.domains_errors_5xx: metrics.get_domains_errors_5xx,
-              schemas.MetricOfErrors.errors_per_domains: metrics.get_errors_per_domains,
-              schemas.MetricOfErrors.calls_errors: metrics.get_calls_errors,
-              schemas.MetricOfErrors.errors_per_type: metrics.get_errors_per_type,
-              schemas.MetricOfErrors.resources_by_party: metrics.get_resources_by_party,
-              schemas.MetricOfPerformance.speed_location: metrics.get_speed_index_location,
-              schemas.MetricOfPerformance.slowest_domains: metrics.get_slowest_domains,
-              schemas.MetricOfPerformance.sessions_per_browser: metrics.get_sessions_per_browser,
-              schemas.MetricOfPerformance.time_to_render: metrics.get_time_to_render,
-              schemas.MetricOfPerformance.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
-              schemas.MetricOfPerformance.memory_consumption: metrics.get_memory_consumption,
-              schemas.MetricOfPerformance.cpu: metrics.get_avg_cpu,
-              schemas.MetricOfPerformance.fps: metrics.get_avg_fps,
-              schemas.MetricOfPerformance.crashes: metrics.get_crashes,
-              schemas.MetricOfPerformance.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
-              schemas.MetricOfPerformance.pages_dom_buildtime: metrics.get_pages_dom_build_time,
-              schemas.MetricOfPerformance.pages_response_time: metrics.get_pages_response_time,
-              schemas.MetricOfPerformance.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
-              schemas.MetricOfResources.missing_resources: metrics.get_missing_resources_trend,
-              schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources,
-              schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time,
-              schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
-              schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, }

 def get_predefined_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
         schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict):
-    return PREDEFINED.get(key, lambda *args: None)(project_id=project_id, **data)
+    supported = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions,
+                 schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
+                 schemas.MetricOfWebVitals.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
+                 schemas.MetricOfWebVitals.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
+                 schemas.MetricOfWebVitals.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
+                 schemas.MetricOfWebVitals.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
+                 schemas.MetricOfWebVitals.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
+                 schemas.MetricOfWebVitals.avg_session_duration: metrics.get_user_activity_avg_session_duration,
+                 schemas.MetricOfWebVitals.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
+                 schemas.MetricOfWebVitals.avg_pages_response_time: metrics.get_pages_response_time,
+                 schemas.MetricOfWebVitals.avg_response_time: metrics.get_top_metrics_avg_response_time,
+                 schemas.MetricOfWebVitals.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
+                 schemas.MetricOfWebVitals.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
+                 schemas.MetricOfWebVitals.avg_till_first_byte: metrics.get_top_metrics_avg_till_first_bit,
+                 schemas.MetricOfWebVitals.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
+                 schemas.MetricOfWebVitals.count_requests: metrics.get_top_metrics_count_requests,
+                 schemas.MetricOfWebVitals.avg_time_to_render: metrics.get_time_to_render,
+                 schemas.MetricOfWebVitals.avg_used_js_heap_size: metrics.get_memory_consumption,
+                 schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu,
+                 schemas.MetricOfWebVitals.avg_fps: metrics.get_avg_fps,
+                 schemas.MetricOfErrors.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
+                 schemas.MetricOfErrors.domains_errors_4xx: metrics.get_domains_errors_4xx,
+                 schemas.MetricOfErrors.domains_errors_5xx: metrics.get_domains_errors_5xx,
+                 schemas.MetricOfErrors.errors_per_domains: metrics.get_errors_per_domains,
+                 schemas.MetricOfErrors.calls_errors: metrics.get_calls_errors,
+                 schemas.MetricOfErrors.errors_per_type: metrics.get_errors_per_type,
+                 schemas.MetricOfErrors.resources_by_party: metrics.get_resources_by_party,
+                 schemas.MetricOfPerformance.speed_location: metrics.get_speed_index_location,
+                 schemas.MetricOfPerformance.slowest_domains: metrics.get_slowest_domains,
+                 schemas.MetricOfPerformance.sessions_per_browser: metrics.get_sessions_per_browser,
+                 schemas.MetricOfPerformance.time_to_render: metrics.get_time_to_render,
+                 schemas.MetricOfPerformance.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
+                 schemas.MetricOfPerformance.memory_consumption: metrics.get_memory_consumption,
+                 schemas.MetricOfPerformance.cpu: metrics.get_avg_cpu,
+                 schemas.MetricOfPerformance.fps: metrics.get_avg_fps,
+                 schemas.MetricOfPerformance.crashes: metrics.get_crashes,
+                 schemas.MetricOfPerformance.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
+                 schemas.MetricOfPerformance.pages_dom_buildtime: metrics.get_pages_dom_build_time,
+                 schemas.MetricOfPerformance.pages_response_time: metrics.get_pages_response_time,
+                 schemas.MetricOfPerformance.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
+                 schemas.MetricOfResources.missing_resources: metrics.get_missing_resources_trend,
+                 schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources,
+                 schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time,
+                 schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
+                 schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, }
+    return supported.get(key, lambda *args: None)(project_id=project_id, **data)
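The net effect of this file's refactor is replacing merged_live's if/elif chain with dictionary dispatch plus a fail-fast default. A standalone sketch of the pattern with illustrative handlers:

# Dictionary dispatch with a fail-fast default, as in get_chart()/__get_table_chart().
def empty(**kwargs):
    raise Exception("not supported")

def timeseries_chart(project_id: int, data: dict):
    return {"type": "timeseries", "project": project_id, **data}

def table_chart(project_id: int, data: dict):
    return {"type": "table", "project": project_id, **data}

HANDLERS = {"timeseries": timeseries_chart, "table": table_chart}

def get_chart(project_id: int, metric_type: str, data: dict):
    # Unknown metric types fall through to empty() and raise immediately.
    return HANDLERS.get(metric_type, empty)(project_id=project_id, data=data)

print(get_chart(1, "table", {"rows": []}))
# get_chart(1, "insights", {})  # would raise Exception("not supported")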

==== changed file ====

@@ -12,7 +12,7 @@ def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema):
     pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned, description)
                    VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s, %(description)s)
                    RETURNING *"""
-    params = {"userId": user_id, "projectId": project_id, **data.dict()}
+    params = {"userId": user_id, "projectId": project_id, **data.model_dump()}
     if data.metrics is not None and len(data.metrics) > 0:
         pg_query = f"""WITH dash AS ({pg_query})
                        INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
@@ -109,7 +109,7 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo
         pg_query = """SELECT COALESCE(COUNT(*),0) AS count
                       FROM dashboard_widgets
                       WHERE dashboard_id = %(dashboard_id)s;"""
-        params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()}
+        params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.model_dump()}
         cur.execute(cur.mogrify(pg_query, params))
         row = cur.fetchone()
         offset = row["count"]
@@ -178,7 +178,7 @@ def add_widget(project_id, user_id, dashboard_id, data: schemas.AddWidgetToDashb
                                              AND dashboard_id = %(dashboard_id)s
                                              AND (dashboards.user_id = %(userId)s OR is_public))
                        RETURNING *;"""
-        params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()}
+        params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.model_dump()}
         params["config"] = json.dumps(data.config)
         cur.execute(cur.mogrify(pg_query, params))
         row = cur.fetchone()
@@ -192,7 +192,7 @@ def update_widget(project_id, user_id, dashboard_id, widget_id, data: schemas.Up
                       WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s
                       RETURNING *;"""
         params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id,
-                  "widget_id": widget_id, **data.dict()}
+                  "widget_id": widget_id, **data.model_dump()}
         params["config"] = json.dumps(data.config)
         cur.execute(cur.mogrify(pg_query, params))
         row = cur.fetchone()
@@ -224,7 +224,7 @@ def pin_dashboard(project_id, user_id, dashboard_id):

 def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.CardSchema):
-    metric_id = custom_metrics.create(project_id=project_id, user_id=user_id, data=data, dashboard=True)
+    metric_id = custom_metrics.create_card(project_id=project_id, user_id=user_id, data=data, dashboard=True)
     return add_widget(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id,
                       data=schemas.AddWidgetToDashboardPayloadSchema(metricId=metric_id))
@@ -234,7 +234,7 @@ def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.Ca
 #         return None
 #     metric = schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate(**raw_metric)
 #     if metric.is_template:
-#         return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
+#         return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.model_dump())
 #     else:
 #         return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=raw_metric["metricId"],
 #                                          data=data, metric=raw_metric)

==== changed file ====

@@ -454,10 +454,10 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
         pg_sub_query_chart.append("errors.error_id =details.error_id")
     statuses = []
     error_ids = None
-    if data.startDate is None:
-        data.startDate = TimeUTC.now(-30)
-    if data.endDate is None:
-        data.endDate = TimeUTC.now(1)
+    if data.startTimestamp is None:
+        data.startTimestamp = TimeUTC.now(-30)
+    if data.endTimestamp is None:
+        data.endTimestamp = TimeUTC.now(1)
     if len(data.events) > 0 or len(data.filters) > 0:
         print("-- searching for sessions before errors")
         statuses = sessions.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True,
@@ -466,18 +466,18 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
             return empty_response
         error_ids = [e["errorId"] for e in statuses]
     with pg_client.PostgresClient() as cur:
-        step_size = __get_step_size(data.startDate, data.endDate, data.density, factor=1)
+        step_size = __get_step_size(data.startTimestamp, data.endTimestamp, data.density, factor=1)
         sort = __get_sort_key('datetime')
         if data.sort is not None:
             sort = __get_sort_key(data.sort)
-        order = schemas.SortOrderType.desc.value
+        order = schemas.SortOrderType.desc
         if data.order is not None:
-            order = data.order.value
+            order = data.order
         extra_join = ""
         params = {
-            "startDate": data.startDate,
-            "endDate": data.endDate,
+            "startDate": data.startTimestamp,
+            "endDate": data.endTimestamp,
             "project_id": project_id,
             "userId": user_id,
             "step_size": step_size}
@@ -709,41 +709,3 @@ def change_state(project_id, user_id, error_id, action):
     for e in errors:
         e["status"] = row["status"]
     return {"data": errors}
-
-MAX_RANK = 2
-
-def __status_rank(status):
-    return {
-        'unresolved': MAX_RANK - 2,
-        'ignored': MAX_RANK - 1,
-        'resolved': MAX_RANK
-    }.get(status)
-
-def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()):
-    with pg_client.PostgresClient() as cur:
-        query = cur.mogrify(
-            """WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(user_id)s)
-               SELECT COUNT(timed_errors.*) AS unresolved_and_unviewed
-               FROM (SELECT root_error.error_id
-                     FROM events.errors
-                              INNER JOIN public.errors AS root_error USING (error_id)
-                              LEFT JOIN user_viewed USING (error_id)
-                     WHERE project_id = %(project_id)s
-                       AND timestamp >= %(startTimestamp)s
-                       AND timestamp <= %(endTimestamp)s
-                       AND source = 'js_exception'
-                       AND root_error.status = 'unresolved'
-                       AND user_viewed.error_id ISNULL
-                     LIMIT 1
-                    ) AS timed_errors;""",
-            {"project_id": project_id, "user_id": user_id, "startTimestamp": startTimestamp,
-             "endTimestamp": endTimestamp})
-        cur.execute(query=query)
-        row = cur.fetchone()
-        return {
-            "data": helper.dict_to_camel_case(row)
-        }

==== changed file ====

@@ -110,11 +110,12 @@ class EventType:
                       column=None)  # column=None because errors are searched by name or message
     METADATA = Event(ui_type=schemas.FilterType.metadata, table="public.sessions", column=None)
     # IOS
-    CLICK_IOS = Event(ui_type=schemas.EventType.click_ios, table="events_ios.clicks", column="label")
+    CLICK_IOS = Event(ui_type=schemas.EventType.click_ios, table="events_ios.taps", column="label")
     INPUT_IOS = Event(ui_type=schemas.EventType.input_ios, table="events_ios.inputs", column="label")
     VIEW_IOS = Event(ui_type=schemas.EventType.view_ios, table="events_ios.views", column="name")
+    SWIPE_IOS = Event(ui_type=schemas.EventType.swipe_ios, table="events_ios.swipes", column="label")
     CUSTOM_IOS = Event(ui_type=schemas.EventType.custom_ios, table="events_common.customs", column="name")
-    REQUEST_IOS = Event(ui_type=schemas.EventType.request_ios, table="events_common.requests", column="url")
+    REQUEST_IOS = Event(ui_type=schemas.EventType.request_ios, table="events_common.requests", column="path")
     ERROR_IOS = Event(ui_type=schemas.EventType.error_ios, table="events_ios.crashes",
                       column=None)  # column=None because errors are searched by name or message
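Each EventType entry binds a UI event type to the table and column a search query should hit, which is why retargeting click_ios from events_ios.clicks to events_ios.taps needs no query-builder changes. A sketch of the idea; the Event internals and the ui_type strings are assumptions, only the table/column triples come from the diff:

# Sketch: an Event triple and the WHERE fragment a search query would emit for it.
from dataclasses import dataclass
from typing import Optional

@dataclass(frozen=True)
class Event:
    ui_type: str
    table: str
    column: Optional[str]

CLICK_IOS = Event(ui_type="clickIos", table="events_ios.taps", column="label")    # ui_type assumed
SWIPE_IOS = Event(ui_type="swipeIos", table="events_ios.swipes", column="label")  # ui_type assumed

def search_condition(event: Event, value_key: str) -> str:
    return f"{event.table}.{event.column} = %({value_key})s"

print(search_condition(CLICK_IOS, "value_0"))  # events_ios.taps.label = %(value_0)s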

==== changed file ====

@@ -2,20 +2,8 @@ from chalicelib.utils import pg_client, helper
 from chalicelib.core import events

-def get_customs_by_sessionId(session_id, project_id):
-    with pg_client.PostgresClient() as cur:
-        cur.execute(cur.mogrify(f"""\
-            SELECT
-              c.*,
-              '{events.EventType.CUSTOM_IOS.ui_type}' AS type
-            FROM {events.EventType.CUSTOM_IOS.table} AS c
-            WHERE
-              c.session_id = %(session_id)s
-            ORDER BY c.timestamp;""",
-                                {"project_id": project_id, "session_id": session_id})
-                    )
-        rows = cur.fetchall()
-        return helper.dict_to_camel_case(rows)
+def get_customs_by_session_id(session_id, project_id):
+    return events.get_customs_by_session_id(session_id=session_id, project_id=project_id)

 def get_by_sessionId(session_id, project_id):
@@ -23,8 +11,8 @@ def get_by_sessionId(session_id, project_id):
         cur.execute(cur.mogrify(f"""
            SELECT
              c.*,
-             '{events.EventType.CLICK_IOS.ui_type}' AS type
-           FROM {events.EventType.CLICK_IOS.table} AS c
+             'TAP' AS type
+           FROM events_ios.taps AS c
            WHERE
              c.session_id = %(session_id)s
            ORDER BY c.timestamp;""",
@@ -35,8 +23,8 @@ def get_by_sessionId(session_id, project_id):
         cur.execute(cur.mogrify(f"""
            SELECT
              i.*,
-             '{events.EventType.INPUT_IOS.ui_type}' AS type
-           FROM {events.EventType.INPUT_IOS.table} AS i
+             'INPUT' AS type
+           FROM events_ios.inputs AS i
            WHERE
              i.session_id = %(session_id)s
            ORDER BY i.timestamp;""",
@@ -46,12 +34,21 @@ def get_by_sessionId(session_id, project_id):
         cur.execute(cur.mogrify(f"""
            SELECT
              v.*,
-             '{events.EventType.VIEW_IOS.ui_type}' AS type
-           FROM {events.EventType.VIEW_IOS.table} AS v
+             'VIEW' AS type
+           FROM events_ios.views AS v
            WHERE
              v.session_id = %(session_id)s
            ORDER BY v.timestamp;""", {"project_id": project_id, "session_id": session_id}))
         rows += cur.fetchall()
+        cur.execute(cur.mogrify(f"""
+           SELECT
+             s.*,
+             'SWIPE' AS type
+           FROM events_ios.swipes AS s
+           WHERE
+             s.session_id = %(session_id)s
+           ORDER BY s.timestamp;""", {"project_id": project_id, "session_id": session_id}))
+        rows += cur.fetchall()
         rows = helper.list_to_camel_case(rows)
         rows = sorted(rows, key=lambda k: k["timestamp"])
         return rows
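get_by_sessionId now unions taps, inputs, views, and the new swipes before a single sort by timestamp. The merge step in isolation, with sample rows standing in for database results:

# Each query contributes rows tagged with a type; the union is sorted once.
taps = [{"type": "TAP", "timestamp": 120, "label": "Login"}]
inputs = [{"type": "INPUT", "timestamp": 80, "label": "email"}]
swipes = [{"type": "SWIPE", "timestamp": 100, "label": "carousel"}]

rows = taps + inputs + swipes
rows = sorted(rows, key=lambda k: k["timestamp"])
print([r["type"] for r in rows])  # ['INPUT', 'SWIPE', 'TAP']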

==== changed file ====

@@ -63,7 +63,7 @@ def search_feature_flags(project_id: int, user_id: int, data: schemas.SearchFlag
             SELECT COUNT(1) OVER () AS count, {", ".join(feature_flag_columns)}
             FROM feature_flags
             WHERE {" AND ".join(constraints)}
-            ORDER BY updated_at {data.order.value}
+            ORDER BY updated_at {data.order}
             LIMIT %(limit)s OFFSET %(offset)s;
         """
@@ -206,7 +206,7 @@ def prepare_params_to_create_flag(feature_flag_data, project_id, user_id):
     params = {
         "project_id": project_id,
         "created_by": user_id,
-        **feature_flag_data.dict(),
+        **feature_flag_data.model_dump(),
         **conditions_data,
         **variants_data,
         "payload": json.dumps(feature_flag_data.payload)
@@ -218,7 +218,7 @@ def prepare_params_to_create_flag(feature_flag_data, project_id, user_id):
 def prepare_variants_values(feature_flag_data):
     variants_data = {}
     for i, v in enumerate(feature_flag_data.variants):
-        for k in v.dict().keys():
+        for k in v.model_dump().keys():
             variants_data[f"v_{k}_{i}"] = v.__getattribute__(k)
         variants_data[f"v_value_{i}"] = v.value
         variants_data[f"v_description_{i}"] = v.description
@@ -230,11 +230,11 @@ def prepare_variants_values(feature_flag_data):
 def prepare_conditions_values(feature_flag_data):
     conditions_data = {}
     for i, s in enumerate(feature_flag_data.conditions):
-        for k in s.dict().keys():
+        for k in s.model_dump().keys():
             conditions_data[f"{k}_{i}"] = s.__getattribute__(k)
         conditions_data[f"name_{i}"] = s.name
         conditions_data[f"rollout_percentage_{i}"] = s.rollout_percentage
-        conditions_data[f"filters_{i}"] = json.dumps([filter_.dict() for filter_ in s.filters])
+        conditions_data[f"filters_{i}"] = json.dumps([filter_.model_dump() for filter_ in s.filters])
     return conditions_data
@@ -299,7 +299,7 @@ def create_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlag
     with pg_client.PostgresClient() as cur:
         params = [
-            (feature_flag_id, c.name, c.rollout_percentage, json.dumps([filter_.dict() for filter_ in c.filters]))
+            (feature_flag_id, c.name, c.rollout_percentage, json.dumps([filter_.model_dump() for filter_ in c.filters]))
            for c in conditions]
         query = cur.mogrify(sql, params)
         cur.execute(query)
@@ -327,10 +327,10 @@ def update_feature_flag(project_id: int, feature_flag_id: int,
     )
     params = {
-        **feature_flag.dict(),
         "updated_by": user_id,
         "feature_flag_id": feature_flag_id,
         "project_id": project_id,
+        **feature_flag.model_dump(),
         "payload": json.dumps(feature_flag.payload),
     }
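prepare_conditions_values serializes each condition's nested filter models into a JSON column value. A sketch of the same pattern under pydantic v2, with illustrative field names:

# Sketch: nested pydantic v2 models flattened to JSON for a JSONB column.
import json
from typing import List
from pydantic import BaseModel

class FlagFilter(BaseModel):   # illustrative, not the project's schema
    type: str
    value: List[str]

class FlagCondition(BaseModel):  # illustrative, not the project's schema
    name: str
    rollout_percentage: int
    filters: List[FlagFilter]

cond = FlagCondition(name="beta users", rollout_percentage=50,
                     filters=[FlagFilter(type="userCountry", value=["DE", "FR"])])
payload = json.dumps([f.model_dump() for f in cond.filters])
print(payload)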

==== changed file ====

@@ -6,7 +6,7 @@ from chalicelib.utils import helper
 from chalicelib.utils import sql_helper as sh

-def filter_stages(stages: List[schemas._SessionSearchEventSchema]):
+def filter_stages(stages: List[schemas.SessionSearchEventSchema2]):
     ALLOW_TYPES = [schemas.EventType.click, schemas.EventType.input,
                    schemas.EventType.location, schemas.EventType.custom,
                    schemas.EventType.click_ios, schemas.EventType.input_ios,
@@ -15,10 +15,10 @@ def filter_stages(stages: List[schemas._SessionSearchEventSchema]):

 def __parse_events(f_events: List[dict]):
-    return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events]
+    return [schemas.SessionSearchEventSchema2.parse_obj(e) for e in f_events]

-def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
+def __fix_stages(f_events: List[schemas.SessionSearchEventSchema2]):
     if f_events is None:
         return
     events = []
@@ -41,7 +41,7 @@ def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilte
     data.events = __fix_stages(data.events)
     if len(data.events) == 0:
         return {"stages": [], "totalDropDueToIssues": 0}
-    insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
+    insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data, project_id=project_id)
     insights = helper.list_to_camel_case(insights)
     if len(insights) > 0:
         # TODO: check if this correct
@@ -64,5 +64,5 @@ def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchem
     return {
         "issues": helper.dict_to_camel_case(
-            significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
+            significance.get_issues_list(filter_d=data, project_id=project_id, first_stage=1,
                                          last_stage=len(data.events)))}

@@ -4,7 +4,7 @@ from chalicelib.utils import helper, pg_client

 def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
-    args = {"startDate": data.startDate, "endDate": data.endDate,
+    args = {"startDate": data.startTimestamp, "endDate": data.endTimestamp,
             "project_id": project_id, "url": data.url}
     constraints = ["sessions.project_id = %(project_id)s",
                    "(url = %(url)s OR path= %(url)s)",
@@ -43,13 +43,6 @@ def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
                                                        f.value, value_key=f_k))
                 constraints.append(sh.multi_conditions(f"mis.type = %({f_k})s",
                                                        f.value, value_key=f_k))
-            if len(f.filters) > 0:
-                for j, sf in enumerate(f.filters):
-                    f_k = f"issue_svalue{i}{j}"
-                    args = {**args, **sh.multi_values(sf.value, value_key=f_k)}
-                    if sf.type == schemas.IssueFilterType._selector and len(sf.value) > 0:
-                        constraints.append(sh.multi_conditions(f"clicks.selector = %({f_k})s",
-                                                               sf.value, value_key=f_k))
     if data.click_rage and not has_click_rage_filter:
         constraints.append("""(issues.session_id IS NULL
@@ -72,16 +72,15 @@ class GitHubIntegration(integration_base.BaseIntegration):
         )
         return {"state": "success"}

-    def add_edit(self, data):
+    def add_edit(self, data: schemas.IssueTrackingGithubSchema):
         s = self.get()
         if s is not None:
             return self.update(
                 changes={
-                    "token": data["token"] \
-                        if data.get("token") and len(data["token"]) > 0 and data["token"].find("***") == -1 \
-                        else s["token"]
+                    "token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \
+                        else s.token
                 },
                 obfuscate=True
             )
         else:
-            return self.add(token=data["token"], obfuscate=True)
+            return self.add(token=data.token, obfuscate=True)
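The `***` check implements the usual obfuscated-secret round-trip: tokens are returned to the client masked, so a submitted token that is empty or still contains `***` means "unchanged" and the stored value is kept. The rule in isolation, as a hedged sketch (the helper name is ours, not the codebase's):

def resolve_token(submitted: str, stored: str) -> str:
    # Keep the stored secret when the client echoes back the masked placeholder.
    if len(submitted) > 0 and "***" not in submitted:
        return submitted
    return stored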
@@ -113,21 +113,20 @@ class JIRAIntegration(integration_base.BaseIntegration):
         )
         return {"state": "success"}

-    def add_edit(self, data):
+    def add_edit(self, data: schemas.IssueTrackingJiraSchema):
         if self.integration is not None:
             return self.update(
                 changes={
-                    "username": data["username"],
-                    "token": data["token"] \
-                        if data.get("token") and len(data["token"]) > 0 and data["token"].find("***") == -1 \
-                        else self.integration["token"],
-                    "url": data["url"]
+                    "username": data.username,
+                    "token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \
+                        else self.integration.token,
+                    "url": data.url
                 },
                 obfuscate=True
             )
         else:
             return self.add(
-                username=data["username"],
-                token=data["token"],
-                url=data["url"]
+                username=data.username,
+                token=data.token,
+                url=data.url
             )
@@ -1,6 +1,8 @@
 from chalicelib.core import log_tools
 import requests
+from schemas import schemas

 IN_TY = "bugsnag"
@@ -60,14 +62,14 @@ def delete(tenant_id, project_id):
     return log_tools.delete(project_id=project_id, integration=IN_TY)

-def add_edit(tenant_id, project_id, data):
+def add_edit(tenant_id, project_id, data: schemas.IntegrationBugsnagSchema):
     s = get(project_id)
     if s is not None:
         return update(tenant_id=tenant_id, project_id=project_id,
-                      changes={"authorizationToken": data["authorizationToken"],
-                               "bugsnagProjectId": data["bugsnagProjectId"]})
+                      changes={"authorizationToken": data.authorization_token,
+                               "bugsnagProjectId": data.bugsnag_project_id})
     else:
         return add(tenant_id=tenant_id,
                    project_id=project_id,
-                   authorization_token=data["authorizationToken"],
-                   bugsnag_project_id=data["bugsnagProjectId"])
+                   authorization_token=data.authorization_token,
+                   bugsnag_project_id=data.bugsnag_project_id)
@@ -1,5 +1,6 @@
 import boto3
 from chalicelib.core import log_tools
+from schemas import schemas

 IN_TY = "cloudwatch"
@@ -102,18 +103,18 @@ def delete(tenant_id, project_id):
     return log_tools.delete(project_id=project_id, integration=IN_TY)

-def add_edit(tenant_id, project_id, data):
+def add_edit(tenant_id, project_id, data: schemas.IntegrationCloudwatchSchema):
     s = get(project_id)
     if s is not None:
         return update(tenant_id=tenant_id, project_id=project_id,
-                      changes={"awsAccessKeyId": data["awsAccessKeyId"],
-                               "awsSecretAccessKey": data["awsSecretAccessKey"],
-                               "logGroupName": data["logGroupName"],
-                               "region": data["region"]})
+                      changes={"awsAccessKeyId": data.aws_access_key_id,
+                               "awsSecretAccessKey": data.aws_secret_access_key,
+                               "logGroupName": data.log_group_name,
+                               "region": data.region})
     else:
         return add(tenant_id=tenant_id,
                    project_id=project_id,
-                   aws_access_key_id=data["awsAccessKeyId"],
-                   aws_secret_access_key=data["awsSecretAccessKey"],
-                   log_group_name=data["logGroupName"],
-                   region=data["region"])
+                   aws_access_key_id=data.aws_access_key_id,
+                   aws_secret_access_key=data.aws_secret_access_key,
+                   log_group_name=data.log_group_name,
+                   region=data.region)
@@ -1,4 +1,5 @@
 from chalicelib.core import log_tools
+from schemas import schemas

 IN_TY = "datadog"
@@ -30,14 +31,14 @@ def delete(tenant_id, project_id):
     return log_tools.delete(project_id=project_id, integration=IN_TY)

-def add_edit(tenant_id, project_id, data):
+def add_edit(tenant_id, project_id, data: schemas.IntegrationDatadogSchema):
     s = get(project_id)
     if s is not None:
         return update(tenant_id=tenant_id, project_id=project_id,
-                      changes={"apiKey": data["apiKey"],
-                               "applicationKey": data["applicationKey"]})
+                      changes={"apiKey": data.api_key,
+                               "applicationKey": data.application_key})
     else:
         return add(tenant_id=tenant_id,
                    project_id=project_id,
-                   api_key=data["apiKey"],
-                   application_key=data["applicationKey"])
+                   api_key=data.api_key,
+                   application_key=data.application_key)
@@ -1,8 +1,9 @@
-# from elasticsearch import Elasticsearch, RequestsHttpConnection
 from elasticsearch import Elasticsearch
 from chalicelib.core import log_tools
 import logging
+from schemas import schemas

 logging.getLogger('elasticsearch').level = logging.ERROR

 IN_TY = "elasticsearch"
@@ -44,17 +45,16 @@ def delete(tenant_id, project_id):
     return log_tools.delete(project_id=project_id, integration=IN_TY)

-def add_edit(tenant_id, project_id, data):
+def add_edit(tenant_id, project_id, data: schemas.IntegrationElasticsearchSchema):
     s = get(project_id)
     if s is not None:
         return update(tenant_id=tenant_id, project_id=project_id,
-                      changes={"host": data["host"], "apiKeyId": data["apiKeyId"], "apiKey": data["apiKey"],
-                               "indexes": data["indexes"], "port": data["port"]})
+                      changes={"host": data.host, "apiKeyId": data.api_key_id, "apiKey": data.api_key,
+                               "indexes": data.indexes, "port": data.port})
     else:
-        return add(tenant_id=tenant_id,
-                   project_id=project_id,
-                   host=data["host"], api_key=data["apiKey"], api_key_id=data["apiKeyId"], indexes=data["indexes"],
-                   port=data["port"])
+        return add(tenant_id=tenant_id, project_id=project_id,
+                   host=data.host, api_key=data.api_key, api_key_id=data.api_key_id,
+                   indexes=data.indexes, port=data.port)

 def __get_es_client(host, port, api_key_id, api_key, use_ssl=False, timeout=15):
@@ -64,15 +64,9 @@ def __get_es_client(host, port, api_key_id, api_key, use_ssl=False, timeout=15):
     args = {
         "hosts": [{"host": host, "port": port, "scheme": scheme}],
         "verify_certs": False,
-        # "ca_certs": False,
-        # "connection_class": RequestsHttpConnection,
         "request_timeout": timeout,
         "api_key": (api_key_id, api_key)
     }
-    # if api_key_id is not None and len(api_key_id) > 0:
-    #     # args["http_auth"] = (username, password)
-    #     token = "ApiKey " + base64.b64encode(f"{api_key_id}:{api_key}".encode("utf-8")).decode("utf-8")
-    #     args["headers"] = {"Authorization": token}
     es = Elasticsearch(
         **args
     )
@@ -88,8 +82,8 @@ def __get_es_client(host, port, api_key_id, api_key, use_ssl=False, timeout=15):
     return es

-def ping(tenant_id, host, port, apiKeyId, apiKey):
-    es = __get_es_client(host, port, apiKeyId, apiKey, timeout=3)
+def ping(tenant_id, data: schemas.IntegrationElasticsearchTestSchema):
+    es = __get_es_client(data.host, data.port, data.api_key_id, data.api_key, timeout=3)
     if es is None:
         return {"state": False}
     return {"state": es.ping()}
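With the typed `ping` signature, callers construct the test schema instead of passing loose kwargs. Assuming `IntegrationElasticsearchTestSchema` exposes the four fields read above (host, port, api_key_id, api_key; the constructor names are our assumption), a call would look roughly like:

result = ping(tenant_id, schemas.IntegrationElasticsearchTestSchema(
    host="es.internal", port=9200, api_key_id="kid", api_key="secret"))
# -> {"state": True} if the 8.x client's es.ping() succeeds, {"state": False} otherwise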
@@ -1,4 +1,5 @@
 from chalicelib.core import log_tools
+from schemas import schemas

 IN_TY = "newrelic"
@@ -34,17 +35,16 @@ def delete(tenant_id, project_id):
     return log_tools.delete(project_id=project_id, integration=IN_TY)

-def add_edit(tenant_id, project_id, data):
+def add_edit(tenant_id, project_id, data: schemas.IntegrationNewrelicSchema):
     s = get(project_id)
     if s is not None:
         return update(tenant_id=tenant_id, project_id=project_id,
-                      changes={"applicationId": data["applicationId"],
-                               "xQueryKey": data["xQueryKey"],
-                               "region": data["region"]})
+                      changes={"applicationId": data.application_id,
+                               "xQueryKey": data.x_query_key,
+                               "region": data.region})
     else:
         return add(tenant_id=tenant_id,
                    project_id=project_id,
-                   application_id=data["applicationId"],
-                   x_query_key=data["xQueryKey"],
-                   region=data["region"]
-                   )
+                   application_id=data.application_id,
+                   x_query_key=data.x_query_key,
+                   region=data.region)
@@ -1,4 +1,5 @@
 from chalicelib.core import log_tools
+from schemas import schemas

 IN_TY = "rollbar"
@@ -27,12 +28,12 @@ def delete(tenant_id, project_id):
     return log_tools.delete(project_id=project_id, integration=IN_TY)

-def add_edit(tenant_id, project_id, data):
+def add_edit(tenant_id, project_id, data: schemas.IntegrationRollbarSchema):
     s = get(project_id)
     if s is not None:
         return update(tenant_id=tenant_id, project_id=project_id,
-                      changes={"accessToken": data["accessToken"]})
+                      changes={"accessToken": data.access_token})
     else:
         return add(tenant_id=tenant_id,
                    project_id=project_id,
-                   access_token=data["accessToken"])
+                   access_token=data.access_token)
@@ -1,5 +1,6 @@
 import requests
 from chalicelib.core import log_tools
+from schemas import schemas

 IN_TY = "sentry"
@@ -35,18 +36,19 @@ def delete(tenant_id, project_id):
     return log_tools.delete(project_id=project_id, integration=IN_TY)

-def add_edit(tenant_id, project_id, data):
+def add_edit(tenant_id, project_id, data: schemas.IntegrationSentrySchema):
     s = get(project_id)
     if s is not None:
         return update(tenant_id=tenant_id, project_id=project_id,
-                      changes={"projectSlug": data["projectSlug"],
-                               "organizationSlug": data["organizationSlug"],
-                               "token": data["token"]})
+                      changes={"projectSlug": data.project_slug,
+                               "organizationSlug": data.organization_slug,
+                               "token": data.token})
     else:
         return add(tenant_id=tenant_id,
                    project_id=project_id,
-                   project_slug=data["projectSlug"],
-                   organization_slug=data["organizationSlug"], token=data["token"])
+                   project_slug=data.project_slug,
+                   organization_slug=data.organization_slug,
+                   token=data.token)

 def proxy_get(tenant_id, project_id, event_id):
@@ -1,4 +1,5 @@
 from chalicelib.core import log_tools
+from schemas import schemas

 IN_TY = "stackdriver"
@@ -29,14 +30,13 @@ def delete(tenant_id, project_id):
     return log_tools.delete(project_id=project_id, integration=IN_TY)

-def add_edit(tenant_id, project_id, data):
+def add_edit(tenant_id, project_id, data: schemas.IntegartionStackdriverSchema):
     s = get(project_id)
     if s is not None:
         return update(tenant_id=tenant_id, project_id=project_id,
-                      changes={"serviceAccountCredentials": data["serviceAccountCredentials"],
-                               "logName": data["logName"]})
+                      changes={"serviceAccountCredentials": data.service_account_credentials,
+                               "logName": data.log_name})
     else:
-        return add(tenant_id=tenant_id,
-                   project_id=project_id,
-                   service_account_credentials=data["serviceAccountCredentials"],
-                   log_name=data["logName"])
+        return add(tenant_id=tenant_id, project_id=project_id,
+                   service_account_credentials=data.service_account_credentials,
+                   log_name=data.log_name)
@@ -1,4 +1,5 @@
 from chalicelib.core import log_tools
+from schemas import schemas

 IN_TY = "sumologic"
@@ -38,16 +39,16 @@ def delete(tenant_id, project_id):
     return log_tools.delete(project_id=project_id, integration=IN_TY)

-def add_edit(tenant_id, project_id, data):
+def add_edit(tenant_id, project_id, data: schemas.IntegrationSumologicSchema):
     s = get(project_id)
     if s is not None:
         return update(tenant_id=tenant_id, project_id=project_id,
-                      changes={"accessId": data["accessId"],
-                               "accessKey": data["accessKey"],
-                               "region": data["region"]})
+                      changes={"accessId": data.access_id,
+                               "accessKey": data.access_key,
+                               "region": data.region})
     else:
         return add(tenant_id=tenant_id,
                    project_id=project_id,
-                   access_id=data["accessId"],
-                   access_key=data["accessKey"],
-                   region=data["region"])
+                   access_id=data.access_id,
+                   access_key=data.access_key,
+                   region=data.region)
@@ -243,45 +243,45 @@ def get_keys_by_projects(project_ids):
     return results

-def add_edit_delete(tenant_id, project_id, new_metas):
-    old_metas = get(project_id)
-    old_indexes = [k["index"] for k in old_metas]
-    new_indexes = [k["index"] for k in new_metas if "index" in k]
-    new_keys = [k["key"] for k in new_metas]
-
-    add_metas = [k["key"] for k in new_metas
-                 if "index" not in k]
-    new_metas = {k["index"]: {"key": k["key"]} for
-                 k in new_metas if
-                 "index" in k}
-    old_metas = {k["index"]: {"key": k["key"]} for k in old_metas}
-
-    if len(new_keys) > 20:
-        return {"errors": ["you cannot add more than 20 key"]}
-    for k in new_metas.keys():
-        if re.match(regex, new_metas[k]["key"]) is None:
-            return {"errors": [f"invalid key {k}"]}
-    for k in add_metas:
-        if re.match(regex, k) is None:
-            return {"errors": [f"invalid key {k}"]}
-    if len(new_indexes) > len(set(new_indexes)):
-        return {"errors": ["duplicate indexes"]}
-    if len(new_keys) > len(set(new_keys)):
-        return {"errors": ["duplicate keys"]}
-    to_delete = list(set(old_indexes) - set(new_indexes))
-
-    with pg_client.PostgresClient() as cur:
-        for d in to_delete:
-            delete(tenant_id=tenant_id, project_id=project_id, index=d)
-
-        for k in add_metas:
-            add(tenant_id=tenant_id, project_id=project_id, new_name=k)
-
-        for k in new_metas.keys():
-            if new_metas[k]["key"].lower() != old_metas[k]["key"]:
-                edit(tenant_id=tenant_id, project_id=project_id, index=k, new_name=new_metas[k]["key"])
-
-    return {"data": get(project_id)}
+# def add_edit_delete(tenant_id, project_id, new_metas):
+#     old_metas = get(project_id)
+#     old_indexes = [k["index"] for k in old_metas]
+#     new_indexes = [k["index"] for k in new_metas if "index" in k]
+#     new_keys = [k["key"] for k in new_metas]
+#
+#     add_metas = [k["key"] for k in new_metas
+#                  if "index" not in k]
+#     new_metas = {k["index"]: {"key": k["key"]} for
+#                  k in new_metas if
+#                  "index" in k}
+#     old_metas = {k["index"]: {"key": k["key"]} for k in old_metas}
+#
+#     if len(new_keys) > 20:
+#         return {"errors": ["you cannot add more than 20 key"]}
+#     for k in new_metas.keys():
+#         if re.match(regex, new_metas[k]["key"]) is None:
+#             return {"errors": [f"invalid key {k}"]}
+#     for k in add_metas:
+#         if re.match(regex, k) is None:
+#             return {"errors": [f"invalid key {k}"]}
+#     if len(new_indexes) > len(set(new_indexes)):
+#         return {"errors": ["duplicate indexes"]}
+#     if len(new_keys) > len(set(new_keys)):
+#         return {"errors": ["duplicate keys"]}
+#     to_delete = list(set(old_indexes) - set(new_indexes))
+#
+#     with pg_client.PostgresClient() as cur:
+#         for d in to_delete:
+#             delete(tenant_id=tenant_id, project_id=project_id, index=d)
+#
+#         for k in add_metas:
+#             add(tenant_id=tenant_id, project_id=project_id, new_name=k)
+#
+#         for k in new_metas.keys():
+#             if new_metas[k]["key"].lower() != old_metas[k]["key"]:
+#                 edit(tenant_id=tenant_id, project_id=project_id, index=k, new_name=new_metas[k]["key"])
+#
+#     return {"data": get(project_id)}

 def get_remaining_metadata_with_count(tenant_id):
@@ -42,7 +42,7 @@ def get_all_count(tenant_id, user_id):
 def view_notification(user_id, notification_ids=[], tenant_id=None, startTimestamp=None, endTimestamp=None):
-    if (notification_ids is None or len(notification_ids) == 0) and endTimestamp is None:
+    if len(notification_ids) == 0 and endTimestamp is None:
         return False
     if startTimestamp is None:
         startTimestamp = 0
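One caveat the shorter guard inherits: it now assumes callers always pass a list, so a caller that previously passed `None` would hit `len(None)`. If such callers exist, the conventional defensive form (a sketch, not what the commit does) would be:

def view_notification(user_id, notification_ids=None, tenant_id=None, startTimestamp=None, endTimestamp=None):
    notification_ids = notification_ids if notification_ids is not None else []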
@@ -1,8 +1,13 @@
+from typing import List
+
 import schemas
+from chalicelib.core import metadata
 from chalicelib.core.metrics import __get_constraints, __get_constraint_values
 from chalicelib.utils import helper, dev
 from chalicelib.utils import pg_client
 from chalicelib.utils.TimeUTC import TimeUTC
+from chalicelib.utils import sql_helper as sh
+from time import time

 def __transform_journey(rows):
@@ -19,90 +24,346 @@ def __transform_journey(rows):
     return {"nodes": nodes, "links": sorted(links, key=lambda x: x["value"], reverse=True)}

-JOURNEY_DEPTH = 5
+def __transform_journey2(rows, reverse_path=False):
+    # nodes should contain duplicates for different steps otherwise the UI crashes
+    nodes = []
+    nodes_values = []
+    links = []
+    for r in rows:
+        source = f"{r['event_number_in_session']}_{r['event_type']}_{r['e_value']}"
+        if source not in nodes:
+            nodes.append(source)
+            # TODO: remove this after UI supports long values
+            nodes_values.append({"name": r['e_value'][:10], "eventType": r['event_type']})
+        if r['next_value']:
+            target = f"{r['event_number_in_session'] + 1}_{r['next_type']}_{r['next_value']}"
+            if target not in nodes:
+                nodes.append(target)
+                # TODO: remove this after UI supports long values
+                nodes_values.append({"name": r['next_value'][:10], "eventType": r['next_type']})
+            link = {"eventType": r['event_type'], "value": r["sessions_count"],
+                    "avgTimeToTarget": r["avg_time_to_target"]}
+            if not reverse_path:
+                link["source"] = nodes.index(source)
+                link["target"] = nodes.index(target)
+            else:
+                link["source"] = nodes.index(target)
+                link["target"] = nodes.index(source)
+            links.append(link)
+    return {"nodes": nodes_values,
+            "links": sorted(links, key=lambda x: x["value"], reverse=True)}

 JOURNEY_TYPES = {
-    schemas.ProductAnalyticsEventType.location: {"table": "events.pages", "column": "path", "table_id": "message_id"},
-    schemas.ProductAnalyticsEventType.click: {"table": "events.clicks", "column": "label", "table_id": "message_id"},
-    schemas.ProductAnalyticsEventType.input: {"table": "events.inputs", "column": "label", "table_id": "message_id"},
-    schemas.ProductAnalyticsEventType.custom_event: {"table": "events_common.customs", "column": "name",
-                                                     "table_id": "seq_index"}
+    schemas.ProductAnalyticsSelectedEventType.location: {"table": "events.pages", "column": "path"},
+    schemas.ProductAnalyticsSelectedEventType.click: {"table": "events.clicks", "column": "label"},
+    schemas.ProductAnalyticsSelectedEventType.input: {"table": "events.inputs", "column": "label"},
+    schemas.ProductAnalyticsSelectedEventType.custom_event: {"table": "events_common.customs", "column": "name"}
 }

-def path_analysis(project_id, data: schemas.PathAnalysisSchema):
+def path_analysis(project_id: int, data: schemas.PathAnalysisSchema,
+                  selected_event_type: List[schemas.ProductAnalyticsSelectedEventType],
+                  density: int = 4, hide_minor_paths: bool = False):
     # pg_sub_query_subset = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
     #                                         time_constraint=True)
     # TODO: check if data=args is required
-    pg_sub_query_subset = __get_constraints(project_id=project_id, duration=True, main_table="sessions",
-                                            time_constraint=True)
-    event_start = None
-    event_table = JOURNEY_TYPES[schemas.ProductAnalyticsEventType.location]["table"]
-    event_column = JOURNEY_TYPES[schemas.ProductAnalyticsEventType.location]["column"]
-    event_table_id = JOURNEY_TYPES[schemas.ProductAnalyticsEventType.location]["table_id"]
+    pg_sub_query_subset = __get_constraints(project_id=project_id, duration=True, main_table="s", time_constraint=True)
+    sub_events = []
+    start_points_join = ""
+    start_points_conditions = []
+    sessions_conditions = ["start_ts>=%(startTimestamp)s", "start_ts<%(endTimestamp)s",
+                           "project_id=%(project_id)s", "events_count > 1", "duration>0"]
+    if len(selected_event_type) == 0:
+        selected_event_type.append(schemas.ProductAnalyticsSelectedEventType.location)
+        sub_events.append({"table": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["table"],
+                           "column": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["column"],
+                           "eventType": schemas.ProductAnalyticsSelectedEventType.location.value})
+    else:
+        for v in selected_event_type:
+            if JOURNEY_TYPES.get(v):
+                sub_events.append({"table": JOURNEY_TYPES[v]["table"],
+                                   "column": JOURNEY_TYPES[v]["column"],
+                                   "eventType": v})
     extra_values = {}
-    for f in data.filters:
-        if f.type == schemas.ProductAnalyticsFilterType.start_point:
-            event_start = f.value[0]
-        elif f.type == schemas.ProductAnalyticsFilterType.event_type and JOURNEY_TYPES.get(f.value[0]):
-            event_table = JOURNEY_TYPES[f.value[0]]["table"]
-            event_column = JOURNEY_TYPES[f.value[0]]["column"]
-        elif f.type == schemas.ProductAnalyticsFilterType.user_id:
-            pg_sub_query_subset.append(f"sessions.user_id = %(user_id)s")
-            extra_values["user_id"] = f.value
+    reverse = False
+    meta_keys = None
+    exclusions = {}
+    for i, f in enumerate(data.filters):
+        op = sh.get_sql_operator(f.operator)
+        is_any = sh.isAny_opreator(f.operator)
+        is_not = sh.is_negation_operator(f.operator)
+        is_undefined = sh.isUndefined_operator(f.operator)
+        f_k = f"f_value_{i}"
+        extra_values = {**extra_values, **sh.multi_values(f.value, value_key=f_k)}
+        if f.type in [schemas.ProductAnalyticsFilterType.start_point, schemas.ProductAnalyticsFilterType.end_point]:
+            for sf in f.filters:
+                extra_values = {**extra_values, **sh.multi_values(sf.value, value_key=f_k)}
+                start_points_conditions.append(f"(event_type='{sf.type}' AND " +
+                                               sh.multi_conditions(f'e_value {op} %({f_k})s', sf.value, is_not=is_not,
+                                                                   value_key=f_k)
+                                               + ")")
+            reverse = f.type == schemas.ProductAnalyticsFilterType.end_point
+        elif f.type == schemas.ProductAnalyticsFilterType.exclude:
+            for sf in f.filters:
+                if sf.type in selected_event_type:
+                    extra_values = {**extra_values, **sh.multi_values(sf.value, value_key=f_k)}
+                    exclusions[sf.type] = [
+                        sh.multi_conditions(f'{JOURNEY_TYPES[sf.type]["column"]} != %({f_k})s', sf.value, is_not=True,
+                                            value_key=f_k)]
+        # ---- meta-filters
+        if f.type == schemas.FilterType.user_browser:
+            if is_any:
+                sessions_conditions.append('user_browser IS NOT NULL')
+            else:
+                sessions_conditions.append(
+                    sh.multi_conditions(f'user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
+        elif f.type in [schemas.FilterType.user_os]:
+            if is_any:
+                sessions_conditions.append('user_os IS NOT NULL')
+            else:
+                sessions_conditions.append(
+                    sh.multi_conditions(f'user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
+        elif f.type in [schemas.FilterType.user_device]:
+            if is_any:
+                sessions_conditions.append('user_device IS NOT NULL')
+            else:
+                sessions_conditions.append(
+                    sh.multi_conditions(f'user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
+        elif f.type in [schemas.FilterType.user_country]:
+            if is_any:
+                sessions_conditions.append('user_country IS NOT NULL')
+            else:
+                sessions_conditions.append(
+                    sh.multi_conditions(f'user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
+        elif f.type == schemas.FilterType.user_city:
+            if is_any:
+                sessions_conditions.append('user_city IS NOT NULL')
+            else:
+                sessions_conditions.append(
+                    sh.multi_conditions(f'user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
+        elif f.type == schemas.FilterType.user_state:
+            if is_any:
+                sessions_conditions.append('user_state IS NOT NULL')
+            else:
+                sessions_conditions.append(
+                    sh.multi_conditions(f'user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
+        elif f.type in [schemas.FilterType.utm_source]:
+            if is_any:
+                sessions_conditions.append('utm_source IS NOT NULL')
+            elif is_undefined:
+                sessions_conditions.append('utm_source IS NULL')
+            else:
+                sessions_conditions.append(
+                    sh.multi_conditions(f'utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
+                                        value_key=f_k))
+        elif f.type in [schemas.FilterType.utm_medium]:
+            if is_any:
+                sessions_conditions.append('utm_medium IS NOT NULL')
+            elif is_undefined:
+                sessions_conditions.append('utm_medium IS NULL')
+            else:
+                sessions_conditions.append(
+                    sh.multi_conditions(f'utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
+                                        value_key=f_k))
+        elif f.type in [schemas.FilterType.utm_campaign]:
+            if is_any:
+                sessions_conditions.append('utm_campaign IS NOT NULL')
+            elif is_undefined:
+                sessions_conditions.append('utm_campaign IS NULL')
+            else:
+                sessions_conditions.append(
+                    sh.multi_conditions(f'utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
+                                        value_key=f_k))
+        elif f.type == schemas.FilterType.duration:
+            if len(f.value) > 0 and f.value[0] is not None:
+                sessions_conditions.append("duration >= %(minDuration)s")
+                extra_values["minDuration"] = f.value[0]
+            if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0:
+                sessions_conditions.append("duration <= %(maxDuration)s")
+                extra_values["maxDuration"] = f.value[1]
+        elif f.type == schemas.FilterType.referrer:
+            # extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"
+            if is_any:
+                sessions_conditions.append('base_referrer IS NOT NULL')
+            else:
+                sessions_conditions.append(
+                    sh.multi_conditions(f"base_referrer {op} %({f_k})s", f.value, is_not=is_not,
+                                        value_key=f_k))
+        elif f.type == schemas.FilterType.metadata:
+            # get metadata list only if you need it
+            if meta_keys is None:
+                meta_keys = metadata.get(project_id=project_id)
+                meta_keys = {m["key"]: m["index"] for m in meta_keys}
+            if f.source in meta_keys.keys():
+                if is_any:
+                    sessions_conditions.append(f"{metadata.index_to_colname(meta_keys[f.source])} IS NOT NULL")
+                elif is_undefined:
+                    sessions_conditions.append(f"{metadata.index_to_colname(meta_keys[f.source])} IS NULL")
+                else:
+                    sessions_conditions.append(
+                        sh.multi_conditions(
+                            f"{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text",
+                            f.value, is_not=is_not, value_key=f_k))
+        elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
+            if is_any:
+                sessions_conditions.append('user_id IS NOT NULL')
+            elif is_undefined:
+                sessions_conditions.append('user_id IS NULL')
+            else:
+                sessions_conditions.append(
+                    sh.multi_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not,
+                                        value_key=f_k))
+        elif f.type in [schemas.FilterType.user_anonymous_id,
+                        schemas.FilterType.user_anonymous_id_ios]:
+            if is_any:
+                sessions_conditions.append('user_anonymous_id IS NOT NULL')
+            elif is_undefined:
+                sessions_conditions.append('user_anonymous_id IS NULL')
+            else:
+                sessions_conditions.append(
+                    sh.multi_conditions(f"user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
+                                        value_key=f_k))
+        elif f.type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
+            if is_any:
+                sessions_conditions.append('rev_id IS NOT NULL')
+            elif is_undefined:
+                sessions_conditions.append('rev_id IS NULL')
+            else:
+                sessions_conditions.append(
+                    sh.multi_conditions(f"rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
+        elif f.type == schemas.FilterType.platform:
+            # op = __ sh.get_sql_operator(f.operator)
+            sessions_conditions.append(
+                sh.multi_conditions(f"user_device_type {op} %({f_k})s", f.value, is_not=is_not,
+                                    value_key=f_k))
+        elif f.type == schemas.FilterType.issue:
+            if is_any:
+                sessions_conditions.append("array_length(issue_types, 1) > 0")
+            else:
+                sessions_conditions.append(
+                    sh.multi_conditions(f"%({f_k})s {op} ANY (issue_types)", f.value, is_not=is_not,
+                                        value_key=f_k))
+        elif f.type == schemas.FilterType.events_count:
+            sessions_conditions.append(
+                sh.multi_conditions(f"events_count {op} %({f_k})s", f.value, is_not=is_not,
+                                    value_key=f_k))
+
+    events_subquery = []
+    for t in sub_events:
+        sub_events_conditions = ["e.timestamp >= %(startTimestamp)s",
+                                 "e.timestamp < %(endTimestamp)s"] + exclusions.get(t["eventType"], [])
+        events_subquery.append(f"""\
+        SELECT session_id, {t["column"]} AS e_value, timestamp, '{t["eventType"]}' AS event_type
+        FROM {t["table"]} AS e
+        INNER JOIN sub_sessions USING (session_id)
+        WHERE {" AND ".join(sub_events_conditions)}""")
+    events_subquery = "\n UNION ALL \n".join(events_subquery)
+    if reverse:
+        path_direction = "DESC"
+    else:
+        path_direction = ""
+    if len(start_points_conditions) == 0:
+        start_points_join = """INNER JOIN
+                   (SELECT event_type, e_value
+                    FROM ranked_events
+                    WHERE event_number_in_session = 1
+                    GROUP BY event_type, e_value
+                    ORDER BY count(1) DESC
+                    LIMIT 2
+                   ) AS top_start_events USING (event_type, e_value)"""
+    else:
+        start_points_conditions = ["(" + " OR ".join(start_points_conditions) + ")"]
+        start_points_conditions.append("event_number_in_session = 1")
+        start_points_conditions.append("next_value IS NOT NULL")

     with pg_client.PostgresClient() as cur:
-        pg_query = f"""SELECT source_event,
-                              target_event,
-                              count(*) AS value
-                       FROM (SELECT event_number || '_' || value as target_event,
-                                    LAG(event_number || '_' || value, 1) OVER ( PARTITION BY session_rank ) AS source_event
-                             FROM (SELECT value,
-                                          session_rank,
-                                          message_id,
-                                          ROW_NUMBER() OVER ( PARTITION BY session_rank ORDER BY timestamp ) AS event_number
-                                   {f"FROM (SELECT * FROM (SELECT *, MIN(mark) OVER ( PARTITION BY session_id , session_rank ORDER BY timestamp ) AS max FROM (SELECT *, CASE WHEN value = %(event_start)s THEN timestamp ELSE NULL END as mark"
-                                   if event_start else ""}
-                                   FROM (SELECT session_id,
-                                                message_id,
-                                                timestamp,
-                                                value,
-                                                SUM(new_session) OVER (ORDER BY session_id, timestamp) AS session_rank
-                                         FROM (SELECT *,
-                                                      CASE
-                                                          WHEN source_timestamp IS NULL THEN 1
-                                                          ELSE 0 END AS new_session
-                                               FROM (SELECT session_id,
-                                                            {event_table_id} AS message_id,
-                                                            timestamp,
-                                                            {event_column} AS value,
-                                                            LAG(timestamp)
-                                                            OVER (PARTITION BY session_id ORDER BY timestamp) AS source_timestamp
-                                                     FROM {event_table} INNER JOIN public.sessions USING (session_id)
-                                                     WHERE {" AND ".join(pg_sub_query_subset)}
-                                                    ) AS related_events) AS ranked_events) AS processed
-                                   {") AS marked) AS maxed WHERE timestamp >= max) AS filtered" if event_start else ""}
-                                  ) AS sorted_events
-                             WHERE event_number <= %(JOURNEY_DEPTH)s) AS final
-                       WHERE source_event IS NOT NULL
-                         and target_event IS NOT NULL
-                       GROUP BY source_event, target_event
-                       ORDER BY value DESC
-                       LIMIT 20;"""
+        pg_query = f"""\
+WITH sub_sessions AS (SELECT session_id
+                      FROM public.sessions
+                      WHERE {" AND ".join(sessions_conditions)}),
+     sub_events AS ({events_subquery}),
+     ranked_events AS (SELECT *
+                       FROM (SELECT session_id,
+                                    event_type,
+                                    e_value,
+                                    row_number() OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) AS event_number_in_session,
+                                    LEAD(e_value, 1) OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) AS next_value,
+                                    LEAD(event_type, 1) OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) AS next_type,
+                                    abs(LEAD(timestamp, 1) OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) -
+                                        timestamp) AS time_to_next
+                             FROM sub_events
+                             ORDER BY session_id) AS full_ranked_events
+                       WHERE event_number_in_session < %(density)s),
+     start_points AS (SELECT session_id
+                      FROM ranked_events {start_points_join}
+                      WHERE {" AND ".join(start_points_conditions)}),
+     limited_events AS (SELECT *
+                        FROM (SELECT *,
+                                     row_number()
+                                     OVER (PARTITION BY event_number_in_session, event_type, e_value ORDER BY sessions_count DESC ) AS _event_number_in_group
+                              FROM (SELECT event_number_in_session,
+                                           event_type,
+                                           e_value,
+                                           next_type,
+                                           next_value,
+                                           time_to_next,
+                                           count(1) AS sessions_count
+                                    FROM ranked_events
+                                             INNER JOIN start_points USING (session_id)
+                                    GROUP BY event_number_in_session, event_type, e_value, next_type, next_value,
+                                             time_to_next) AS groupped_events) AS ranked_groupped_events
+                        WHERE _event_number_in_group < %(eventThresholdNumberInGroup)s)
+SELECT event_number_in_session,
+       event_type,
+       e_value,
+       next_type,
+       next_value,
+       sessions_count,
+       avg(time_to_next) AS avg_time_to_target
+FROM limited_events
+GROUP BY event_number_in_session, event_type, e_value, next_type, next_value, sessions_count
+ORDER BY event_number_in_session, e_value, next_value;"""
         params = {"project_id": project_id, "startTimestamp": data.startTimestamp,
-                  "endTimestamp": data.endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH,
+                  "endTimestamp": data.endTimestamp, "density": density,
+                  "eventThresholdNumberInGroup": 8 if hide_minor_paths else 6,
                   # TODO: add if data=args is required
                   # **__get_constraint_values(args),
                   **extra_values}
         query = cur.mogrify(pg_query, params)
-        print("----------------------")
-        print(query)
-        print("----------------------")
+        _now = time()
         cur.execute(query)
+        if time() - _now > 3:
+            print(f">>>>>>>>>PathAnalysis long query ({int(time() - _now)}s)<<<<<<<<<")
+            print("----------------------")
+            print(query)
+            print("----------------------")
         rows = cur.fetchall()
-    return __transform_journey(rows)
+    return __transform_journey2(rows=rows, reverse_path=reverse)

 #
 # def __compute_weekly_percentage(rows):
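For reference, `__transform_journey2` emits the Sankey-style payload the Path Analysis UI consumes. A toy row (all values invented) illustrates the shape:

rows = [{"event_number_in_session": 1, "event_type": "LOCATION", "e_value": "/home",
         "next_type": "CLICK", "next_value": "signup", "sessions_count": 42,
         "avg_time_to_target": 1800.0}]
__transform_journey2(rows)
# {"nodes": [{"name": "/home", "eventType": "LOCATION"},
#            {"name": "signup", "eventType": "CLICK"}],
#  "links": [{"eventType": "LOCATION", "value": 42, "avgTimeToTarget": 1800.0,
#             "source": 0, "target": 1}]}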
@@ -41,12 +41,12 @@ def __update(tenant_id, project_id, changes):
     return helper.dict_to_camel_case(cur.fetchone())

-def __create(tenant_id, name):
+def __create(tenant_id, data):
     with pg_client.PostgresClient() as cur:
-        query = cur.mogrify(f"""INSERT INTO public.projects (name, active)
-                                VALUES (%(name)s,TRUE)
+        query = cur.mogrify(f"""INSERT INTO public.projects (name, platform, active)
+                                VALUES (%(name)s,%(platform)s,TRUE)
                                 RETURNING project_id;""",
-                            {"name": name})
+                            data)
         cur.execute(query=query)
         project_id = cur.fetchone()["project_id"]
     return get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True)
@@ -69,7 +69,8 @@ def get_projects(tenant_id: int, gdpr: bool = False, recorded: bool = False):
         query = cur.mogrify(f"""{"SELECT *, first_recorded IS NOT NULL AS recorded FROM (" if recorded else ""}
                                 SELECT s.project_id, s.name, s.project_key, s.save_request_payloads, s.first_recorded_session_at,
-                                       created_at, sessions_last_check_at, sample_rate {extra_projection}
+                                       s.created_at, s.sessions_last_check_at, s.sample_rate, s.platform
+                                       {extra_projection}
                                 FROM public.projects AS s
                                 WHERE s.deleted_at IS NULL
                                 ORDER BY s.name {") AS raw" if recorded else ""};""",
@@ -159,7 +160,7 @@ def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authoriza
         admin = users.get(user_id=user_id, tenant_id=tenant_id)
         if not admin["admin"] and not admin["superAdmin"]:
             return {"errors": ["unauthorized"]}
-    return {"data": __create(tenant_id=tenant_id, name=data.name)}
+    return {"data": __create(tenant_id=tenant_id, data=data.model_dump())}

 def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema):
@@ -169,7 +170,7 @@ def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema):
     if not admin["admin"] and not admin["superAdmin"]:
         return {"errors": ["unauthorized"]}
     return {"data": __update(tenant_id=tenant_id, project_id=project_id,
-                             changes={"name": data.name})}
+                             changes=data.model_dump())}

 def delete(tenant_id, user_id, project_id):
@@ -200,14 +201,14 @@ def get_gdpr(project_id):
     return row

-def edit_gdpr(project_id, gdpr):
+def edit_gdpr(project_id, gdpr: schemas.GdprSchema):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify("""UPDATE public.projects
                                SET gdpr = gdpr|| %(gdpr)s
                                WHERE project_id = %(project_id)s
                                  AND deleted_at ISNULL
                                RETURNING gdpr;""",
-                            {"project_id": project_id, "gdpr": json.dumps(gdpr)})
+                            {"project_id": project_id, "gdpr": json.dumps(gdpr.model_dump_json())})
         cur.execute(query=query)
         row = cur.fetchone()
         if not row:
@@ -252,15 +253,9 @@ def get_capture_status(project_id):
     return helper.dict_to_camel_case(cur.fetchone())

-def update_capture_status(project_id, changes):
-    if "rate" not in changes and "captureAll" not in changes:
-        return {"errors": ["please provide 'rate' and/or 'captureAll' attributes to update."]}
-    if int(changes["rate"]) < 0 or int(changes["rate"]) > 100:
-        return {"errors": ["'rate' must be between 0..100."]}
-    sample_rate = 0
-    if "rate" in changes:
-        sample_rate = int(changes["rate"])
-    if changes.get("captureAll"):
+def update_capture_status(project_id, changes: schemas.SampleRateSchema):
+    sample_rate = changes.rate
+    if changes.capture_all:
         sample_rate = 100
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify("""UPDATE public.projects
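`update_capture_status` drops its manual range checks because they can now live on the schema under pydantic v2. The actual `SampleRateSchema` is defined elsewhere in schemas.py; a minimal equivalent (the field defaults here are our assumption) would be:

from pydantic import BaseModel, Field

class SampleRateSchema(BaseModel):
    rate: int = Field(default=0, ge=0, le=100)  # replaces the handler's manual 0..100 check
    capture_all: bool = False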
@@ -7,7 +7,7 @@ from chalicelib.utils.TimeUTC import TimeUTC

 def create(project_id, user_id, data: schemas.SavedSearchSchema):
     with pg_client.PostgresClient() as cur:
-        data = data.dict()
+        data = data.model_dump()
         data["filter"] = json.dumps(data["filter"])
         query = cur.mogrify("""\
                    INSERT INTO public.searches (project_id, user_id, name, filter,is_public)
@@ -25,7 +25,7 @@ def create(project_id, user_id, data: schemas.SavedSearchSchema):
 def update(search_id, project_id, user_id, data: schemas.SavedSearchSchema):
     with pg_client.PostgresClient() as cur:
-        data = data.dict()
+        data = data.model_dump()
         data["filter"] = json.dumps(data["filter"])
         query = cur.mogrify(f"""\
                    UPDATE public.searches
@@ -43,7 +43,6 @@ def update(search_id, project_id, user_id, data: schemas.SavedSearchSchema):
         r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
         r["filter"] = helper.old_search_payload_to_flat(r["filter"])
         r = helper.dict_to_camel_case(r)
-        # r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
     return r
@@ -36,7 +36,7 @@ COALESCE((SELECT TRUE
 def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
                     error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False):
     if data.bookmarked:
-        data.startDate, data.endDate = sessions_favorite.get_start_end_timestamp(project_id, user_id)
+        data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project_id, user_id)

     full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only,
                                                favorite_only=data.bookmarked, issue=issue, project_id=project_id,
@@ -69,7 +69,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
         if data.order is None:
             data.order = schemas.SortOrderType.desc.value
         else:
-            data.order = data.order.value
+            data.order = data.order
         if data.sort is not None and data.sort != 'sessionsCount':
             sort = helper.key_to_snake_case(data.sort)
             g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})"
@@ -104,7 +104,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
         if data.order is None:
             data.order = schemas.SortOrderType.desc.value
         else:
-            data.order = data.order.value
+            data.order = data.order
         sort = 'session_id'
         if data.sort is not None and data.sort != "session_id":
             # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
@@ -129,7 +129,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
             print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
             print(main_query.decode('UTF-8'))
             print("--------- PAYLOAD -----------")
-            print(data.json())
+            print(data.model_dump_json())
             print("--------------------")
             raise err
         if errors_only or ids_only:
@@ -164,7 +164,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
 def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
                    view_type: schemas.MetricTimeseriesViewType, metric_type: schemas.MetricType,
                    metric_of: schemas.MetricOfTable, metric_value: List):
-    step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate,
+    step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
                                                    density=density, factor=1, decimal=True))
     extra_event = None
     if metric_of == schemas.MetricOfTable.visited_url:
@@ -204,7 +204,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
             print("--------- SESSIONS-SERIES QUERY EXCEPTION -----------")
             print(main_query.decode('UTF-8'))
             print("--------- PAYLOAD -----------")
-            print(data.json())
+            print(data.model_dump_json())
             print("--------------------")
             raise err
     if view_type == schemas.MetricTimeseriesViewType.line_chart:
@@ -267,7 +267,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
     return sessions

-def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema):
+def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
     return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.request_details,
                                                                             schemas.EventType.graphql] \
                 or event.type in [schemas.PerformanceEventType.location_dom_complete,
@@ -284,7 +284,7 @@ def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema):
 def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue,
                        project_id, user_id, extra_event=None):
     ss_constraints = []
-    full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate,
+    full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
                  "projectId": project_id, "userId": user_id}
     extra_constraints = [
         "s.project_id = %(project_id)s",
@@ -516,22 +516,6 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
                     ss_constraints.append(
                         sh.multi_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not,
                                             value_key=f_k))
-                # search sessions with click_rage on a specific selector
-                if len(f.filters) > 0 and schemas.IssueType.click_rage in f.value:
-                    for j, sf in enumerate(f.filters):
-                        if sf.operator == schemas.IssueFilterOperator._on_selector:
-                            f_k = f"f_value{i}_{j}"
-                            full_args = {**full_args, **sh.multi_values(sf.value, value_key=f_k)}
-                            extra_constraints += ["mc.timestamp>=%(startDate)s",
-                                                  "mc.timestamp<=%(endDate)s",
-                                                  "mis.type='click_rage'",
-                                                  sh.multi_conditions(f"mc.selector=%({f_k})s",
-                                                                      sf.value, is_not=is_not,
-                                                                      value_key=f_k)]
-                            extra_from += """INNER JOIN events.clicks AS mc USING(session_id)
-                                             INNER JOIN events_common.issues USING (session_id,timestamp)
-                                             INNER JOIN public.issues AS mis USING (issue_id)\n"""

             elif filter_type == schemas.FilterType.events_count:
                 extra_constraints.append(
@@ -582,11 +566,11 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
                 event_where.append(f"event_{event_index - 1}.timestamp <= main.timestamp")
             e_k = f"e_value{i}"
             s_k = e_k + "_source"
-            if event.type != schemas.PerformanceEventType.time_between_events:
-                event.value = helper.values_for_operator(value=event.value, op=event.operator)
-                full_args = {**full_args,
-                             **sh.multi_values(event.value, value_key=e_k),
-                             **sh.multi_values(event.source, value_key=s_k)}
+            event.value = helper.values_for_operator(value=event.value, op=event.operator)
+            full_args = {**full_args,
+                         **sh.multi_values(event.value, value_key=e_k),
+                         **sh.multi_values(event.source, value_key=s_k)}

             if event_type == events.EventType.CLICK.ui_type:
                 event_from = event_from % f"{events.EventType.CLICK.table} AS main "
@@ -741,44 +725,6 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
                 event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " +
                                    sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator.value} %({e_k})s",
                                                        event.source, value_key=e_k))
-            elif event_type == schemas.PerformanceEventType.time_between_events:
-                event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN {getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) "
-                if not isinstance(event.value[0].value, list):
-                    event.value[0].value = [event.value[0].value]
-                if not isinstance(event.value[1].value, list):
-                    event.value[1].value = [event.value[1].value]
-                event.value[0].value = helper.values_for_operator(value=event.value[0].value,
-                                                                  op=event.value[0].operator)
-                event.value[1].value = helper.values_for_operator(value=event.value[1].value,
-                                                                  op=event.value[0].operator)
-                e_k1 = e_k + "_e1"
-                e_k2 = e_k + "_e2"
-                full_args = {**full_args,
-                             **sh.multi_values(event.value[0].value, value_key=e_k1),
-                             **sh.multi_values(event.value[1].value, value_key=e_k2)}
-                s_op = sh.get_sql_operator(event.value[0].operator)
-                event_where += ["main2.timestamp >= %(startDate)s", "main2.timestamp <= %(endDate)s"]
-                if event_index > 0 and not or_events:
-                    event_where.append("main2.session_id=event_0.session_id")
-                is_any = sh.isAny_opreator(event.value[0].operator)
-                if not is_any:
-                    event_where.append(
-                        sh.multi_conditions(
-                            f"main.{getattr(events.EventType, event.value[0].type).column} {s_op} %({e_k1})s",
-                            event.value[0].value, value_key=e_k1))
-                s_op = sh.get_sql_operator(event.value[1].operator)
-                is_any = sh.isAny_opreator(event.value[1].operator)
-                if not is_any:
-                    event_where.append(
-                        sh.multi_conditions(
-                            f"main2.{getattr(events.EventType, event.value[1].type).column} {s_op} %({e_k2})s",
-                            event.value[1].value, value_key=e_k2))
-                e_k += "_custom"
-                full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)}
-                event_where.append(
-                    sh.multi_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator.value} %({e_k})s",
-                                        event.source, value_key=e_k))

             elif event_type == schemas.EventType.request_details:
                 event_from = event_from % f"{events.EventType.REQUEST.table} AS main "
@@ -905,9 +851,9 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
     else:
         data.events = []
     # ---------------------------------------------------------------------------
-    if data.startDate is not None:
+    if data.startTimestamp is not None:
         extra_constraints.append("s.start_ts >= %(startDate)s")
-    if data.endDate is not None:
+    if data.endTimestamp is not None:
         extra_constraints.append("s.start_ts <= %(endDate)s")
     # if data.platform is not None:
     #     if data.platform == schemas.PlatformType.mobile:
@@ -14,6 +14,16 @@ def __get_mob_keys(project_id, session_id):
     ]

+def __get_ios_video_keys(project_id, session_id):
+    params = {
+        "sessionId": session_id,
+        "projectId": project_id
+    }
+    return [
+        config("SESSION_IOS_VIDEO_PATTERN", default="replay.mp4") % params,
+    ]
+
 def __get_mob_keys_deprecated(session_id):
     return [str(session_id), str(session_id) + "e"]
@@ -44,12 +54,30 @@ def get_urls_depercated(session_id, check_existence: bool = True):
     return results

-def get_ios(session_id):
-    return StorageClient.get_presigned_url_for_sharing(
-        bucket=config("ios_bucket"),
-        expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900),
-        key=str(session_id)
-    )
+def get_ios(session_id, project_id, check_existence=False):
+    results = []
+    for k in __get_mob_keys(project_id=project_id, session_id=session_id):
+        if check_existence and not StorageClient.exists(bucket=config("IOS_BUCKET"), key=k):
+            continue
+        results.append(StorageClient.get_presigned_url_for_sharing(
+            bucket=config("IOS_BUCKET"),
+            expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900),
+            key=k
+        ))
+    return results
+
+def get_ios_videos(session_id, project_id, check_existence=False):
+    results = []
+    for k in __get_ios_video_keys(project_id=project_id, session_id=session_id):
+        if check_existence and not StorageClient.exists(bucket=config("IOS_VIDEO_BUCKET"), key=k):
+            continue
+        results.append(StorageClient.get_presigned_url_for_sharing(
+            bucket=config("IOS_VIDEO_BUCKET"),
+            expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900),
+            key=k
+        ))
+    return results

 def delete_mobs(project_id, session_ids):
View file
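
Note on the change above: `get_ios` now returns a list of presigned URLs and can skip objects that do not exist. `StorageClient` is OpenReplay's own wrapper; as a point of reference, a minimal boto3 sketch of the same presign-with-existence-check pattern (bucket and key names are placeholders, not from this repo) would be:

import boto3
from botocore.exceptions import ClientError

s3 = boto3.client("s3")

def presign_if_exists(bucket: str, key: str, expires_in: int = 900):
    # Mirror check_existence=True above: skip keys that are not in the bucket.
    try:
        s3.head_object(Bucket=bucket, Key=key)
    except ClientError:
        return None
    # Presigned GET URL, valid for expires_in seconds
    return s3.generate_presigned_url("get_object",
                                     Params={"Bucket": bucket, "Key": key},
                                     ExpiresIn=expires_in)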

@@ -69,7 +69,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se
            query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name
                                    FROM sessions_notes INNER JOIN users USING (user_id)
                                    WHERE {" AND ".join(conditions)}
-                                   ORDER BY created_at {data.order.value}
+                                   ORDER BY created_at {data.order}
                                    LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""",
                                {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params})
@@ -86,7 +86,7 @@ def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNote
        query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public)
                                VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s)
                                RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""",
-                            {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict()})
+                            {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.model_dump()})
        cur.execute(query)
        result = helper.dict_to_camel_case(cur.fetchone())
        if result:
@@ -115,7 +115,7 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot
                                  AND note_id = %(note_id)s
                                  AND deleted_at ISNULL
                               RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""",
-                            {"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.dict()})
+                            {"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.model_dump()})
        )
        row = helper.dict_to_camel_case(cur.fetchone())
        if row:

View file
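
The `data.dict()` to `data.model_dump()` edits here (and in the routers below) are the mechanical part of the pydantic v1 to v2 migration; a minimal illustration (the `Note` model is invented for the example):

from pydantic import BaseModel

class Note(BaseModel):
    message: str
    is_public: bool = False

n = Note(message="hi")
assert n.model_dump() == {"message": "hi", "is_public": False}  # v2 name for the old n.dict()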

@@ -41,8 +41,6 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_
                            AND s.session_id = %(session_id)s;""",
            {"project_id": project_id, "session_id": session_id, "userId": context.user_id}
        )
-        # print("===============")
-        # print(query)
        cur.execute(query=query)
        data = cur.fetchone()
@@ -55,9 +53,9 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_
                if e["type"].endswith("_IOS"):
                    e["type"] = e["type"][:-len("_IOS")]
            data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id)
-            data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id,
-                                                                     session_id=session_id)
-            data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id)
+            data['userEvents'] = events_ios.get_customs_by_session_id(project_id=project_id,
+                                                                      session_id=session_id)
+            data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id, project_id=project_id)
        else:
            data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id,
                                                      group_clickrage=True)
@@ -117,8 +115,6 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat
                            AND s.session_id = %(session_id)s;""",
            {"project_id": project_id, "session_id": session_id, "userId": context.user_id}
        )
-        # print("===============")
-        # print(query)
        cur.execute(query=query)
        data = cur.fetchone()
@@ -126,7 +122,10 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat
        data = helper.dict_to_camel_case(data)
        if full_data:
            if data["platform"] == 'ios':
-                data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id)
+                data['domURL'] = sessions_mobs.get_ios(session_id=session_id, project_id=project_id,
+                                                       check_existence=False)
+                data['videoURL'] = sessions_mobs.get_ios_videos(session_id=session_id, project_id=project_id,
+                                                                check_existence=False)
            else:
                data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id,
                                                        check_existence=False)
@@ -154,8 +153,6 @@ def get_events(project_id, session_id):
                            AND s.session_id = %(session_id)s;""",
            {"project_id": project_id, "session_id": session_id}
        )
-        # print("===============")
-        # print(query)
        cur.execute(query=query)
        s_data = cur.fetchone()
@@ -168,8 +165,8 @@ def get_events(project_id, session_id):
                if e["type"].endswith("_IOS"):
                    e["type"] = e["type"][:-len("_IOS")]
            data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id)
-            data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id,
-                                                                     session_id=session_id)
+            data['userEvents'] = events_ios.get_customs_by_session_id(project_id=project_id,
+                                                                      session_id=session_id)
        else:
            data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id,
                                                      group_clickrage=True)
@@ -199,11 +196,11 @@ def reduce_issues(issues_list):
    i = 0
    # remove same-type issues if the time between them is <2s
    while i < len(issues_list) - 1:
-        for j in range(i+1,len(issues_list)):
+        for j in range(i + 1, len(issues_list)):
            if issues_list[i]["type"] == issues_list[j]["type"]:
                break
            else:
-                i+=1
+                i += 1
                break
        if issues_list[i]["timestamp"] - issues_list[j]["timestamp"] < 2000:

View file

@@ -24,17 +24,19 @@ T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.36
            21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042}


-def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
+def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) -> List[RealDictRow]:
    """
    Add minimal timestamp
    :param filter_d: dict contains events&filters&...
    :return:
    """
-    stages: [dict] = filter_d.get("events", [])
-    filters: [dict] = filter_d.get("filters", [])
-    filter_issues = filter_d.get("issueTypes")
-    if filter_issues is None or len(filter_issues) == 0:
-        filter_issues = []
+    stages: [dict] = filter_d.events
+    filters: [dict] = filter_d.filters
+    filter_issues = []
+    # TODO: enable this if needed by an endpoint
+    # filter_issues = filter_d.get("issueTypes")
+    # if filter_issues is None or len(filter_issues) == 0:
+    #     filter_issues = []
    stage_constraints = ["main.timestamp <= %(endTimestamp)s"]
    first_stage_extra_constraints = ["s.project_id=%(project_id)s", "s.start_ts >= %(startTimestamp)s",
                                     "s.start_ts <= %(endTimestamp)s"]
@@ -120,22 +122,22 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
    i = -1
    for s in stages:
-        if s.get("operator") is None:
-            s["operator"] = "is"
-        if not isinstance(s["value"], list):
-            s["value"] = [s["value"]]
-        is_any = sh.isAny_opreator(s["operator"])
-        if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0:
+        if s.operator is None:
+            s.operator = schemas.SearchEventOperator._is
+        if not isinstance(s.value, list):
+            s.value = [s.value]
+        is_any = sh.isAny_opreator(s.operator)
+        if not is_any and isinstance(s.value, list) and len(s.value) == 0:
            continue
        i += 1
        if i == 0:
            extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
        else:
            extra_from = []
-        op = sh.get_sql_operator(s["operator"])
+        op = sh.get_sql_operator(s.operator)
        # event_type = s["type"].upper()
-        event_type = s["type"]
+        event_type = s.type
        if event_type == events.EventType.CLICK.ui_type:
            next_table = events.EventType.CLICK.table
            next_col_name = events.EventType.CLICK.column
@@ -165,16 +167,16 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
            print(f"=================UNDEFINED:{event_type}")
            continue

-        values = {**values, **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
+        values = {**values, **sh.multi_values(helper.values_for_operator(value=s.value, op=s.operator),
                                              value_key=f"value{i + 1}")}
-        if sh.is_negation_operator(s["operator"]) and i > 0:
+        if sh.is_negation_operator(s.operator) and i > 0:
            op = sh.reverse_sql_operator(op)
            main_condition = "left_not.session_id ISNULL"
            extra_from.append(f"""LEFT JOIN LATERAL (SELECT session_id
                                                     FROM {next_table} AS s_main
                                                     WHERE
                                                     {sh.multi_conditions(f"s_main.{next_col_name} {op} %(value{i + 1})s",
-                                                                          values=s["value"], value_key=f"value{i + 1}")}
+                                                                          values=s.value, value_key=f"value{i + 1}")}
                                                        AND s_main.timestamp >= T{i}.stage{i}_timestamp
                                                        AND s_main.session_id = T1.session_id) AS left_not ON (TRUE)""")
        else:
@@ -182,7 +184,7 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
                main_condition = "TRUE"
            else:
                main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
-                                                     values=s["value"], value_key=f"value{i + 1}")
+                                                     values=s.value, value_key=f"value{i + 1}")
        n_stages_query.append(f"""
        (SELECT main.session_id,
                {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp
@@ -225,7 +227,8 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
    """
    # LIMIT 10000
-    params = {"project_id": project_id, "startTimestamp": filter_d["startDate"], "endTimestamp": filter_d["endDate"],
+    params = {"project_id": project_id, "startTimestamp": filter_d.startTimestamp,
+              "endTimestamp": filter_d.endTimestamp,
              "issueTypes": tuple(filter_issues), **values}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(n_stages_query, params)
@@ -239,7 +242,7 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id)
            print("--------- FUNNEL SEARCH QUERY EXCEPTION -----------")
            print(query.decode('UTF-8'))
            print("--------- PAYLOAD -----------")
-            print(filter_d)
+            print(filter_d.model_dump_json())
            print("--------------------")
            raise err
    return rows
@@ -544,9 +547,9 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
    return n_critical_issues, issues_dict, total_drop_due_to_issues


-def get_top_insights(filter_d, project_id):
+def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id):
    output = []
-    stages = filter_d.get("events", [])
+    stages = filter_d.events
    # TODO: handle 1 stage alone
    if len(stages) == 0:
        print("no stages found")
@@ -554,17 +557,24 @@ def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id):
    elif len(stages) == 1:
        # TODO: count sessions, and users for single stage
        output = [{
-            "type": stages[0]["type"],
-            "value": stages[0]["value"],
+            "type": stages[0].type,
+            "value": stages[0].value,
            "dropPercentage": None,
-            "operator": stages[0]["operator"],
+            "operator": stages[0].operator,
            "sessionsCount": 0,
            "dropPct": 0,
            "usersCount": 0,
            "dropDueToIssues": 0
        }]
-        counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d),
-                                          project_id=project_id, user_id=None, count_only=True)
+        # original
+        # counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d),
+        #                                   project_id=project_id, user_id=None, count_only=True)
+        # first change
+        # counts = sessions.search_sessions(data=schemas.FlatSessionsSearchPayloadSchema.parse_obj(filter_d),
+        #                                   project_id=project_id, user_id=None, count_only=True)
+        # last change
+        counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.model_validate(filter_d),
+                                          project_id=project_id, user_id=None, count_only=True)
        output[0]["sessionsCount"] = counts["countSessions"]
        output[0]["usersCount"] = counts["countUsers"]
@@ -583,9 +593,9 @@ def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id):
    return stages_list, total_drop_due_to_issues


-def get_issues_list(filter_d, project_id, first_stage=None, last_stage=None):
+def get_issues_list(filter_d: schemas.CardSeriesFilterSchema, project_id, first_stage=None, last_stage=None):
    output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []})
-    stages = filter_d.get("events", [])
+    stages = filter_d.events
    # The result of the multi-stage query
    rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
    # print(json.dumps(rows[0],indent=4))

View file
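
With `get_stages_and_events` now taking a typed `CardSeriesFilterSchema`, dict lookups become attribute access, and `parse_obj` becomes `model_validate`. A small sketch of converting one model into another the way the `counts = ...` line above does (model names here are stand-ins, not the real schemas):

from pydantic import BaseModel

class SearchFilter(BaseModel):       # stand-in for CardSeriesFilterSchema
    startTimestamp: int = 0
    endTimestamp: int = 0

class SearchPayload(BaseModel):      # stand-in for SessionsSearchPayloadSchema
    startTimestamp: int = 0
    endTimestamp: int = 0
    events: list = []

f = SearchFilter(startTimestamp=1, endTimestamp=2)
# pydantic v2 replacement for parse_obj; dumping first keeps it version-agnostic
payload = SearchPayload.model_validate(f.model_dump())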

@@ -16,7 +16,7 @@ def create_tenant(data: schemas.UserSignupSchema):
    email = data.email
    print(f"=====================> {email}")
-    password = data.password
+    password = data.password.get_secret_value()

    if email is None or len(email) < 5:
        errors.append("Invalid email address.")

View file
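
`data.password.get_secret_value()` suggests the signup and login password fields moved to pydantic's `SecretStr`, which keeps the raw value out of reprs and logs; a minimal sketch (field names assumed):

from pydantic import BaseModel, SecretStr

class Signup(BaseModel):
    email: str
    password: SecretStr

s = Signup(email="a@b.co", password="hunter2")
print(s.password)                    # prints: **********
raw = s.password.get_secret_value()  # explicit opt-in to the raw string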

@@ -223,29 +223,29 @@ def update(tenant_id, user_id, changes, output=True):
    return get(user_id=user_id, tenant_id=tenant_id)


-def create_member(tenant_id, user_id, data, background_tasks: BackgroundTasks):
+def create_member(tenant_id, user_id, data: schemas.CreateMemberSchema, background_tasks: BackgroundTasks):
    admin = get(tenant_id=tenant_id, user_id=user_id)
    if not admin["admin"] and not admin["superAdmin"]:
        return {"errors": ["unauthorized"]}
-    if data.get("userId") is not None:
+    if data.user_id is not None:
        return {"errors": ["please use POST/PUT /client/members/{memberId} for update"]}
-    user = get_by_email_only(email=data["email"])
+    user = get_by_email_only(email=data.email)
    if user:
        return {"errors": ["user already exists"]}
-    name = data.get("name", None)
-    if name is None or len(name) == 0:
-        name = data["email"]
+    if data.name is None or len(data.name) == 0:
+        data.name = data.email
    invitation_token = __generate_invitation_token()
-    user = get_deleted_user_by_email(email=data["email"])
+    user = get_deleted_user_by_email(email=data.email)
    if user is not None:
-        new_member = restore_member(email=data["email"], invitation_token=invitation_token,
-                                    admin=data.get("admin", False), name=name, user_id=user["userId"])
+        new_member = restore_member(email=data.email, invitation_token=invitation_token,
+                                    admin=data.admin, name=data.name, user_id=user["userId"])
    else:
-        new_member = create_new_member(email=data["email"], invitation_token=invitation_token,
-                                       admin=data.get("admin", False), name=name)
+        new_member = create_new_member(email=data.email, invitation_token=invitation_token,
+                                       admin=data.admin, name=data.name)
    new_member["invitationLink"] = __get_invitation_link(new_member.pop("invitationToken"))
    background_tasks.add_task(email_helper.send_team_invitation, **{
-        "recipient": data["email"],
+        "recipient": data.email,
        "invitation_link": new_member["invitationLink"],
        "client_id": tenants.get_by_tenant_id(tenant_id)["name"],
        "sender_name": admin["name"]
@@ -607,11 +607,7 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
        r = cur.fetchone()
        return r is not None \
               and r.get("jwt_iat") is not None \
-               and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
-                    or (jwt_aud.startswith("plugin") \
-                        and (r["changed_at"] is None \
-                             or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
-                    )
+               and abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1


def change_jwt_iat(user_id):

View file

@@ -77,6 +77,8 @@ def update(tenant_id, webhook_id, changes, replace_none=False):
                               {"id": webhook_id, **changes})
            )
        w = helper.dict_to_camel_case(cur.fetchone())
+    if w is None:
+        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"webhook not found.")
    w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
    if replace_none:
        for k in w.keys():
@@ -120,20 +122,22 @@ def exists_by_name(name: str, exclude_id: Optional[int], webhook_type: str = sch
    return row["exists"]


-def add_edit(tenant_id, data, replace_none=None):
-    if "name" in data and len(data["name"]) > 0 \
-            and exists_by_name(name=data["name"], exclude_id=data.get("webhookId")):
+def add_edit(tenant_id, data: schemas.WebhookSchema, replace_none=None):
+    if len(data.name) > 0 \
+            and exists_by_name(name=data.name, exclude_id=data.webhook_id):
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
-    if data.get("webhookId") is not None:
-        return update(tenant_id=tenant_id, webhook_id=data["webhookId"],
-                      changes={"endpoint": data["endpoint"],
-                               "authHeader": None if "authHeader" not in data else data["authHeader"],
-                               "name": data["name"] if "name" in data else ""}, replace_none=replace_none)
+    if data.webhook_id is not None:
+        return update(tenant_id=tenant_id, webhook_id=data.webhook_id,
+                      changes={"endpoint": data.endpoint,
+                               "authHeader": data.auth_header,
+                               "name": data.name},
+                      replace_none=replace_none)
    else:
-        return add(tenant_id=tenant_id,
-                   endpoint=data["endpoint"],
-                   auth_header=None if "authHeader" not in data else data["authHeader"],
-                   name=data["name"] if "name" in data else "", replace_none=replace_none)
+        return add(tenant_id=tenant_id,
+                   endpoint=data.endpoint,
+                   auth_header=data.auth_header,
+                   name=data.name,
+                   replace_none=replace_none)


def delete(tenant_id, webhook_id):

View file
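
`update` now raises instead of failing later on `w is None`. In FastAPI, an `HTTPException` raised anywhere inside the request call stack becomes the matching JSON error response; a self-contained sketch of the behavior (route and names invented for illustration):

from fastapi import FastAPI, HTTPException, status

app = FastAPI()

@app.put("/webhooks/{webhook_id}")
def update_webhook(webhook_id: int):
    found = False  # stand-in for the DB lookup in the diff above
    if not found:
        # Client receives HTTP 404 with body {"detail": "webhook not found."}
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
                            detail="webhook not found.")
    return {"data": {"webhookId": webhook_id}}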

@@ -49,9 +49,12 @@ EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)sdevtools
SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs
SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe
DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob
+SESSION_IOS_VIDEO_PATTERN=%(sessionId)s/replay.mp4
PRESIGNED_URL_EXPIRATION=3600
ASSIST_JWT_EXPIRATION=144000
ASSIST_JWT_SECRET=
PYTHONUNBUFFERED=1
REDIS_STRING=redis://redis-master.db.svc.cluster.local:6379
SCH_DELETE_DAYS=30
+IOS_BUCKET=mobs
+IOS_VIDEO_BUCKET=mobs

View file
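
The new `SESSION_IOS_VIDEO_PATTERN` value is consumed via Python %-formatting against a params dict (see `__get_ios_video_keys` above); for reference:

pattern = "%(sessionId)s/replay.mp4"                # SESSION_IOS_VIDEO_PATTERN
key = pattern % {"sessionId": 42, "projectId": 7}   # unused keys are simply ignored
assert key == "42/replay.mp4"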

@@ -1,15 +1,16 @@
-requests==2.31.0
-urllib3==1.26.16
-boto3==1.26.148
-pyjwt==2.7.0
-psycopg2-binary==2.9.6
-elasticsearch==8.8.0
-jira==3.5.1
-fastapi==0.96.0
-uvicorn[standard]==0.22.0
+# Keep this version to not have conflicts between requests and boto3
+urllib3==1.26.16
+requests==2.31.0
+boto3==1.28.30
+pyjwt==2.8.0
+psycopg2-binary==2.9.7
+elasticsearch==8.9.0
+jira==3.5.2
+fastapi==0.101.1
+uvicorn[standard]==0.23.2
python-decouple==3.8
-pydantic[email]==1.10.8
-apscheduler==3.10.1
+pydantic[email]==2.2.1
+apscheduler==3.10.4
View file

@@ -1,17 +1,18 @@
-requests==2.31.0
-urllib3==1.26.16
-boto3==1.26.148
-pyjwt==2.7.0
-psycopg2-binary==2.9.6
-elasticsearch==8.8.0
-jira==3.5.1
-fastapi==0.97.0
-uvicorn[standard]==0.22.0
+# Keep this version to not have conflicts between requests and boto3
+urllib3==1.26.16
+requests==2.31.0
+boto3==1.28.40
+pyjwt==2.8.0
+psycopg2-binary==2.9.7
+elasticsearch==8.9.0
+jira==3.5.2
+fastapi==0.103.1
+uvicorn[standard]==0.23.2
python-decouple==3.8
-pydantic[email]==1.10.8
-apscheduler==3.10.1
-redis==4.5.5
+pydantic[email]==2.3.0
+apscheduler==3.10.4
+redis==5.0.0

View file

@@ -1,5 +1,6 @@
from typing import Union

+from decouple import config
from fastapi import Depends, Body

import schemas
@@ -18,20 +19,6 @@ from routers.base import get_routers
public_app, app, app_apikey = get_routers()


-@app.post('/{projectId}/sessions/search', tags=["sessions"])
-def sessions_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
-                    context: schemas.CurrentContext = Depends(OR_context)):
-    data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id)
-    return {'data': data}
-
-
-@app.post('/{projectId}/sessions/search/ids', tags=["sessions"])
-def session_ids_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
-                       context: schemas.CurrentContext = Depends(OR_context)):
-    data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True)
-    return {'data': data}
-
-
@app.get('/{projectId}/events/search', tags=["events"])
def events_search(projectId: int, q: str,
                  type: Union[schemas.FilterType, schemas.EventType,
@@ -107,9 +94,9 @@ def get_sentry(projectId: int, context: schemas.CurrentContext = Depends(OR_cont
@app.post('/{projectId}/integrations/sentry', tags=["integrations"])
-def add_edit_sentry(projectId: int, data: schemas.SentrySchema = Body(...),
+def add_edit_sentry(projectId: int, data: schemas.IntegrationSentrySchema = Body(...),
                    context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_sentry.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
+    return {"data": log_tool_sentry.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}


@app.delete('/{projectId}/integrations/sentry', tags=["integrations"])
@@ -133,9 +120,9 @@ def get_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_con
@app.post('/{projectId}/integrations/datadog', tags=["integrations"])
-def add_edit_datadog(projectId: int, data: schemas.DatadogSchema = Body(...),
+def add_edit_datadog(projectId: int, data: schemas.IntegrationDatadogSchema = Body(...),
                     context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_datadog.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
+    return {"data": log_tool_datadog.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}


@app.delete('/{projectId}/integrations/datadog', tags=["integrations"])
@@ -154,9 +141,9 @@ def get_stackdriver(projectId: int, context: schemas.CurrentContext = Depends(OR
@app.post('/{projectId}/integrations/stackdriver', tags=["integrations"])
-def add_edit_stackdriver(projectId: int, data: schemas.StackdriverSchema = Body(...),
+def add_edit_stackdriver(projectId: int, data: schemas.IntegartionStackdriverSchema = Body(...),
                         context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_stackdriver.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
+    return {"data": log_tool_stackdriver.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}


@app.delete('/{projectId}/integrations/stackdriver', tags=["integrations"])
@@ -175,9 +162,9 @@ def get_newrelic(projectId: int, context: schemas.CurrentContext = Depends(OR_co
@app.post('/{projectId}/integrations/newrelic', tags=["integrations"])
-def add_edit_newrelic(projectId: int, data: schemas.NewrelicSchema = Body(...),
+def add_edit_newrelic(projectId: int, data: schemas.IntegrationNewrelicSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_newrelic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
+    return {"data": log_tool_newrelic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}


@app.delete('/{projectId}/integrations/newrelic', tags=["integrations"])
@@ -196,9 +183,9 @@ def get_rollbar(projectId: int, context: schemas.CurrentContext = Depends(OR_con
@app.post('/{projectId}/integrations/rollbar', tags=["integrations"])
-def add_edit_rollbar(projectId: int, data: schemas.RollbarSchema = Body(...),
+def add_edit_rollbar(projectId: int, data: schemas.IntegrationRollbarSchema = Body(...),
                     context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_rollbar.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
+    return {"data": log_tool_rollbar.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}


@app.delete('/{projectId}/integrations/rollbar', tags=["integrations"])
@@ -207,9 +194,9 @@ def delete_datadog(projectId: int, _=Body(None), context: schemas.CurrentContext
@app.post('/integrations/bugsnag/list_projects', tags=["integrations"])
-def list_projects_bugsnag(data: schemas.BugsnagBasicSchema = Body(...),
+def list_projects_bugsnag(data: schemas.IntegrationBugsnagBasicSchema = Body(...),
                          context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_bugsnag.list_projects(auth_token=data.authorizationToken)}
+    return {"data": log_tool_bugsnag.list_projects(auth_token=data.authorization_token)}


@app.get('/integrations/bugsnag', tags=["integrations"])
@@ -223,9 +210,9 @@ def get_bugsnag(projectId: int, context: schemas.CurrentContext = Depends(OR_con
@app.post('/{projectId}/integrations/bugsnag', tags=["integrations"])
-def add_edit_bugsnag(projectId: int, data: schemas.BugsnagSchema = Body(...),
+def add_edit_bugsnag(projectId: int, data: schemas.IntegrationBugsnagSchema = Body(...),
                     context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_bugsnag.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
+    return {"data": log_tool_bugsnag.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}


@app.delete('/{projectId}/integrations/bugsnag', tags=["integrations"])
@@ -234,7 +221,7 @@ def delete_bugsnag(projectId: int, _=Body(None), context: schemas.CurrentContext
@app.post('/integrations/cloudwatch/list_groups', tags=["integrations"])
-def list_groups_cloudwatch(data: schemas.CloudwatchBasicSchema = Body(...),
+def list_groups_cloudwatch(data: schemas.IntegrationCloudwatchBasicSchema = Body(...),
                           context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": log_tool_cloudwatch.list_log_groups(aws_access_key_id=data.awsAccessKeyId,
                                                        aws_secret_access_key=data.awsSecretAccessKey,
@@ -252,9 +239,9 @@ def get_cloudwatch(projectId: int, context: schemas.CurrentContext = Depends(OR_
@app.post('/{projectId}/integrations/cloudwatch', tags=["integrations"])
-def add_edit_cloudwatch(projectId: int, data: schemas.CloudwatchSchema = Body(...),
+def add_edit_cloudwatch(projectId: int, data: schemas.IntegrationCloudwatchSchema = Body(...),
                        context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_cloudwatch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
+    return {"data": log_tool_cloudwatch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}


@app.delete('/{projectId}/integrations/cloudwatch', tags=["integrations"])
@@ -273,16 +260,16 @@ def get_elasticsearch(projectId: int, context: schemas.CurrentContext = Depends(
@app.post('/integrations/elasticsearch/test', tags=["integrations"])
-def test_elasticsearch_connection(data: schemas.ElasticsearchBasicSchema = Body(...),
+def test_elasticsearch_connection(data: schemas.IntegrationElasticsearchTestSchema = Body(...),
                                  context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_elasticsearch.ping(tenant_id=context.tenant_id, **data.dict())}
+    return {"data": log_tool_elasticsearch.ping(tenant_id=context.tenant_id, data=data)}


@app.post('/{projectId}/integrations/elasticsearch', tags=["integrations"])
-def add_edit_elasticsearch(projectId: int, data: schemas.ElasticsearchSchema = Body(...),
+def add_edit_elasticsearch(projectId: int, data: schemas.IntegrationElasticsearchSchema = Body(...),
                           context: schemas.CurrentContext = Depends(OR_context)):
    return {
-        "data": log_tool_elasticsearch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
+        "data": log_tool_elasticsearch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}


@app.delete('/{projectId}/integrations/elasticsearch', tags=["integrations"])
@@ -301,9 +288,9 @@ def get_sumologic(projectId: int, context: schemas.CurrentContext = Depends(OR_c
@app.post('/{projectId}/integrations/sumologic', tags=["integrations"])
-def add_edit_sumologic(projectId: int, data: schemas.SumologicSchema = Body(...),
+def add_edit_sumologic(projectId: int, data: schemas.IntegrationSumologicSchema = Body(...),
                       context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": log_tool_sumologic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
+    return {"data": log_tool_sumologic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)}


@app.delete('/{projectId}/integrations/sumologic', tags=["integrations"])
@@ -341,7 +328,7 @@ def get_integration_status_github(context: schemas.CurrentContext = Depends(OR_c
@app.post('/integrations/jira', tags=["integrations"])
-def add_edit_jira_cloud(data: schemas.JiraSchema = Body(...),
+def add_edit_jira_cloud(data: schemas.IssueTrackingJiraSchema = Body(...),
                        context: schemas.CurrentContext = Depends(OR_context)):
    if not data.url.endswith('atlassian.net'):
        return {"errors": ["url must be a valid JIRA URL (example.atlassian.net)"]}
@@ -350,18 +337,18 @@ def add_edit_jira_cloud(data: schemas.IssueTrackingJiraSchema = Body(...),
                                                              user_id=context.user_id)
    if error is not None and integration is None:
        return error
-    return {"data": integration.add_edit(data=data.dict())}
+    return {"data": integration.add_edit(data=data)}


@app.post('/integrations/github', tags=["integrations"])
-def add_edit_github(data: schemas.GithubSchema = Body(...),
+def add_edit_github(data: schemas.IssueTrackingGithubSchema = Body(...),
                    context: schemas.CurrentContext = Depends(OR_context)):
    error, integration = integrations_manager.get_integration(tool=integration_github.PROVIDER,
                                                              tenant_id=context.tenant_id,
                                                              user_id=context.user_id)
    if error is not None:
        return error
-    return {"data": integration.add_edit(data=data.dict())}
+    return {"data": integration.add_edit(data=data)}


@app.delete('/integrations/issues', tags=["integrations"])
@@ -452,7 +439,7 @@ def get_gdpr(projectId: int, context: schemas.CurrentContext = Depends(OR_contex
@app.post('/{projectId}/gdpr', tags=["projects", "gdpr"])
def edit_gdpr(projectId: int, data: schemas.GdprSchema = Body(...),
              context: schemas.CurrentContext = Depends(OR_context)):
-    result = projects.edit_gdpr(project_id=projectId, gdpr=data.dict())
+    result = projects.edit_gdpr(project_id=projectId, gdpr=data)
    if "errors" in result:
        return result
    return {"data": result}
@@ -470,20 +457,20 @@ def get_metadata(projectId: int, context: schemas.CurrentContext = Depends(OR_co
    return {"data": metadata.get(project_id=projectId)}


-@app.post('/{projectId}/metadata/list', tags=["metadata"])
-def add_edit_delete_metadata(projectId: int, data: schemas.MetadataListSchema = Body(...),
-                             context: schemas.CurrentContext = Depends(OR_context)):
-    return metadata.add_edit_delete(tenant_id=context.tenant_id, project_id=projectId, new_metas=data.list)
+# @app.post('/{projectId}/metadata/list', tags=["metadata"])
+# def add_edit_delete_metadata(projectId: int, data: schemas.MetadataListSchema = Body(...),
+#                              context: schemas.CurrentContext = Depends(OR_context)):
+#     return metadata.add_edit_delete(tenant_id=context.tenant_id, project_id=projectId, new_metas=data.list)


@app.post('/{projectId}/metadata', tags=["metadata"])
-def add_metadata(projectId: int, data: schemas.MetadataBasicSchema = Body(...),
+def add_metadata(projectId: int, data: schemas.MetadataSchema = Body(...),
                 context: schemas.CurrentContext = Depends(OR_context)):
    return metadata.add(tenant_id=context.tenant_id, project_id=projectId, new_name=data.key)


@app.post('/{projectId}/metadata/{index}', tags=["metadata"])
-def edit_metadata(projectId: int, index: int, data: schemas.MetadataBasicSchema = Body(...),
+def edit_metadata(projectId: int, index: int, data: schemas.MetadataSchema = Body(...),
                  context: schemas.CurrentContext = Depends(OR_context)):
    return metadata.edit(tenant_id=context.tenant_id, project_id=projectId, index=index,
                         new_name=data.key)
@@ -519,7 +506,7 @@ def get_capture_status(projectId: int, context: schemas.CurrentContext = Depends
@app.post('/{projectId}/sample_rate', tags=["projects"])
def update_capture_status(projectId: int, data: schemas.SampleRateSchema = Body(...),
                          context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": projects.update_capture_status(project_id=projectId, changes=data.dict())}
+    return {"data": projects.update_capture_status(project_id=projectId, changes=data)}


@app.get('/announcements', tags=["announcements"])
@@ -688,6 +675,8 @@ def batch_view_notifications(data: schemas.NotificationsViewSchema,
@app.get('/boarding', tags=['boarding'])
def get_boarding_state(context: schemas.CurrentContext = Depends(OR_context)):
+    if config("LOCAL_DEV", cast=bool, default=False):
+        return {"data": ""}
    return {"data": boarding.get_state(tenant_id=context.tenant_id)}
@@ -727,9 +716,9 @@ def delete_slack_integration(webhookId: int, _=Body(None), context: schemas.Curr
@app.put('/webhooks', tags=["webhooks"])
-def add_edit_webhook(data: schemas.CreateEditWebhookSchema = Body(...),
+def add_edit_webhook(data: schemas.WebhookSchema = Body(...),
                     context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": webhook.add_edit(tenant_id=context.tenant_id, data=data.dict(), replace_none=True)}
+    return {"data": webhook.add_edit(tenant_id=context.tenant_id, data=data, replace_none=True)}


@app.get('/webhooks', tags=["webhooks"])
@@ -739,7 +728,7 @@ def get_webhooks(context: schemas.CurrentContext = Depends(OR_context)):
@app.delete('/webhooks/{webhookId}', tags=["webhooks"])
def delete_webhook(webhookId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": webhook.delete(tenant_id=context.tenant_id, webhook_id=webhookId)}
+    return webhook.delete(tenant_id=context.tenant_id, webhook_id=webhookId)


@app.get('/client/members', tags=["client"])
@@ -765,8 +754,8 @@ def generate_new_user_token(context: schemas.CurrentContext = Depends(OR_context
@app.post('/account/password', tags=["account"])
def change_client_password(data: schemas.EditUserPasswordSchema = Body(...),
                           context: schemas.CurrentContext = Depends(OR_context)):
-    return users.change_password(email=context.email, old_password=data.old_password,
-                                 new_password=data.new_password, tenant_id=context.tenant_id,
+    return users.change_password(email=context.email, old_password=data.old_password.get_secret_value(),
+                                 new_password=data.new_password.get_secret_value(), tenant_id=context.tenant_id,
                                 user_id=context.user_id)

View file
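
A pattern that repeats through the router file above: endpoints stop expanding `data.dict()` and hand the pydantic model itself to the core layer, which reads typed attributes. Schematically (all names below are illustrative, not the real schemas):

from fastapi import Body, FastAPI
from pydantic import BaseModel

app = FastAPI()

class IntegrationSchema(BaseModel):   # stand-in for schemas.Integration*Schema
    token: str

def add_edit(tenant_id: int, project_id: int, data: IntegrationSchema):
    # Core layer gets typed attribute access instead of dict keys
    return {"projectId": project_id, "token": data.token}

@app.post("/{project_id}/integrations/example")
def add_edit_example(project_id: int, data: IntegrationSchema = Body(...)):
    return {"data": add_edit(tenant_id=1, project_id=project_id, data=data)}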

@@ -45,7 +45,7 @@ def login_user(data: schemas.UserLoginSchema = Body(...)):
            detail="Invalid captcha."
        )

-    r = users.authenticate(data.email, data.password)
+    r = users.authenticate(data.email, data.password.get_secret_value())
    if r is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
@@ -129,7 +129,7 @@ def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSc
@app.post('/client/members', tags=["client"])
def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...),
               context: schemas.CurrentContext = Depends(OR_context)):
-    return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(),
+    return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data,
                               background_tasks=background_tasks)
@@ -160,7 +160,7 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema =
    if user["expiredChange"]:
        return {"errors": ["expired change, please re-use the invitation link"]}

-    return users.set_password_invitation(new_password=data.password, user_id=user["userId"])
+    return users.set_password_invitation(new_password=data.password.get_secret_value(), user_id=user["userId"])


@app.put('/client/members/{memberId}', tags=["client"])
@@ -193,8 +193,10 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions", "replay"])
def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
                context: schemas.CurrentContext = Depends(OR_context)):
-    if isinstance(sessionId, str):
+    if not sessionId.isnumeric():
        return {"errors": ["session not found"]}
+    else:
+        sessionId = int(sessionId)
    data = sessions_replay.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True,
                                         include_fav_viewed=True, group_metadata=True, context=context)
    if data is None:
@@ -207,11 +209,27 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba
    }


+@app.post('/{projectId}/sessions/search', tags=["sessions"])
+def sessions_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...),
+                    context: schemas.CurrentContext = Depends(OR_context)):
+    data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id)
+    return {'data': data}
+
+
+@app.post('/{projectId}/sessions/search/ids', tags=["sessions"])
+def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...),
+                       context: schemas.CurrentContext = Depends(OR_context)):
+    data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True)
+    return {'data': data}
+
+
@app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"])
def get_session_events(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
                       context: schemas.CurrentContext = Depends(OR_context)):
-    if isinstance(sessionId, str):
+    if not sessionId.isnumeric():
        return {"errors": ["session not found"]}
+    else:
+        sessionId = int(sessionId)
    data = sessions_replay.get_replay(project_id=projectId, session_id=sessionId, full_data=True,
                                      include_fav_viewed=True, group_metadata=True, context=context)
    if data is None:
@@ -227,8 +245,10 @@ def get_session_events(projectId: int, sessionId: Union[int, str], background_ta
@app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"])
def get_session_events(projectId: int, sessionId: Union[int, str],
                       context: schemas.CurrentContext = Depends(OR_context)):
-    if isinstance(sessionId, str):
+    if not sessionId.isnumeric():
        return {"errors": ["session not found"]}
+    else:
+        sessionId = int(sessionId)
    data = sessions_replay.get_events(project_id=projectId, session_id=sessionId)
    if data is None:
        return {"errors": ["session not found"]}
@@ -249,18 +269,6 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str,
    }


-@app.post('/{projectId}/errors/search', tags=['errors'])
-def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...),
-                  context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": errors.search(data, projectId, user_id=context.user_id)}
-
-
-@app.get('/{projectId}/errors/stats', tags=['errors'])
-def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int,
-                 context: schemas.CurrentContext = Depends(OR_context)):
-    return errors.stats(projectId, user_id=context.user_id, startTimestamp=startTimestamp, endTimestamp=endTimestamp)
-
-
@app.get('/{projectId}/errors/{errorId}', tags=['errors'])
def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24,
                       density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)):
@@ -272,15 +280,6 @@ def errors_get_details(projectId: int, errorId: str, background_tasks: Backgroun
    return data


-@app.get('/{projectId}/errors/{errorId}/stats', tags=['errors'])
-def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7),
-                                    endDate: int = TimeUTC.now(), density: int = 7,
-                                    context: schemas.CurrentContext = Depends(OR_context)):
-    data = errors.get_details_chart(project_id=projectId, user_id=context.user_id, error_id=errorId,
-                                    **{"startDate": startDate, "endDate": endDate, "density": density})
-    return data
-
-
@app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors'])
def errors_get_details_sourcemaps(projectId: int, errorId: str,
                                  context: schemas.CurrentContext = Depends(OR_context)):
@@ -329,9 +328,10 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun
def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
                                 context: schemas.CurrentContext = Depends(OR_context)):
    not_found = {"errors": ["Replay file not found"]}
-    if isinstance(sessionId, str):
+    if not sessionId.isnumeric():
+        print(f"{sessionId} not a valid number.")
        return not_found
+    else:
+        sessionId = int(sessionId)
    if not sessions.session_exists(project_id=projectId, session_id=sessionId):
        print(f"{projectId}/{sessionId} not found in DB.")
        if not assist.session_exists(project_id=projectId, session_id=sessionId):
@@ -349,9 +349,10 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
                                   context: schemas.CurrentContext = Depends(OR_context)):
    not_found = {"errors": ["Devtools file not found"]}
-    if isinstance(sessionId, str):
+    if not sessionId.isnumeric():
+        print(f"{sessionId} not a valid number.")
        return not_found
+    else:
+        sessionId = int(sessionId)
    if not sessions.session_exists(project_id=projectId, session_id=sessionId):
        print(f"{projectId}/{sessionId} not found in DB.")
        if not assist.session_exists(project_id=projectId, session_id=sessionId):
@@ -484,7 +485,7 @@ def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
@app.post('/{projectId}/click_maps/search', tags=["click maps"])
-def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...),
+def click_map_search(projectId: int, data: schemas.ClickMapSessionsSearch = Body(...),
                     context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)}
@@ -515,7 +516,7 @@ def update_feature_flag(project_id: int, feature_flag_id: int, data: schemas.Fea
@app.delete('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"])
-async def delete_feature_flag(project_id: int, feature_flag_id: int, _=Body(None)):
+def delete_feature_flag(project_id: int, feature_flag_id: int, _=Body(None)):
    return {"data": feature_flags.delete_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id)}

View file
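
The repeated `isinstance(sessionId, str)` to `sessionId.isnumeric()` rewrites above validate the path parameter before converting it; the guard in isolation (helper name is mine, not from the repo):

def parse_session_id(session_id: str):
    # Digits-only check before int(); rejects "abc", "12.5" and "-1"
    if not session_id.isnumeric():
        return None
    return int(session_id)

assert parse_session_id("123") == 123
assert parse_session_id("abc") is None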

@@ -1,3 +1,4 @@
+from decouple import config
from fastapi import HTTPException, status

from chalicelib.core import health, tenants
@@ -8,6 +9,8 @@ public_app, app, app_apikey = get_routers()
@app.get('/healthz', tags=["health-check"])
def get_global_health_status():
+    if config("LOCAL_DEV", cast=bool, default=False):
+        return {"data": ""}
    return {"data": health.get_health()}

View file
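
The `LOCAL_DEV` short-circuit added to `/healthz` (and to `/boarding` above) relies on python-decouple's bool casting; a quick sketch:

from decouple import config

# Reads LOCAL_DEV from the environment or a .env file;
# strings like "true", "True", "1" or "yes" cast to True
LOCAL_DEV = config("LOCAL_DEV", cast=bool, default=False)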

@@ -11,10 +11,10 @@ public_app, app, app_apikey = get_routers()
async def get_insights_journey(projectId: int):
    return {"data": product_analytics.path_analysis(project_id=projectId, data=schemas.PathAnalysisSchema())}

-#
-# @app.post('/{projectId}/insights/journey', tags=["insights"])
-# async def get_insights_journey(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-#     return {"data": product_analytics.journey(project_id=projectId, data=data)}
+
+@app.post('/{projectId}/insights/journey', tags=["insights"])
+async def get_insights_journey(projectId: int, data: schemas.PathAnalysisSchema = Body(...)):
+    return {"data": product_analytics.path_analysis(project_id=projectId, data=data)}
#
#
# @app.post('/{projectId}/insights/users_acquisition', tags=["insights"])

View file

@@ -11,7 +11,7 @@ public_app, app, app_apikey = get_routers()
 @app.post('/{projectId}/dashboards', tags=["dashboard"])
-@app.put('/{projectId}/dashboards', tags=["dashboard"])
+# @app.put('/{projectId}/dashboards', tags=["dashboard"])
 def create_dashboards(projectId: int, data: schemas.CreateDashboardSchema = Body(...),
                       context: schemas.CurrentContext = Depends(OR_context)):
     return dashboards.create_dashboard(project_id=projectId, user_id=context.user_id, data=data)
@@ -30,7 +30,7 @@ def get_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentCont
     return {"data": data}

-@app.post('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
+# @app.post('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
 @app.put('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
 def update_dashboard(projectId: int, dashboardId: int, data: schemas.EditDashboardSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
@@ -50,8 +50,8 @@ def pin_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentCont
 @app.post('/{projectId}/dashboards/{dashboardId}/cards', tags=["cards"])
-@app.post('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
-@app.put('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
+# @app.post('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
+# @app.put('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
 def add_card_to_dashboard(projectId: int, dashboardId: int,
                           data: schemas.AddWidgetToDashboardPayloadSchema = Body(...),
                           context: schemas.CurrentContext = Depends(OR_context)):
@@ -60,7 +60,7 @@ def add_card_to_dashboard(projectId: int, dashboardId: int,
 @app.post('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
-@app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
+# @app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
 def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int,
                                        data: schemas.CardSchema = Body(...),
                                        context: schemas.CurrentContext = Depends(OR_context)):
@@ -69,7 +69,7 @@ def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int,
 @app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
-@app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
+# @app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
 def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int,
                                data: schemas.UpdateWidgetPayloadSchema = Body(...),
                                context: schemas.CurrentContext = Depends(OR_context)):
@@ -96,18 +96,18 @@ def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int
 @app.post('/{projectId}/cards/try', tags=["cards"])
-@app.post('/{projectId}/metrics/try', tags=["dashboard"])
-@app.put('/{projectId}/metrics/try', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"])
-@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/try', tags=["dashboard"])
+# @app.put('/{projectId}/metrics/try', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"])
+# @app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
 def try_card(projectId: int, data: schemas.CardSchema = Body(...),
              context: schemas.CurrentContext = Depends(OR_context)):
     return {"data": custom_metrics.merged_live(project_id=projectId, data=data, user_id=context.user_id)}

 @app.post('/{projectId}/cards/try/sessions', tags=["cards"])
-@app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"])
 def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(...),
                       context: schemas.CurrentContext = Depends(OR_context)):
     data = custom_metrics.try_sessions(project_id=projectId, user_id=context.user_id, data=data)
@@ -115,48 +115,50 @@ def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(..
 @app.post('/{projectId}/cards/try/issues', tags=["cards"])
-@app.post('/{projectId}/metrics/try/issues', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/try/issues', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"])
 def try_card_funnel_issues(projectId: int, data: schemas.CardSessionsSchema = Body(...),
                            context: schemas.CurrentContext = Depends(OR_context)):
     if len(data.series) == 0:
         return {"data": []}
-    data.series[0].filter.startDate = data.startTimestamp
-    data.series[0].filter.endDate = data.endTimestamp
+    data.series[0].filter.startTimestamp = data.startTimestamp
+    data.series[0].filter.endTimestamp = data.endTimestamp
     data = funnels.get_issues_on_the_fly_widget(project_id=projectId, data=data.series[0].filter)
     return {"data": data}

 @app.get('/{projectId}/cards', tags=["cards"])
-@app.get('/{projectId}/metrics', tags=["dashboard"])
-@app.get('/{projectId}/custom_metrics', tags=["customMetrics"])
+# @app.get('/{projectId}/metrics', tags=["dashboard"])
+# @app.get('/{projectId}/custom_metrics', tags=["customMetrics"])
 def get_cards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
     return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)}

 @app.post('/{projectId}/cards', tags=["cards"])
-@app.post('/{projectId}/metrics', tags=["dashboard"])
-@app.put('/{projectId}/metrics', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics', tags=["customMetrics"])
-@app.put('/{projectId}/custom_metrics', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics', tags=["dashboard"])
+# @app.put('/{projectId}/metrics', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics', tags=["customMetrics"])
+# @app.put('/{projectId}/custom_metrics', tags=["customMetrics"])
 def create_card(projectId: int, data: schemas.CardSchema = Body(...),
                 context: schemas.CurrentContext = Depends(OR_context)):
-    return custom_metrics.create(project_id=projectId, user_id=context.user_id, data=data)
+    return custom_metrics.create_card(project_id=projectId, user_id=context.user_id, data=data)

 @app.post('/{projectId}/cards/search', tags=["cards"])
-@app.post('/{projectId}/metrics/search', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/search', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/search', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/search', tags=["customMetrics"])
 def search_cards(projectId: int, data: schemas.SearchCardsSchema = Body(...),
                  context: schemas.CurrentContext = Depends(OR_context)):
     return {"data": custom_metrics.search_all(project_id=projectId, user_id=context.user_id, data=data)}

 @app.get('/{projectId}/cards/{metric_id}', tags=["cards"])
-@app.get('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
-@app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
+# @app.get('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
+# @app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
 def get_card(projectId: int, metric_id: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)):
-    if not isinstance(metric_id, int):
+    if metric_id.isnumeric():
+        metric_id = int(metric_id)
+    else:
         return {"errors": ["invalid card_id"]}
     data = custom_metrics.get_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id)
     if data is None:
@@ -173,8 +175,8 @@ def get_card(projectId: int, metric_id: Union[int, str], context: schemas.Curren
 @app.post('/{projectId}/cards/{metric_id}/sessions', tags=["cards"])
-@app.post('/{projectId}/metrics/{metric_id}/sessions', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/{metric_id}/sessions', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"])
 def get_card_sessions(projectId: int, metric_id: int,
                       data: schemas.CardSessionsSchema = Body(...),
                       context: schemas.CurrentContext = Depends(OR_context)):
@@ -185,13 +187,15 @@ def get_card_sessions(projectId: int, metric_id: int,
 @app.post('/{projectId}/cards/{metric_id}/issues', tags=["cards"])
-@app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"])
 def get_card_funnel_issues(projectId: int, metric_id: Union[int, str],
                            data: schemas.CardSessionsSchema = Body(...),
                            context: schemas.CurrentContext = Depends(OR_context)):
-    if not isinstance(metric_id, int):
-        return {"errors": [f"invalid card_id: {metric_id}"]}
+    if metric_id.isnumeric():
+        metric_id = int(metric_id)
+    else:
+        return {"errors": ["invalid card_id"]}
     data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
                                             data=data)
@@ -201,8 +205,8 @@ def get_card_funnel_issues(projectId: int, metric_id: Union[int, str],
 @app.post('/{projectId}/cards/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
-@app.post('/{projectId}/metrics/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/{metric_id}/issues/{issueId}/sessions', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/{metric_id}/issues/{issueId}/sessions', tags=["customMetrics"])
 def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str,
                                      data: schemas.CardSessionsSchema = Body(...),
                                      context: schemas.CurrentContext = Depends(OR_context)):
@@ -214,22 +218,22 @@ def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: st
 @app.post('/{projectId}/cards/{metric_id}/errors', tags=["dashboard"])
-@app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"])
 def get_custom_metric_errors_list(projectId: int, metric_id: int,
                                   data: schemas.CardSessionsSchema = Body(...),
                                   context: schemas.CurrentContext = Depends(OR_context)):
-    data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
-                                          data=data)
+    data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id,
+                                          metric_id=metric_id, data=data)
     if data is None:
         return {"errors": ["custom metric not found"]}
     return {"data": data}

 @app.post('/{projectId}/cards/{metric_id}/chart', tags=["card"])
-@app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"])
-def get_card_chart(projectId: int, metric_id: int, request: Request, data: schemas.CardChartSchema = Body(...),
+# @app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"])
+def get_card_chart(projectId: int, metric_id: int, request: Request, data: schemas.CardSessionsSchema = Body(...),
                    context: schemas.CurrentContext = Depends(OR_context)):
     data = custom_metrics.make_chart_from_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
                                                data=data)
@@ -237,25 +241,25 @@ def get_card_chart(projectId: int, metric_id: int, request: Request, data: schem
 @app.post('/{projectId}/cards/{metric_id}', tags=["dashboard"])
-@app.post('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
-@app.put('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
-@app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
-def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCardSchema = Body(...),
+# @app.post('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
+# @app.put('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
+# @app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
+def update_custom_metric(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...),
                          context: schemas.CurrentContext = Depends(OR_context)):
-    data = custom_metrics.update(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
+    data = custom_metrics.update_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
     if data is None:
         return {"errors": ["custom metric not found"]}
     return {"data": data}

 @app.post('/{projectId}/cards/{metric_id}/status', tags=["dashboard"])
-@app.post('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
-@app.put('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
-@app.put('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
+# @app.put('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
+# @app.put('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
 def update_custom_metric_state(projectId: int, metric_id: int,
-                               data: schemas.UpdateCustomMetricsStatusSchema = Body(...),
+                               data: schemas.UpdateCardStatusSchema = Body(...),
                                context: schemas.CurrentContext = Depends(OR_context)):
     return {
         "data": custom_metrics.change_state(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
@@ -263,8 +267,8 @@ def update_custom_metric_state(projectId: int, metric_id: int,
 @app.delete('/{projectId}/cards/{metric_id}', tags=["dashboard"])
-@app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
-@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
+# @app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
+# @app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
 def delete_custom_metric(projectId: int, metric_id: int, _=Body(None),
                          context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": custom_metrics.delete(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}
+    return {"data": custom_metrics.delete_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}

File diff suppressed because it is too large.

api/schemas/__init__.py (new file, 2 lines)

@@ -0,0 +1,2 @@
+from .schemas import *
+from . import overrides as _overrides

api/schemas/overrides.py (new file, 62 lines)

@@ -0,0 +1,62 @@
+from typing import TypeVar, Annotated, Union, Any
+from enum import Enum as _Enum
+
+from pydantic import BaseModel as _BaseModel
+from pydantic import ConfigDict, TypeAdapter, Field
+from pydantic.types import AnyType
+
+
+def attribute_to_camel_case(snake_str: str) -> str:
+    components = snake_str.split("_")
+    return components[0] + ''.join(x.title() for x in components[1:])
+
+
+def transform_email(email: str) -> str:
+    return email.lower().strip() if isinstance(email, str) else email
+
+
+def remove_whitespace(value: str) -> str:
+    return " ".join(value.split()) if isinstance(value, str) else value
+
+
+def remove_duplicate_values(value: list) -> list:
+    if value is not None and isinstance(value, list):
+        if len(value) > 0 \
+                and (isinstance(value[0], int) or isinstance(value[0], dict)):
+            return value
+        value = list(set(value))
+    return value
+
+
+def single_to_list(value: Union[list, Any]) -> list:
+    if value is not None and not isinstance(value, list):
+        value = [value]
+    return value
+
+
+def schema_extra(schema: dict, _):
+    props = {}
+    for k, v in schema.get('properties', {}).items():
+        if not v.get("doc_hidden", False):
+            props[k] = v
+    schema["properties"] = props
+
+
+class BaseModel(_BaseModel):
+    model_config = ConfigDict(alias_generator=attribute_to_camel_case,
+                              use_enum_values=True,
+                              json_schema_extra=schema_extra)
+
+
+class Enum(_Enum):
+    @classmethod
+    def has_value(cls, value) -> bool:
+        return value in cls._value2member_map_
+
+
+T = TypeVar('T')
+
+
+class ORUnion:
+    def __new__(self, union_types: Union[AnyType], discriminator: str) -> T:
+        return lambda **args: TypeAdapter(Annotated[union_types, Field(discriminator=discriminator)]) \
+            .validate_python(args)
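The `alias_generator` above is what lets clients keep sending camelCase JSON while the models stay snake_case internally. A small self-contained sketch of the effect (hypothetical `SearchPayload` model, pydantic v2):

    from pydantic import BaseModel, ConfigDict

    def attribute_to_camel_case(snake_str: str) -> str:
        components = snake_str.split("_")
        return components[0] + "".join(x.title() for x in components[1:])

    class SearchPayload(BaseModel):
        model_config = ConfigDict(alias_generator=attribute_to_camel_case,
                                  populate_by_name=True)
        start_timestamp: int
        end_timestamp: int

    p = SearchPayload(**{"startTimestamp": 0, "endTimestamp": 10})
    assert p.start_timestamp == 0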

api/schemas/schemas.py (new file, 1644 lines)

File diff suppressed because it is too large.

@@ -18,10 +18,10 @@
     },
     "homepage": "https://github.com/openreplay/openreplay#readme",
     "dependencies": {
-        "@maxmind/geoip2-node": "^3.5.0",
+        "@maxmind/geoip2-node": "^4.2.0",
         "express": "^4.18.2",
-        "jsonwebtoken": "^9.0.0",
-        "socket.io": "^4.6.1",
+        "jsonwebtoken": "^9.0.1",
+        "socket.io": "^4.7.2",
         "ua-parser-js": "^1.0.35"
     }
 }

ee/api/.gitignore (vendored, 5 changed lines)

@@ -260,7 +260,6 @@ Pipfile.lock
 /Dockerfile_bundle
 /entrypoint.bundle.sh
 /chalicelib/core/heatmaps.py
-/schemas.py
 #exp /chalicelib/core/custom_metrics.py
 /chalicelib/core/performance_event.py
 /chalicelib/core/saved_search.py
@@ -270,4 +269,6 @@ Pipfile.lock
 /run-dev.sh
 /run-alerts-dev.sh
 /routers/subs/v1_api.py
 #exp /chalicelib/core/dashboards.py
+/schemas/overrides.py
+/schemas/schemas.py


@@ -4,22 +4,24 @@ verify_ssl = true
 name = "pypi"

 [packages]
-requests = "==2.31.0"
 urllib3 = "==1.26.16"
-boto3 = "==1.26.148"
-pyjwt = "==2.7.0"
-psycopg2-binary = "==2.9.6"
-elasticsearch = "==8.8.0"
-jira = "==3.5.1"
-fastapi = "==0.96.0"
-uvicorn = {version = "==0.22.0", extras = ["standard"]}
+requests = "==2.31.0"
+boto3 = "==1.28.40"
+pyjwt = "==2.8.0"
+psycopg2-binary = "==2.9.7"
+elasticsearch = "==8.9.0"
+jira = "==3.5.2"
+fastapi = "==0.103.1"
+gunicorn = "==21.2.0"
 python-decouple = "==3.8"
-pydantic = {version = "==1.10.8", extras = ["email"]}
-apscheduler = "==3.10.1"
-clickhouse-driver = {version = "==0.2.5", extras = ["lz4"]}
+apscheduler = "==3.10.4"
+python3-saml = "==1.15.0"
 python-multipart = "==0.0.6"
-redis = "==4.5.5"
-azure-storage-blob = "==12.16.0"
+redis = "==5.0.0"
+azure-storage-blob = "==12.17.0"
+uvicorn = {version = "==0.23.2", extras = ["standard"]}
+pydantic = {version = "==2.3.0", extras = ["email"]}
+clickhouse-driver = {version = "==0.2.6", extras = ["lz4"]}

 [dev-packages]


@@ -14,7 +14,11 @@ from chalicelib.core import traces
 from chalicelib.utils import events_queue
 from chalicelib.utils import helper
 from chalicelib.utils import pg_client
-from routers import core, core_dynamic, ee, saml
+from routers import core, core_dynamic
+from routers import ee
+
+if config("ENABLE_SSO", cast=bool, default=True):
+    from routers import saml
 from crons import core_crons, ee_crons, core_dynamic_crons
 from routers.subs import insights, metrics, v1_api_ee
 from routers.subs import v1_api, health
@@ -97,9 +101,6 @@ app.include_router(core_dynamic.app_apikey)
 app.include_router(ee.public_app)
 app.include_router(ee.app)
 app.include_router(ee.app_apikey)
-app.include_router(saml.public_app)
-app.include_router(saml.app)
-app.include_router(saml.app_apikey)
 app.include_router(metrics.app)
 app.include_router(insights.app)
 app.include_router(v1_api.app_apikey)
@@ -107,3 +108,8 @@ app.include_router(v1_api_ee.app_apikey)
 app.include_router(health.public_app)
 app.include_router(health.app)
 app.include_router(health.app_apikey)
+
+if config("ENABLE_SSO", cast=bool, default=True):
+    app.include_router(saml.public_app)
+    app.include_router(saml.app)
+    app.include_router(saml.app_apikey)
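Gating both the import and the `include_router` calls behind `ENABLE_SSO` means deployments without SAML configured never load the saml module at all. A minimal sketch of the conditional-router pattern (hypothetical router, not the real saml module):

    from decouple import config
    from fastapi import FastAPI, APIRouter

    app = FastAPI()
    saml_like = APIRouter()  # stand-in for routers.saml.public_app etc.

    @saml_like.get("/sso/ping")
    def sso_ping():
        return {"data": "ok"}

    if config("ENABLE_SSO", cast=bool, default=True):
        app.include_router(saml_like)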


@@ -6,19 +6,19 @@ from starlette import status
 from starlette.exceptions import HTTPException

 from chalicelib.core import authorizers, users
-import schemas_ee
+import schemas

 class JWTAuth(HTTPBearer):
     def __init__(self, auto_error: bool = True):
         super(JWTAuth, self).__init__(auto_error=auto_error)

-    async def __call__(self, request: Request) -> Optional[schemas_ee.CurrentContext]:
+    async def __call__(self, request: Request) -> Optional[schemas.CurrentContext]:
         credentials: HTTPAuthorizationCredentials = await super(JWTAuth, self).__call__(request)
         if credentials:
             if not credentials.scheme == "Bearer":
                 raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.")
-            jwt_payload = authorizers.jwt_authorizer(credentials.scheme + " " + credentials.credentials)
+            jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials)
             auth_exists = jwt_payload is not None \
                           and users.auth_exists(user_id=jwt_payload.get("userId", -1),
                                                 tenant_id=jwt_payload.get("tenantId", -1),
@@ -27,18 +27,13 @@ class JWTAuth(HTTPBearer):
             if jwt_payload is None \
                     or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
                     or not auth_exists:
-                print("JWTAuth: Token issue")
                 if jwt_payload is not None:
                     print(jwt_payload)
-                    print(f"JWTAuth: user_id={jwt_payload.get('userId')} tenant_id={jwt_payload.get('tenantId')}")
-                if jwt_payload is None:
-                    print("JWTAuth: jwt_payload is None")
-                    print(credentials.scheme + " " + credentials.credentials)
-                if jwt_payload is not None and jwt_payload.get("iat") is None:
-                    print("JWTAuth: iat is None")
-                if jwt_payload is not None and jwt_payload.get("aud") is None:
-                    print("JWTAuth: aud is None")
-                if jwt_payload is not None and not auth_exists:
+                    if jwt_payload.get("iat") is None:
+                        print("JWTAuth: iat is None")
+                    if jwt_payload.get("aud") is None:
+                        print("JWTAuth: aud is None")
+                if not auth_exists:
                     print("JWTAuth: not users.auth_exists")
                 raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
@@ -47,12 +42,14 @@ class JWTAuth(HTTPBearer):
                 print("JWTAuth: User not found.")
                 raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.")

             jwt_payload["authorizer_identity"] = "jwt"
-            print(jwt_payload)
             request.state.authorizer_identity = "jwt"
-            request.state.currentContext = schemas_ee.CurrentContext(tenant_id=jwt_payload.get("tenantId", -1),
-                                                                     user_id=jwt_payload.get("userId", -1),
-                                                                     email=user["email"],
-                                                                     permissions=user["permissions"])
+            if user["serviceAccount"]:
+                user["permissions"] = [p.value for p in schemas_ee.ServicePermissions]
+            request.state.currentContext = schemas.CurrentContext(tenantId=jwt_payload.get("tenantId", -1),
+                                                                  userId=jwt_payload.get("userId", -1),
+                                                                  email=user["email"],
+                                                                  permissions=user["permissions"],
+                                                                  serviceAccount=user["serviceAccount"])
             return request.state.currentContext
         else:
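When the token belongs to a service account, its stored permissions are ignored and the full `ServicePermissions` set is granted before the request context is built. A sketch of that expansion with an illustrative enum (the real member names live in the EE schemas and may differ):

    from enum import Enum

    class ServicePermissions(Enum):  # illustrative values only
        SESSION_REPLAY = "SERVICE_SESSION_REPLAY"
        READ_NOTES = "SERVICE_READ_NOTES"

    def effective_permissions(user: dict) -> list:
        if user.get("serviceAccount"):
            return [p.value for p in ServicePermissions]
        return user["permissions"]

    assert effective_permissions({"serviceAccount": True, "permissions": []}) == \
           ["SERVICE_SESSION_REPLAY", "SERVICE_READ_NOTES"]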


@@ -3,7 +3,7 @@ import hashlib
 from decouple import config

 import schemas
-import schemas_ee
+import schemas
 from chalicelib.utils import pg_client, helper
 from chalicelib.utils.TimeUTC import TimeUTC
 from chalicelib.utils.storage import StorageClient
@@ -14,16 +14,16 @@ def generate_file_key(project_id, key):
     return f"{project_id}/{hashlib.md5(key.encode()).hexdigest()}"

-def presign_record(project_id, data: schemas_ee.AssistRecordPayloadSchema, context: schemas_ee.CurrentContext):
+def presign_record(project_id, data: schemas.AssistRecordPayloadSchema, context: schemas.CurrentContext):
     key = generate_file_key(project_id=project_id, key=f"{TimeUTC.now()}-{data.name}")
     presigned_url = StorageClient.get_presigned_url_for_upload(bucket=config('ASSIST_RECORDS_BUCKET'), expires_in=1800,
                                                                key=key)
     return {"URL": presigned_url, "key": key}

-def save_record(project_id, data: schemas_ee.AssistRecordSavePayloadSchema, context: schemas_ee.CurrentContext):
+def save_record(project_id, data: schemas.AssistRecordSavePayloadSchema, context: schemas.CurrentContext):
     extra.tag_record(file_key=data.key, tag_value=config('RETENTION_L_VALUE', default='vault'))
-    params = {"user_id": context.user_id, "project_id": project_id, **data.dict()}
+    params = {"user_id": context.user_id, "project_id": project_id, **data.model_dump()}
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(
             f"""INSERT INTO assist_records(project_id, user_id, name, file_key, duration, session_id)
@@ -40,8 +40,8 @@ def save_record(project_id, data: schemas.AssistRecordSavePayloadSchema, cont
     return result

-def search_records(project_id: int, data: schemas_ee.AssistRecordSearchPayloadSchema,
-                   context: schemas_ee.CurrentContext):
+def search_records(project_id: int, data: schemas.AssistRecordSearchPayloadSchema,
+                   context: schemas.CurrentContext):
     conditions = ["projects.tenant_id=%(tenant_id)s",
                   "projects.deleted_at ISNULL",
                   "projects.project_id=%(project_id)s",
@@ -54,7 +54,7 @@ def search_records(project_id: int, data: schemas.AssistRecordSearchPayloadSc
     params = {"tenant_id": context.tenant_id, "project_id": project_id,
               "startDate": data.startTimestamp, "endDate": data.endTimestamp,
               "p_start": (data.page - 1) * data.limit, "p_limit": data.limit,
-              **data.dict()}
+              **data.model_dump()}
     if data.user_id is not None:
         conditions.append("assist_records.user_id=%(user_id)s")
     if data.query is not None and len(data.query) > 0:
@@ -85,7 +85,7 @@ def search_records(project_id: int, data: schemas.AssistRecordSearchPayloadSc
     return results

-def get_record(project_id, record_id, context: schemas_ee.CurrentContext):
+def get_record(project_id, record_id, context: schemas.CurrentContext):
     conditions = ["projects.tenant_id=%(tenant_id)s",
                   "projects.deleted_at ISNULL",
                   "assist_records.record_id=%(record_id)s",
@@ -110,8 +110,8 @@ def get_record(project_id, record_id, context: schemas.CurrentContext):
     return result

-def update_record(project_id, record_id, data: schemas_ee.AssistRecordUpdatePayloadSchema,
-                  context: schemas_ee.CurrentContext):
+def update_record(project_id, record_id, data: schemas.AssistRecordUpdatePayloadSchema,
+                  context: schemas.CurrentContext):
     conditions = ["assist_records.record_id=%(record_id)s", "assist_records.deleted_at ISNULL"]
     params = {"tenant_id": context.tenant_id, "project_id": project_id, "record_id": record_id, "name": data.name}
     with pg_client.PostgresClient() as cur:
@@ -136,7 +136,7 @@ def update_record(project_id, record_id, data: schemas.AssistRecordUpdatePayl
     return result

-def delete_record(project_id, record_id, context: schemas_ee.CurrentContext):
+def delete_record(project_id, record_id, context: schemas.CurrentContext):
     conditions = ["assist_records.record_id=%(record_id)s"]
     params = {"tenant_id": context.tenant_id, "project_id": project_id, "record_id": record_id}
     with pg_client.PostgresClient() as cur:
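The `data.dict()` → `data.model_dump()` swaps here follow the pydantic v2 rename: v1's `.dict()`/`.json()` become `.model_dump()`/`.model_dump_json()`. A quick sketch (hypothetical `Record` model):

    from pydantic import BaseModel

    class Record(BaseModel):
        name: str
        duration: int

    r = Record(name="demo", duration=30)
    params = {"user_id": 1, "project_id": 2, **r.model_dump()}
    assert params["duration"] == 30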


@@ -7,13 +7,12 @@ from chalicelib.utils import helper
 from chalicelib.utils.TimeUTC import TimeUTC

-def jwt_authorizer(token):
-    token = token.split(" ")
-    if len(token) != 2 or token[0].lower() != "bearer":
+def jwt_authorizer(scheme: str, token: str):
+    if scheme.lower() != "bearer":
         return None
     try:
         payload = jwt.decode(
-            token[1],
+            token,
             config("jwt_secret"),
             algorithms=config("jwt_algorithm"),
             audience=[f"front:{helper.get_stage_name()}"]
@@ -23,6 +22,7 @@ def jwt_authorizer(token):
         return None
     except BaseException as e:
         print("! JWT Base Exception")
+        print(e)
         return None
     return payload
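With the split-argument contract, the caller passes the scheme and the raw token separately instead of one "Bearer x" string. A self-contained sketch with PyJWT (the secret, algorithm, and audience here are illustrative defaults, not the deployed config):

    import jwt  # PyJWT

    def jwt_authorizer(scheme: str, token: str, secret: str = "secret",
                       algorithm: str = "HS256", audience: str = "front:dev"):
        if scheme.lower() != "bearer":
            return None
        try:
            return jwt.decode(token, secret, algorithms=[algorithm], audience=audience)
        except jwt.PyJWTError:
            return None

    token = jwt.encode({"userId": 1, "aud": "front:dev"}, "secret", algorithm="HS256")
    assert jwt_authorizer("Bearer", token)["userId"] == 1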


@@ -5,8 +5,8 @@ from decouple import config
 from fastapi import HTTPException, status

 import schemas
-import schemas_ee
-from chalicelib.core import funnels, issues, metrics, click_maps, sessions_insights, sessions_mobs, sessions_favorite
+from chalicelib.core import funnels, issues, metrics, click_maps, sessions_insights, sessions_mobs, sessions_favorite, \
+    product_analytics
 from chalicelib.utils import helper, pg_client
 from chalicelib.utils.TimeUTC import TimeUTC
 from chalicelib.utils.storage import StorageClient, extra
@@ -25,25 +25,24 @@
 PIE_CHART_GROUP = 5

-def __try_live(project_id, data: schemas_ee.CardSchema):
+# TODO: refactor this to split
+#  timeseries /
+#  table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs
+def __try_live(project_id, data: schemas.CardSchema):
     results = []
     for i, s in enumerate(data.series):
-        s.filter.startDate = data.startTimestamp
-        s.filter.endDate = data.endTimestamp
         results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
                                                view_type=data.view_type, metric_type=data.metric_type,
                                                metric_of=data.metric_of, metric_value=data.metric_value))
         if data.view_type == schemas.MetricTimeseriesViewType.progress:
             r = {"count": results[-1]}
-            diff = s.filter.endDate - s.filter.startDate
-            s.filter.endDate = s.filter.startDate
-            s.filter.startDate = s.filter.endDate - diff
+            diff = s.filter.endTimestamp - s.filter.startTimestamp
+            s.filter.endTimestamp = s.filter.startTimestamp
+            s.filter.startTimestamp = s.filter.endTimestamp - diff
             r["previousCount"] = sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
                                                          view_type=data.view_type, metric_type=data.metric_type,
                                                          metric_of=data.metric_of, metric_value=data.metric_value)
             r["countProgress"] = helper.__progress(old_val=r["previousCount"], new_val=r["count"])
-            # r["countProgress"] = ((r["count"] - r["previousCount"]) / r["previousCount"]) * 100 \
-            #     if r["previousCount"] > 0 else 0
             r["seriesName"] = s.name if s.name else i + 1
             r["seriesId"] = s.series_id if s.series_id else None
             results[-1] = r
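The progress view derives its "previous period" by sliding the window back by its own length. The timestamp arithmetic in isolation:

    def previous_window(start_ts: int, end_ts: int):
        diff = end_ts - start_ts
        return start_ts - diff, start_ts  # same length, immediately before

    assert previous_window(1_000, 1_500) == (500, 1_000)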
@@ -58,108 +57,97 @@ def __try_live(project_id, data: schemas.CardSchema):
     return results

-def __is_funnel_chart(data: schemas_ee.CardSchema):
+def __is_funnel_chart(data: schemas.CardSchema):
     return data.metric_type == schemas.MetricType.funnel

-def __get_funnel_chart(project_id, data: schemas_ee.CardSchema):
+def __get_funnel_chart(project_id: int, data: schemas.CardFunnel, user_id: int = None):
     if len(data.series) == 0:
         return {
             "stages": [],
             "totalDropDueToIssues": 0
         }
-    data.series[0].filter.startDate = data.startTimestamp
-    data.series[0].filter.endDate = data.endTimestamp
     return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)

-def __is_errors_list(data: schemas_ee.CardSchema):
+def __is_errors_list(data: schemas.CardSchema):
     return data.metric_type == schemas.MetricType.table \
         and data.metric_of == schemas.MetricOfTable.errors

-def __get_errors_list(project_id, user_id, data: schemas_ee.CardSchema):
+def __get_errors_list(project_id, user_id, data: schemas.CardSchema):
     if len(data.series) == 0:
         return {
             "total": 0,
             "errors": []
         }
-    data.series[0].filter.startDate = data.startTimestamp
-    data.series[0].filter.endDate = data.endTimestamp
-    data.series[0].filter.page = data.page
-    data.series[0].filter.limit = data.limit
     return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id)

-def __is_sessions_list(data: schemas_ee.CardSchema):
+def __is_sessions_list(data: schemas.CardSchema):
     return data.metric_type == schemas.MetricType.table \
         and data.metric_of == schemas.MetricOfTable.sessions

-def __get_sessions_list(project_id, user_id, data: schemas_ee.CardSchema):
+def __get_sessions_list(project_id, user_id, data: schemas.CardSchema):
     if len(data.series) == 0:
         print("empty series")
         return {
             "total": 0,
             "sessions": []
         }
-    data.series[0].filter.startDate = data.startTimestamp
-    data.series[0].filter.endDate = data.endTimestamp
-    data.series[0].filter.page = data.page
-    data.series[0].filter.limit = data.limit
     return sessions.search_sessions(data=data.series[0].filter, project_id=project_id, user_id=user_id)

-def __is_predefined(data: schemas_ee.CardSchema):
+def __is_predefined(data: schemas.CardSchema):
     return data.is_template

-def __is_click_map(data: schemas_ee.CardSchema):
+def __is_click_map(data: schemas.CardSchema):
     return data.metric_type == schemas.MetricType.click_map

-def __get_click_map_chart(project_id, user_id, data: schemas_ee.CardSchema, include_mobs: bool = True):
+def __get_click_map_chart(project_id, user_id, data: schemas.CardClickMap, include_mobs: bool = True):
     if len(data.series) == 0:
         return None
-    data.series[0].filter.startDate = data.startTimestamp
-    data.series[0].filter.endDate = data.endTimestamp
     return click_maps.search_short_session(project_id=project_id, user_id=user_id,
-                                           data=schemas.FlatClickMapSessionsSearch(**data.series[0].filter.dict()),
+                                           data=schemas.ClickMapSessionsSearch(
+                                               **data.series[0].filter.model_dump()),
                                            include_mobs=include_mobs)

 # EE only
-def __is_insights(data: schemas_ee.CardSchema):
+def __is_insights(data: schemas.CardSchema):
     return data.metric_type == schemas.MetricType.insights

 # EE only
-def __get_insights_chart(project_id, user_id, data: schemas_ee.CardSchema):
+def __get_insights_chart(project_id: int, data: schemas.CardInsights, user_id: int = None):
     return sessions_insights.fetch_selected(project_id=project_id,
-                                            data=schemas_ee.GetInsightsSchema(startTimestamp=data.startTimestamp,
+                                            data=schemas.GetInsightsSchema(startTimestamp=data.startTimestamp,
                                                                            endTimestamp=data.endTimestamp,
                                                                            metricValue=data.metric_value,
                                                                            series=data.series))

-def merged_live(project_id, data: schemas_ee.CardSchema, user_id=None):
-    if data.is_template:
-        return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.dict())
-    elif __is_funnel_chart(data):
-        return __get_funnel_chart(project_id=project_id, data=data)
-    elif __is_errors_list(data):
-        return __get_errors_list(project_id=project_id, user_id=user_id, data=data)
-    elif __is_sessions_list(data):
-        return __get_sessions_list(project_id=project_id, user_id=user_id, data=data)
-    elif __is_click_map(data):
-        return __get_click_map_chart(project_id=project_id, user_id=user_id, data=data)
-    # EE only
-    elif __is_insights(data):
-        return __get_insights_chart(project_id=project_id, user_id=user_id, data=data)
-    elif len(data.series) == 0:
-        return []
+def __get_path_analysis_chart(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
+    if len(data.series) == 0:
+        data.series.append(
+            schemas.CardPathAnalysisSchema(startTimestamp=data.startTimestamp, endTimestamp=data.endTimestamp))
+    elif not isinstance(data.series[0].filter, schemas.PathAnalysisSchema):
+        data.series[0].filter = schemas.PathAnalysisSchema()
+
+    return product_analytics.path_analysis(project_id=project_id, data=data.series[0].filter, density=data.density,
+                                           selected_event_type=data.metric_value, hide_minor_paths=data.hide_excess)
+
+
+def __is_path_analysis(data: schemas.CardSchema):
+    return data.metric_type == schemas.MetricType.pathAnalysis
+
+
+def __get_timeseries_chart(project_id: int, data: schemas.CardTimeSeries, user_id: int = None):
     series_charts = __try_live(project_id=project_id, data=data)
-    if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table:
+    if data.view_type == schemas.MetricTimeseriesViewType.progress:
         return series_charts
     results = [{}] * len(series_charts[0])
     for i in range(len(results)):
@@ -169,29 +157,137 @@ def merged_live(project_id, data: schemas.CardSchema, user_id=None):
     return results

-def __merge_metric_with_data(metric: schemas_ee.CardSchema,
-                             data: schemas.CardChartSchema) -> schemas_ee.CardSchema:
+def empty(**args):
+    raise Exception("not supported")
+
+
+def __get_table_of_user_ids(project_id: int, data: schemas.CardTable, user_id: int = None):
+    series_charts = __try_live(project_id=project_id, data=data)
+    return series_charts
+
+
+def __get_table_of_sessions(project_id: int, data: schemas.CardTable, user_id):
+    return __get_sessions_list(project_id=project_id, user_id=user_id, data=data)
+
+
+def __get_table_of_errors(project_id: int, data: schemas.CardTable, user_id: int):
+    return __get_errors_list(project_id=project_id, user_id=user_id, data=data)
+
+
+def __get_table_of_issues(project_id: int, data: schemas.CardTable, user_id: int = None):
+    return __try_live(project_id=project_id, data=data)
+
+
+def __get_table_of_browsers(project_id: int, data: schemas.CardTable, user_id: int = None):
+    return __try_live(project_id=project_id, data=data)
+
+
+def __get_table_of_devises(project_id: int, data: schemas.CardTable, user_id: int = None):
+    return __try_live(project_id=project_id, data=data)
+
+
+def __get_table_of_countries(project_id: int, data: schemas.CardTable, user_id: int = None):
+    return __try_live(project_id=project_id, data=data)
+
+
+def __get_table_of_urls(project_id: int, data: schemas.CardTable, user_id: int = None):
+    return __try_live(project_id=project_id, data=data)
+
+
+def __get_table_chart(project_id: int, data: schemas.CardTable, user_id: int):
+    supported = {
+        schemas.MetricOfTable.sessions: __get_table_of_sessions,
+        schemas.MetricOfTable.errors: __get_table_of_errors,
+        schemas.MetricOfTable.user_id: __get_table_of_user_ids,
+        schemas.MetricOfTable.issues: __get_table_of_issues,
+        schemas.MetricOfTable.user_browser: __get_table_of_browsers,
+        schemas.MetricOfTable.user_device: __get_table_of_devises,
+        schemas.MetricOfTable.user_country: __get_table_of_countries,
+        schemas.MetricOfTable.visited_url: __get_table_of_urls,
+    }
+    return supported.get(data.metric_of, empty)(project_id=project_id, data=data, user_id=user_id)
+
+
+def get_chart(project_id: int, data: schemas.CardSchema, user_id: int):
+    if data.is_template:
+        return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.model_dump())
+    supported = {
+        schemas.MetricType.timeseries: __get_timeseries_chart,
+        schemas.MetricType.table: __get_table_chart,
+        schemas.MetricType.click_map: __get_click_map_chart,
+        schemas.MetricType.funnel: __get_funnel_chart,
+        schemas.MetricType.insights: __get_insights_chart,
+        schemas.MetricType.pathAnalysis: __get_path_analysis_chart
+    }
+    return supported.get(data.metric_type, empty)(project_id=project_id, data=data, user_id=user_id)
+
+
+def merged_live(project_id, data: schemas.CardSchema, user_id=None):
+    return get_chart(project_id=project_id, data=data, user_id=user_id)
+    print("---1")
+    if data.is_template:
+        print("---2")
+        return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.model_dump())
+    elif __is_funnel_chart(data):
+        print("---3")
+        return __get_funnel_chart(project_id=project_id, data=data)
+    elif __is_errors_list(data):
+        print("---4")
+        return __get_errors_list(project_id=project_id, user_id=user_id, data=data)
+    elif __is_sessions_list(data):
+        print("---5")
+        return __get_sessions_list(project_id=project_id, user_id=user_id, data=data)
+    elif __is_click_map(data):
+        print("---6")
+        return __get_click_map_chart(project_id=project_id, user_id=user_id, data=data)
+    # EE only
+    elif __is_insights(data):
+        return __get_insights_chart(project_id=project_id, user_id=user_id, data=data)
+    elif __is_path_analysis(data):
+        print("---7")
+        return __get_path_analysis_chart(project_id=project_id, data=data)
+    elif len(data.series) == 0:
+        print("---8")
+        return []
+
+    series_charts = __try_live(project_id=project_id, data=data)
+    print("---9")
+    if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table:
+        print("---10")
+        return series_charts
+    results = [{}] * len(series_charts[0])
+    print("---11")
+    for i in range(len(results)):
+        for j, series_chart in enumerate(series_charts):
+            results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"],
+                          data.series[j].name if data.series[j].name else j + 1: series_chart[i]["count"]}
+    return results
+
+
+def __merge_metric_with_data(metric: schemas.CardSchema,
+                             data: schemas.CardSessionsSchema) -> schemas.CardSchema:
     if data.series is not None and len(data.series) > 0:
         metric.series = data.series
-    metric: schemas_ee.CardSchema = schemas_ee.CardSchema(
-        **{**data.dict(by_alias=True), **metric.dict(by_alias=True)})
+    # TODO: try to refactor this
+    metric: schemas.CardSchema = schemas.CardSchema(**{**data.model_dump(by_alias=True),
+                                                       **metric.model_dump(by_alias=True)})
     if len(data.filters) > 0 or len(data.events) > 0:
         for s in metric.series:
             if len(data.filters) > 0:
                 s.filter.filters += data.filters
             if len(data.events) > 0:
                 s.filter.events += data.events
-    metric.limit = data.limit
-    metric.page = data.page
-    metric.startTimestamp = data.startTimestamp
-    metric.endTimestamp = data.endTimestamp
+    # metric.limit = data.limit
+    # metric.page = data.page
+    # metric.startTimestamp = data.startTimestamp
+    # metric.endTimestamp = data.endTimestamp
     return metric

-def make_chart(project_id, user_id, data: schemas.CardChartSchema, metric: schemas_ee.CardSchema):
+def make_chart(project_id, user_id, data: schemas.CardSessionsSchema, metric: schemas.CardSchema):
     if metric is None:
         return None
-    metric: schemas_ee.CardSchema = __merge_metric_with_data(metric=metric, data=data)
+    metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
     return merged_live(project_id=project_id, data=metric, user_id=user_id)
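`get_chart` replaces the old if/elif chain with a dispatch table keyed by metric type, with `empty` as the fail-fast default for unsupported values. The pattern reduced to its core (string keys stand in for the schema enums):

    def empty(**kwargs):
        raise Exception("not supported")

    def timeseries_chart(project_id, data, user_id=None):
        return {"type": "timeseries", "project": project_id}

    SUPPORTED = {"timeseries": timeseries_chart}

    def get_chart(project_id, metric_type, data=None, user_id=None):
        return SUPPORTED.get(metric_type, empty)(project_id=project_id,
                                                 data=data, user_id=user_id)

    assert get_chart(1, "timeseries")["type"] == "timeseries"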
@@ -201,8 +297,8 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CardSessionsSchem
     raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
     if raw_metric is None:
         return None
-    metric: schemas_ee.CardSchema = schemas_ee.CardSchema(**raw_metric)
-    metric: schemas_ee.CardSchema = __merge_metric_with_data(metric=metric, data=data)
+    metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)
+    metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
     if metric is None:
         return None
     results = []
@@ -210,10 +306,10 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CardSessionsSchem
     # if __is_click_map(metric) and raw_metric.get("data") is not None:
     #     is_click_map = True
     for s in metric.series:
-        s.filter.startDate = data.startTimestamp
-        s.filter.endDate = data.endTimestamp
-        s.filter.limit = data.limit
-        s.filter.page = data.page
+        # s.filter.startTimestamp = data.startTimestamp
+        # s.filter.endTimestamp = data.endTimestamp
+        # s.filter.limit = data.limit
+        # s.filter.page = data.page
         # if is_click_map:
         #     results.append(
         #         {"seriesId": s.series_id, "seriesName": s.name, "total": 1, "sessions": [raw_metric["data"]]})
@@ -228,15 +324,11 @@ def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CardSessions
     raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
     if raw_metric is None:
         return None
-    metric: schemas_ee.CardSchema = schemas_ee.CardSchema(**raw_metric)
-    metric: schemas_ee.CardSchema = __merge_metric_with_data(metric=metric, data=data)
+    metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)
+    metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
     if metric is None:
         return None
     for s in metric.series:
-        s.filter.startDate = data.startTimestamp
-        s.filter.endDate = data.endTimestamp
-        s.filter.limit = data.limit
-        s.filter.page = data.page
         return {"seriesId": s.series_id, "seriesName": s.name,
                 **funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}
@@ -245,28 +337,20 @@ def get_errors_list(project_id, user_id, metric_id, data: schemas.CardSessionsSc
     raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
     if raw_metric is None:
         return None
-    metric: schemas_ee.CardSchema = schemas_ee.CardSchema(**raw_metric)
-    metric: schemas_ee.CardSchema = __merge_metric_with_data(metric=metric, data=data)
+    metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)
+    metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
     if metric is None:
         return None
     for s in metric.series:
-        s.filter.startDate = data.startTimestamp
-        s.filter.endDate = data.endTimestamp
-        s.filter.limit = data.limit
-        s.filter.page = data.page
         return {"seriesId": s.series_id, "seriesName": s.name,
                 **errors.search(data=s.filter, project_id=project_id, user_id=user_id)}

 def try_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
     results = []
-    if data.series is None:
+    if len(data.series) == 0:
         return results
     for s in data.series:
-        s.filter.startDate = data.startTimestamp
-        s.filter.endDate = data.endTimestamp
-        s.filter.limit = data.limit
-        s.filter.page = data.page
         if len(data.filters) > 0:
             s.filter.filters += data.filters
         if len(data.events) > 0:
@ -277,7 +361,7 @@ def try_sessions(project_id, user_id, data: schemas.CardSessionsSchema):
return results return results
def create(project_id, user_id, data: schemas_ee.CardSchema, dashboard=False): def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
with pg_client.PostgresClient() as cur: with pg_client.PostgresClient() as cur:
session_data = None session_data = None
if __is_click_map(data): if __is_click_map(data):
@ -299,13 +383,13 @@ def create(project_id, user_id, data: schemas_ee.CardSchema, dashboard=False):
session_data = json.dumps(session_data) session_data = json.dumps(session_data)
_data = {"session_data": session_data} _data = {"session_data": session_data}
for i, s in enumerate(data.series): for i, s in enumerate(data.series):
for k in s.dict().keys(): for k in s.model_dump().keys():
_data[f"{k}_{i}"] = s.__getattribute__(k) _data[f"{k}_{i}"] = s.__getattribute__(k)
_data[f"index_{i}"] = i _data[f"index_{i}"] = i
_data[f"filter_{i}"] = s.filter.json() _data[f"filter_{i}"] = s.filter.json()
series_len = len(data.series) series_len = len(data.series)
params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data} params = {"user_id": user_id, "project_id": project_id, **data.model_dump(), **_data}
params["default_config"] = json.dumps(data.default_config.dict()) params["default_config"] = json.dumps(data.default_config.model_dump())
query = """INSERT INTO metrics (project_id, user_id, name, is_public, query = """INSERT INTO metrics (project_id, user_id, name, is_public,
view_type, metric_type, metric_of, metric_value, view_type, metric_type, metric_of, metric_value,
metric_format, default_config, thumbnail, data) metric_format, default_config, thumbnail, data)
@ -331,7 +415,7 @@ def create(project_id, user_id, data: schemas_ee.CardSchema, dashboard=False):
return {"data": get_card(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)} return {"data": get_card(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)}
-def update(metric_id, user_id, project_id, data: schemas_ee.UpdateCardSchema):
+def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
     metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
     if metric is None:
         return None
@@ -344,7 +428,7 @@ def update(metric_id, user_id, project_id, data: schemas_ee.UpdateCardSchema):
               "user_id": user_id, "project_id": project_id, "view_type": data.view_type,
               "metric_type": data.metric_type, "metric_of": data.metric_of,
               "metric_value": data.metric_value, "metric_format": data.metric_format,
-              "config": json.dumps(data.default_config.dict()), "thumbnail": data.thumbnail}
+              "config": json.dumps(data.default_config.model_dump()), "thumbnail": data.thumbnail}
     for i, s in enumerate(data.series):
         prefix = "u_"
         if s.index is None:
@@ -355,7 +439,7 @@ def update(metric_id, user_id, project_id, data: schemas_ee.UpdateCardSchema):
         else:
             u_series.append({"i": i, "s": s})
             u_series_ids.append(s.series_id)
-            ns = s.dict()
+            ns = s.model_dump()
             for k in ns.keys():
                 if k == "filter":
                     ns[k] = json.dumps(ns[k])
@@ -477,7 +561,7 @@ def get_all(project_id, user_id):
     return result


-def delete(project_id, metric_id, user_id):
+def delete_card(project_id, metric_id, user_id):
     with pg_client.PostgresClient() as cur:
         cur.execute(
             cur.mogrify("""\
@@ -485,8 +569,7 @@ def delete(project_id, metric_id, user_id):
                     SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now())
                     WHERE project_id = %(project_id)s
                       AND metric_id = %(metric_id)s
-                      AND (user_id = %(user_id)s OR is_public)
-                    RETURNING data;""",
+                      AND (user_id = %(user_id)s OR is_public);""",
                         {"metric_id": metric_id, "project_id": project_id, "user_id": user_id})
         )
 # for EE only
@@ -596,13 +679,13 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
     metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
     if metric is None:
         return None
-    metric: schemas_ee.CardSchema = schemas.CardSchema(**metric)
-    metric: schemas_ee.CardSchema = __merge_metric_with_data(metric=metric, data=data)
+    metric: schemas.CardSchema = schemas.CardSchema(**metric)
+    metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
     if metric is None:
         return None
     for s in metric.series:
-        s.filter.startDate = data.startTimestamp
-        s.filter.endDate = data.endTimestamp
+        s.filter.startTimestamp = data.startTimestamp
+        s.filter.endTimestamp = data.endTimestamp
         s.filter.limit = data.limit
         s.filter.page = data.page
         issues_list = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {})
@@ -628,13 +711,15 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
             "issue": issue}


-def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChartSchema):
+def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
     raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, include_data=True)
     if raw_metric is None:
         raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="card not found")
-    metric: schemas_ee.CardSchema = schemas_ee.CardSchema(**raw_metric)
+    raw_metric["startTimestamp"] = data.startTimestamp
+    raw_metric["endTimestamp"] = data.endTimestamp
+    metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)
     if metric.is_template:
-        return get_predefined_metric(key=metric.metric_of, project_id=project_id, data=data.dict())
+        return get_predefined_metric(key=metric.metric_of, project_id=project_id, data=data.model_dump())
     elif __is_click_map(metric):
         if raw_metric["data"]:
             keys = sessions_mobs. \
@@ -654,53 +739,52 @@ def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChart
     return make_chart(project_id=project_id, user_id=user_id, data=data, metric=metric)
-PREDEFINED = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions,
-              schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
-              schemas.MetricOfWebVitals.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
-              schemas.MetricOfWebVitals.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
-              schemas.MetricOfWebVitals.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
-              schemas.MetricOfWebVitals.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
-              schemas.MetricOfWebVitals.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
-              schemas.MetricOfWebVitals.avg_session_duration: metrics.get_user_activity_avg_session_duration,
-              schemas.MetricOfWebVitals.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
-              schemas.MetricOfWebVitals.avg_pages_response_time: metrics.get_pages_response_time,
-              schemas.MetricOfWebVitals.avg_response_time: metrics.get_top_metrics_avg_response_time,
-              schemas.MetricOfWebVitals.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
-              schemas.MetricOfWebVitals.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
-              schemas.MetricOfWebVitals.avg_till_first_byte: metrics.get_top_metrics_avg_till_first_bit,
-              schemas.MetricOfWebVitals.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
-              schemas.MetricOfWebVitals.count_requests: metrics.get_top_metrics_count_requests,
-              schemas.MetricOfWebVitals.avg_time_to_render: metrics.get_time_to_render,
-              schemas.MetricOfWebVitals.avg_used_js_heap_size: metrics.get_memory_consumption,
-              schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu,
-              schemas.MetricOfWebVitals.avg_fps: metrics.get_avg_fps,
-              schemas.MetricOfErrors.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
-              schemas.MetricOfErrors.domains_errors_4xx: metrics.get_domains_errors_4xx,
-              schemas.MetricOfErrors.domains_errors_5xx: metrics.get_domains_errors_5xx,
-              schemas.MetricOfErrors.errors_per_domains: metrics.get_errors_per_domains,
-              schemas.MetricOfErrors.calls_errors: metrics.get_calls_errors,
-              schemas.MetricOfErrors.errors_per_type: metrics.get_errors_per_type,
-              schemas.MetricOfErrors.resources_by_party: metrics.get_resources_by_party,
-              schemas.MetricOfPerformance.speed_location: metrics.get_speed_index_location,
-              schemas.MetricOfPerformance.slowest_domains: metrics.get_slowest_domains,
-              schemas.MetricOfPerformance.sessions_per_browser: metrics.get_sessions_per_browser,
-              schemas.MetricOfPerformance.time_to_render: metrics.get_time_to_render,
-              schemas.MetricOfPerformance.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
-              schemas.MetricOfPerformance.memory_consumption: metrics.get_memory_consumption,
-              schemas.MetricOfPerformance.cpu: metrics.get_avg_cpu,
-              schemas.MetricOfPerformance.fps: metrics.get_avg_fps,
-              schemas.MetricOfPerformance.crashes: metrics.get_crashes,
-              schemas.MetricOfPerformance.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
-              schemas.MetricOfPerformance.pages_dom_buildtime: metrics.get_pages_dom_build_time,
-              schemas.MetricOfPerformance.pages_response_time: metrics.get_pages_response_time,
-              schemas.MetricOfPerformance.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
-              schemas.MetricOfResources.missing_resources: metrics.get_missing_resources_trend,
-              schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources,
-              schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time,
-              schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
-              schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, }


 def get_predefined_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
                                      schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict):
-    return PREDEFINED.get(key, lambda *args: None)(project_id=project_id, **data)
+    supported = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions,
+                 schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
+                 schemas.MetricOfWebVitals.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
+                 schemas.MetricOfWebVitals.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
+                 schemas.MetricOfWebVitals.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
+                 schemas.MetricOfWebVitals.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
+                 schemas.MetricOfWebVitals.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
+                 schemas.MetricOfWebVitals.avg_session_duration: metrics.get_user_activity_avg_session_duration,
+                 schemas.MetricOfWebVitals.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
+                 schemas.MetricOfWebVitals.avg_pages_response_time: metrics.get_pages_response_time,
+                 schemas.MetricOfWebVitals.avg_response_time: metrics.get_top_metrics_avg_response_time,
+                 schemas.MetricOfWebVitals.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
+                 schemas.MetricOfWebVitals.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
+                 schemas.MetricOfWebVitals.avg_till_first_byte: metrics.get_top_metrics_avg_till_first_bit,
+                 schemas.MetricOfWebVitals.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
+                 schemas.MetricOfWebVitals.count_requests: metrics.get_top_metrics_count_requests,
+                 schemas.MetricOfWebVitals.avg_time_to_render: metrics.get_time_to_render,
+                 schemas.MetricOfWebVitals.avg_used_js_heap_size: metrics.get_memory_consumption,
+                 schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu,
+                 schemas.MetricOfWebVitals.avg_fps: metrics.get_avg_fps,
+                 schemas.MetricOfErrors.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
+                 schemas.MetricOfErrors.domains_errors_4xx: metrics.get_domains_errors_4xx,
+                 schemas.MetricOfErrors.domains_errors_5xx: metrics.get_domains_errors_5xx,
+                 schemas.MetricOfErrors.errors_per_domains: metrics.get_errors_per_domains,
+                 schemas.MetricOfErrors.calls_errors: metrics.get_calls_errors,
+                 schemas.MetricOfErrors.errors_per_type: metrics.get_errors_per_type,
+                 schemas.MetricOfErrors.resources_by_party: metrics.get_resources_by_party,
+                 schemas.MetricOfPerformance.speed_location: metrics.get_speed_index_location,
+                 schemas.MetricOfPerformance.slowest_domains: metrics.get_slowest_domains,
+                 schemas.MetricOfPerformance.sessions_per_browser: metrics.get_sessions_per_browser,
+                 schemas.MetricOfPerformance.time_to_render: metrics.get_time_to_render,
+                 schemas.MetricOfPerformance.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
+                 schemas.MetricOfPerformance.memory_consumption: metrics.get_memory_consumption,
+                 schemas.MetricOfPerformance.cpu: metrics.get_avg_cpu,
+                 schemas.MetricOfPerformance.fps: metrics.get_avg_fps,
+                 schemas.MetricOfPerformance.crashes: metrics.get_crashes,
+                 schemas.MetricOfPerformance.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
+                 schemas.MetricOfPerformance.pages_dom_buildtime: metrics.get_pages_dom_build_time,
+                 schemas.MetricOfPerformance.pages_response_time: metrics.get_pages_response_time,
+                 schemas.MetricOfPerformance.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
+                 schemas.MetricOfResources.missing_resources: metrics.get_missing_resources_trend,
+                 schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources,
+                 schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time,
+                 schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
+                 schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, }
+    return supported.get(key, lambda *args: None)(project_id=project_id, **data)
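
This hunk moves the module-level PREDEFINED table inside the function, but the dispatch pattern is unchanged: a dict maps a metric-key enum to its query function, with a no-op fallback for unknown keys. A small generic sketch of the pattern; names here are illustrative, and the fallback lambda takes **kwargs so a keyword-argument call cannot fail:

from enum import Enum

class MetricKey(str, Enum):  # stand-in for the schemas.MetricOf* enums
    count_sessions = "countSessions"
    avg_cpu = "avgCpu"

def get_processed_sessions(project_id: int, **kwargs) -> int:
    return 42  # placeholder for the real query

SUPPORTED = {MetricKey.count_sessions: get_processed_sessions}

def run_predefined(key: MetricKey, project_id: int, data: dict):
    # unsupported keys fall through to a no-op returning None instead of raising KeyError
    return SUPPORTED.get(key, lambda *args, **kwargs: None)(project_id=project_id, **data)

print(run_predefined(MetricKey.count_sessions, 1, {}))  # 42
print(run_predefined(MetricKey.avg_cpu, 1, {}))         # None
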


@@ -461,10 +461,10 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
         pg_sub_query_chart.append("errors.error_id =details.error_id")
     statuses = []
     error_ids = None
-    if data.startDate is None:
-        data.startDate = TimeUTC.now(-30)
-    if data.endDate is None:
-        data.endDate = TimeUTC.now(1)
+    if data.startTimestamp is None:
+        data.startTimestamp = TimeUTC.now(-30)
+    if data.endTimestamp is None:
+        data.endTimestamp = TimeUTC.now(1)
     if len(data.events) > 0 or len(data.filters) > 0:
         print("-- searching for sessions before errors")
         statuses = sessions.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True,
@@ -473,18 +473,18 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
             return empty_response
         error_ids = [e["errorId"] for e in statuses]
     with pg_client.PostgresClient() as cur:
-        step_size = __get_step_size(data.startDate, data.endDate, data.density, factor=1)
+        step_size = __get_step_size(data.startTimestamp, data.endTimestamp, data.density, factor=1)
         sort = __get_sort_key('datetime')
         if data.sort is not None:
             sort = __get_sort_key(data.sort)
-        order = schemas.SortOrderType.desc.value
+        order = schemas.SortOrderType.desc
         if data.order is not None:
-            order = data.order.value
+            order = data.order
         extra_join = ""
         params = {
-            "startDate": data.startDate,
-            "endDate": data.endDate,
+            "startDate": data.startTimestamp,
+            "endDate": data.endTimestamp,
             "project_id": project_id,
             "userId": user_id,
             "step_size": step_size}
@@ -716,41 +716,3 @@ def change_state(project_id, user_id, error_id, action):
     for e in errors:
         e["status"] = row["status"]
     return {"data": errors}
-MAX_RANK = 2
-def __status_rank(status):
-    return {
-        'unresolved': MAX_RANK - 2,
-        'ignored': MAX_RANK - 1,
-        'resolved': MAX_RANK
-    }.get(status)
-def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()):
-    with pg_client.PostgresClient() as cur:
-        query = cur.mogrify(
-            """WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(user_id)s)
-               SELECT COUNT(timed_errors.*) AS unresolved_and_unviewed
-               FROM (SELECT root_error.error_id
-                     FROM events.errors
-                              INNER JOIN public.errors AS root_error USING (error_id)
-                              LEFT JOIN user_viewed USING (error_id)
-                     WHERE project_id = %(project_id)s
-                       AND timestamp >= %(startTimestamp)s
-                       AND timestamp <= %(endTimestamp)s
-                       AND source = 'js_exception'
-                       AND root_error.status = 'unresolved'
-                       AND user_viewed.error_id ISNULL
-                     LIMIT 1
-                    ) AS timed_errors;""",
-            {"project_id": project_id, "user_id": user_id, "startTimestamp": startTimestamp,
-             "endTimestamp": endTimestamp})
-        cur.execute(query=query)
-        row = cur.fetchone()
-        return {
-            "data": helper.dict_to_camel_case(row)
-        }


@@ -44,7 +44,7 @@ def get_all_count(tenant_id, user_id):
 def view_notification(user_id, notification_ids=[], tenant_id=None, startTimestamp=None, endTimestamp=None):
-    if (notification_ids is None or len(notification_ids) == 0) and endTimestamp is None:
+    if len(notification_ids) == 0 and endTimestamp is None:
         return False
     if startTimestamp is None:
         startTimestamp = 0


@@ -1,9 +1,9 @@
 from fastapi.security import SecurityScopes

-import schemas_ee
+import schemas


-def check(security_scopes: SecurityScopes, context: schemas_ee.CurrentContext):
+def check(security_scopes: SecurityScopes, context: schemas.CurrentContext):
     for scope in security_scopes.scopes:
         if scope not in context.permissions:
             return False
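
`check` compares the scopes declared on a route against the permissions carried by the current context. A hedged sketch of how such a helper is typically wired into a FastAPI dependency; the route, context class, and permission string are illustrative, not the project's actual wiring:

from fastapi import Depends, FastAPI, HTTPException, Security, status
from fastapi.security import SecurityScopes

app = FastAPI()

class CurrentContext:  # illustrative stand-in for schemas.CurrentContext
    def __init__(self, permissions):
        self.permissions = permissions

def get_context() -> CurrentContext:
    return CurrentContext(permissions=["DEV_TOOLS"])  # would come from the auth layer

def check(security_scopes: SecurityScopes, context: CurrentContext = Depends(get_context)):
    # FastAPI injects the scopes declared at the Security(...) call site
    for scope in security_scopes.scopes:
        if scope not in context.permissions:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="missing permission")
    return context

@app.get("/devtools")
def devtools(context: CurrentContext = Security(check, scopes=["DEV_TOOLS"])):
    return {"ok": True}
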

File diff suppressed because it is too large


@@ -42,12 +42,12 @@ def __update(tenant_id, project_id, changes):
         return helper.dict_to_camel_case(cur.fetchone())


-def __create(tenant_id, name):
+def __create(tenant_id, data):
     with pg_client.PostgresClient() as cur:
-        query = cur.mogrify(f"""INSERT INTO public.projects (tenant_id, name, active)
-                                VALUES (%(tenant_id)s,%(name)s,TRUE)
+        query = cur.mogrify(f"""INSERT INTO public.projects (tenant_id, name, platform, active)
+                                VALUES (%(tenant_id)s,%(name)s,%(platform)s,TRUE)
                                 RETURNING project_id;""",
-                            {"tenant_id": tenant_id, "name": name})
+                            {"tenant_id": tenant_id, **data})
         cur.execute(query=query)
         project_id = cur.fetchone()["project_id"]
         return get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True)
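
`__create` now receives the whole validated payload so the new `platform` column can be bound alongside `name`. A sketch of what the request schema plausibly looks like; the real CreateProjectSchema is not shown in this hunk, so the field values and default are assumptions (the PR's commit log mentions web and iOS support):

from typing import Literal
from pydantic import BaseModel, Field

class CreateProjectSchema(BaseModel):  # assumed shape, inferred from the INSERT above
    name: str = Field(..., min_length=1)
    platform: Literal["web", "ios"] = "web"  # assumed values

payload = CreateProjectSchema(name="my-app", platform="ios")
# __create(tenant_id=1, data=payload.model_dump()) would bind %(name)s and %(platform)s
print(payload.model_dump())
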
@@ -79,14 +79,15 @@ def get_projects(tenant_id: int, gdpr: bool = False, recorded: bool = False, use
         query = cur.mogrify(f"""{"SELECT *, first_recorded IS NOT NULL AS recorded FROM (" if recorded else ""}
                                 SELECT s.project_id, s.name, s.project_key, s.save_request_payloads, s.first_recorded_session_at,
-                                       created_at, sessions_last_check_at, sample_rate {extra_projection}
+                                       s.created_at, s.sessions_last_check_at, s.sample_rate, s.platform
+                                       {extra_projection}
                                 FROM public.projects AS s
                                     {role_query if user_id is not None else ""}
                                 WHERE s.tenant_id =%(tenant_id)s
                                   AND s.deleted_at IS NULL
                                 ORDER BY s.name {") AS raw" if recorded else ""};""",
-                            {"tenant_id": tenant_id, "user_id": user_id, "now": TimeUTC.now(),
-                             "check_delta": TimeUTC.MS_HOUR * 4})
+                            {"now": TimeUTC.now(), "check_delta": TimeUTC.MS_HOUR * 4,
+                             "tenant_id": tenant_id, "user_id": user_id})
         cur.execute(query)
         rows = cur.fetchall()
         # if recorded is requested, check if it was saved or computed
@@ -145,6 +146,29 @@ def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=
     return helper.dict_to_camel_case(row)


+def get_project_by_key(tenant_id, project_key, include_last_session=False, include_gdpr=None):
+    with pg_client.PostgresClient() as cur:
+        extra_select = ""
+        if include_last_session:
+            extra_select += """,(SELECT max(ss.start_ts)
+                                 FROM public.sessions AS ss
+                                 WHERE ss.project_key = %(project_key)s) AS last_recorded_session_at"""
+        if include_gdpr:
+            extra_select += ",s.gdpr"
+        query = cur.mogrify(f"""SELECT s.project_key,
+                                       s.name
+                                       {extra_select}
+                                FROM public.projects AS s
+                                WHERE s.project_key =%(project_key)s
+                                  AND s.tenant_id =%(tenant_id)s
+                                  AND s.deleted_at IS NULL
+                                LIMIT 1;""",
+                            {"project_key": project_key, "tenant_id": tenant_id})
+        cur.execute(query=query)
+        row = cur.fetchone()
+        return helper.dict_to_camel_case(row)
+
+
 def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authorization=False):
     if __exists_by_name(name=data.name, exclude_id=None, tenant_id=tenant_id):
         raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
@@ -154,7 +178,7 @@ def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authoriza
             return {"errors": ["unauthorized"]}
         if admin["roleId"] is not None and not admin["allProjects"]:
             return {"errors": ["unauthorized: you need allProjects permission to create a new project"]}
-    return {"data": __create(tenant_id=tenant_id, name=data.name)}
+    return {"data": __create(tenant_id=tenant_id, data=data.model_dump())}


 def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema):
@@ -164,7 +188,7 @@ def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema):
     if not admin["admin"] and not admin["superAdmin"]:
         return {"errors": ["unauthorized"]}
     return {"data": __update(tenant_id=tenant_id, project_id=project_id,
-                             changes={"name": data.name})}
+                             changes=data.model_dump())}


 def delete(tenant_id, user_id, project_id):
@@ -195,14 +219,14 @@ def get_gdpr(project_id):
     return row


-def edit_gdpr(project_id, gdpr):
+def edit_gdpr(project_id, gdpr: schemas.GdprSchema):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify("""UPDATE public.projects
                                SET gdpr = gdpr|| %(gdpr)s
                                WHERE project_id = %(project_id)s
                                  AND deleted_at ISNULL
                                RETURNING gdpr;""",
-                            {"project_id": project_id, "gdpr": json.dumps(gdpr)})
+                            {"project_id": project_id, "gdpr": json.dumps(gdpr.model_dump())})
         cur.execute(query=query)
         row = cur.fetchone()
         if not row:
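
One pitfall worth noting in this migration: `model_dump()` returns a dict while `model_dump_json()` already returns a serialized JSON string, so wrapping the latter in `json.dumps` double-encodes the payload into a JSON string literal. That is why the hunk above pairs `json.dumps` with `model_dump()`. A minimal sketch; the field names are illustrative:

import json
from pydantic import BaseModel

class GdprSchema(BaseModel):  # illustrative; the real schema lives in schemas.py
    maskNumbers: bool = False

g = GdprSchema()
print(json.dumps(g.model_dump()))       # '{"maskNumbers": false}'  -> a JSON object
print(g.model_dump_json())              # same JSON text, produced directly
print(json.dumps(g.model_dump_json()))  # '"{\\"maskNumbers\\": false}"' -> double-encoded string
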
@@ -216,7 +240,7 @@ def get_internal_project_id(project_key):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify("""SELECT project_id
                                FROM public.projects
                                WHERE project_key =%(project_key)s
                                  AND deleted_at ISNULL;""",
                             {"project_key": project_key})
         cur.execute(query=query)
@@ -247,20 +271,14 @@ def get_capture_status(project_id):
         return helper.dict_to_camel_case(cur.fetchone())


-def update_capture_status(project_id, changes):
-    if "rate" not in changes and "captureAll" not in changes:
-        return {"errors": ["please provide 'rate' and/or 'captureAll' attributes to update."]}
-    if int(changes["rate"]) < 0 or int(changes["rate"]) > 100:
-        return {"errors": ["'rate' must be between 0..100."]}
-    sample_rate = 0
-    if "rate" in changes:
-        sample_rate = int(changes["rate"])
-    if changes.get("captureAll"):
+def update_capture_status(project_id, changes: schemas.SampleRateSchema):
+    sample_rate = changes.rate
+    if changes.capture_all:
         sample_rate = 100
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify("""UPDATE public.projects
                                SET sample_rate= %(sample_rate)s
                                WHERE project_id =%(project_id)s
                                  AND deleted_at ISNULL;""",
                             {"project_id": project_id, "sample_rate": sample_rate})
         cur.execute(query=query)
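
The hand-rolled presence and range checks move out of the endpoint body and into the request schema. With pydantic v2 the 0..100 constraint can live on the field itself; the real SampleRateSchema is not shown here, so this is an assumed shape (it presumably also maps the camelCase `captureAll` payload key via an alias):

from pydantic import BaseModel, Field, ValidationError

class SampleRateSchema(BaseModel):  # assumed shape, inferred from changes.rate / changes.capture_all
    rate: int = Field(default=100, ge=0, le=100)
    capture_all: bool = False

try:
    SampleRateSchema(rate=150)
except ValidationError as e:
    print(e)  # schema validation replaces the old inline "'rate' must be between 0..100." error
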
@@ -280,30 +298,6 @@ def get_projects_ids(tenant_id):
     return [r["project_id"] for r in rows]


-def get_project_by_key(tenant_id, project_key, include_last_session=False, include_gdpr=None):
-    with pg_client.PostgresClient() as cur:
-        extra_select = ""
-        if include_last_session:
-            extra_select += """,(SELECT max(ss.start_ts)
-                                 FROM public.sessions AS ss
-                                 WHERE ss.project_key = %(project_key)s) AS last_recorded_session_at"""
-        if include_gdpr:
-            extra_select += ",s.gdpr"
-        query = cur.mogrify(f"""SELECT s.project_key,
-                                       s.name
-                                       {extra_select}
-                                FROM public.projects AS s
-                                WHERE s.project_key =%(project_key)s
-                                  AND s.tenant_id =%(tenant_id)s
-                                  AND s.deleted_at IS NULL
-                                LIMIT 1;""",
-                            {"project_key": project_key, "tenant_id": tenant_id})
-        cur.execute(query=query)
-        row = cur.fetchone()
-        return helper.dict_to_camel_case(row)


 def is_authorized(project_id, tenant_id, user_id=None):
     if project_id is None or not str(project_id).isdigit():
         return False


@@ -1,8 +1,7 @@
 from typing import Optional

 from fastapi import HTTPException, status

 import schemas
-import schemas_ee
 from chalicelib.core import users, projects
 from chalicelib.utils import pg_client, helper
 from chalicelib.utils.TimeUTC import TimeUTC
@@ -22,7 +21,7 @@ def __exists_by_name(tenant_id: int, name: str, exclude_id: Optional[int]) -> bo
     return row["exists"]


-def update(tenant_id, user_id, role_id, data: schemas_ee.RolePayloadSchema):
+def update(tenant_id, user_id, role_id, data: schemas.RolePayloadSchema):
     admin = users.get(user_id=user_id, tenant_id=tenant_id)
     if not admin["admin"] and not admin["superAdmin"]:
@@ -57,7 +56,7 @@ def update(tenant_id, user_id, role_id, data: schemas_ee.RolePayloadSchema):
                                RETURNING *, COALESCE((SELECT ARRAY_AGG(project_id)
                                                       FROM roles_projects
                                                       WHERE roles_projects.role_id=%(role_id)s),'{}') AS projects;""",
-                            {"tenant_id": tenant_id, "role_id": role_id, **data.dict()})
+                            {"tenant_id": tenant_id, "role_id": role_id, **data.model_dump()})
         cur.execute(query=query)
         row = cur.fetchone()
         row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
@@ -80,7 +79,7 @@ def update(tenant_id, user_id, role_id, data: schemas_ee.RolePayloadSchema):
     return helper.dict_to_camel_case(row)


-def create(tenant_id, user_id, data: schemas_ee.RolePayloadSchema):
+def create(tenant_id, user_id, data: schemas.RolePayloadSchema):
     admin = users.get(user_id=user_id, tenant_id=tenant_id)
     if not admin["admin"] and not admin["superAdmin"]:


@@ -1,11 +1,11 @@
 from decouple import config
 from fastapi.security import SecurityScopes

-import schemas_ee
+import schemas
 from chalicelib.core import permissions
 from chalicelib.utils.storage import StorageClient

-SCOPES = SecurityScopes([schemas_ee.Permissions.dev_tools])
+SCOPES = SecurityScopes([schemas.Permissions.dev_tools])
@@ -18,7 +18,7 @@ def __get_devtools_keys(project_id, session_id):
     ]


-def get_urls(session_id, project_id, context: schemas_ee.CurrentContext, check_existence: bool = True):
+def get_urls(session_id, project_id, context: schemas.CurrentContext, check_existence: bool = True):
     if not permissions.check(security_scopes=SCOPES, context=context):
         return []
     results = []


@@ -2,7 +2,7 @@ import ast
 from typing import List, Union

 import schemas
-import schemas_ee
+import schemas
 from chalicelib.core import events, metadata, projects, performance_event, metrics
 from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
@@ -246,7 +246,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
     else:
         for i in range(len(sessions)):
             sessions[i]["metadata"] = ast.literal_eval(sessions[i]["metadata"])
-            sessions[i] = schemas_ee.SessionModel.parse_obj(helper.dict_to_camel_case(sessions[i]))
+            sessions[i] = schemas.SessionModel.parse_obj(helper.dict_to_camel_case(sessions[i]))
     # if not data.group_by_user and data.sort is not None and data.sort != "session_id":
     #     sessions = sorted(sessions, key=lambda s: s[helper.key_to_snake_case(data.sort)],
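
`parse_obj` survives this migration unchanged, but in pydantic v2 it is a deprecated alias for `model_validate`; both accept a plain dict. A minimal sketch, with an illustrative subset of the real SessionModel fields:

from pydantic import BaseModel

class SessionModel(BaseModel):  # illustrative subset of schemas.SessionModel
    sessionId: int
    duration: int = 0

row = {"sessionId": 7, "duration": 1200}
legacy = SessionModel.parse_obj(row)        # still works in v2, with a DeprecationWarning
current = SessionModel.model_validate(row)  # the v2-native equivalent
assert legacy == current
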
@@ -260,12 +260,12 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
 def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
                    view_type: schemas.MetricTimeseriesViewType, metric_type: schemas.MetricType,
                    metric_of: schemas.MetricOfTable, metric_value: List):
-    step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate,
+    step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp,
                                                    density=density))
     extra_event = None
     if metric_of == schemas.MetricOfTable.visited_url:
         extra_event = f"""SELECT DISTINCT ev.session_id, ev.url_path
-                          FROM {exp_ch_helper.get_main_events_table(data.startDate)} AS ev
+                          FROM {exp_ch_helper.get_main_events_table(data.startTimestamp)} AS ev
                           WHERE ev.datetime >= toDateTime(%(startDate)s / 1000)
                             AND ev.datetime <= toDateTime(%(endDate)s / 1000)
                             AND ev.project_id = %(project_id)s
@@ -300,7 +300,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
         # print("--------------------")
         sessions = cur.execute(main_query)
     if view_type == schemas.MetricTimeseriesViewType.line_chart:
-        sessions = metrics.__complete_missing_steps(start_time=data.startDate, end_time=data.endDate,
+        sessions = metrics.__complete_missing_steps(start_time=data.startTimestamp, end_time=data.endTimestamp,
                                                     density=density, neutral={"count": 0}, rows=sessions)
     else:
         sessions = sessions[0]["count"] if len(sessions) > 0 else 0
@@ -362,7 +362,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
     return sessions


-def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema):
+def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):
     return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.request_details,
                                                                             schemas.EventType.graphql] \
                 or event.type in [schemas.PerformanceEventType.location_dom_complete,
@@ -402,11 +402,11 @@ def __get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEve
 def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue,
                           project_id, user_id, extra_event=None):
     ss_constraints = []
-    full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate,
+    full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
                  "projectId": project_id, "userId": user_id}
-    MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(data.startDate)
-    MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(data.startDate)
+    MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(data.startTimestamp)
+    MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(data.startTimestamp)
     full_args["MAIN_EVENTS_TABLE"] = MAIN_EVENTS_TABLE
     full_args["MAIN_SESSIONS_TABLE"] = MAIN_SESSIONS_TABLE
@@ -1224,9 +1224,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
     else:
         data.events = []
     # ---------------------------------------------------------------------------
-    if data.startDate is not None:
+    if data.startTimestamp is not None:
         extra_constraints.append("s.datetime >= toDateTime(%(startDate)s/1000)")
-    if data.endDate is not None:
+    if data.endTimestamp is not None:
         extra_constraints.append("s.datetime <= toDateTime(%(endDate)s/1000)")
     # if data.platform is not None:
     #     if data.platform == schemas.PlatformType.mobile:


@@ -1,12 +1,12 @@
 from decouple import config

-import schemas_ee
+import schemas
 from chalicelib.core import sessions, sessions_favorite_exp, sessions_mobs, sessions_devtool
 from chalicelib.utils import pg_client
 from chalicelib.utils.storage import extra


-def add_favorite_session(context: schemas_ee.CurrentContext, project_id, session_id):
+def add_favorite_session(context: schemas.CurrentContext, project_id, session_id):
     with pg_client.PostgresClient() as cur:
         cur.execute(
             cur.mogrify(f"""\
@@ -22,7 +22,7 @@ def add_favorite_session(context: schemas_ee.CurrentContext, project_id, session
         return {"errors": ["something went wrong"]}


-def remove_favorite_session(context: schemas_ee.CurrentContext, project_id, session_id):
+def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id):
     with pg_client.PostgresClient() as cur:
         cur.execute(
             cur.mogrify(f"""\
@@ -39,7 +39,7 @@ def remove_favorite_session(context: schemas_ee.CurrentContext, project_id, sess
         return {"errors": ["something went wrong"]}


-def favorite_session(context: schemas_ee.CurrentContext, project_id, session_id):
+def favorite_session(context: schemas.CurrentContext, project_id, session_id):
     keys = sessions_mobs.__get_mob_keys(project_id=project_id, session_id=session_id)
     keys += sessions_mobs.__get_mob_keys_deprecated(session_id=session_id)  # To support old sessions
     keys += sessions_devtool.__get_devtools_keys(project_id=project_id, session_id=session_id)


@@ -1,7 +1,7 @@
 from typing import Optional

 import schemas
-import schemas_ee
+import schemas
 from chalicelib.core import metrics
 from chalicelib.utils import ch_client
@@ -161,7 +161,7 @@ def query_requests_by_period(project_id, start_time, end_time, filters: Optional
     for n in names_:
         if n is None:
             continue
-        data_ = {'category': schemas_ee.InsightCategories.network, 'name': n,
+        data_ = {'category': schemas.InsightCategories.network, 'name': n,
                  'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True}
         for n_, v in ratio:
             if n == n_:
@@ -266,7 +266,7 @@ def query_most_errors_by_period(project_id, start_time, end_time,
     for n in names_:
         if n is None:
             continue
-        data_ = {'category': schemas_ee.InsightCategories.errors, 'name': n,
+        data_ = {'category': schemas.InsightCategories.errors, 'name': n,
                  'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True}
         for n_, v in ratio:
             if n == n_:
@@ -346,7 +346,7 @@ def query_cpu_memory_by_period(project_id, start_time, end_time,
     output = list()
     if cpu_oldvalue is not None or cpu_newvalue is not None:
-        output.append({'category': schemas_ee.InsightCategories.resources,
+        output.append({'category': schemas.InsightCategories.resources,
                        'name': 'cpu',
                        'value': cpu_newvalue,
                        'oldValue': cpu_oldvalue,
@@ -354,7 +354,7 @@ def query_cpu_memory_by_period(project_id, start_time, end_time,
                            cpu_newvalue - cpu_oldvalue) / cpu_oldvalue if cpu_ratio is not None else cpu_ratio,
                        'isNew': True if cpu_newvalue is not None and cpu_oldvalue is None else False})
     if mem_oldvalue is not None or mem_newvalue is not None:
-        output.append({'category': schemas_ee.InsightCategories.resources,
+        output.append({'category': schemas.InsightCategories.resources,
                        'name': 'memory',
                        'value': mem_newvalue,
                        'oldValue': mem_oldvalue,
@@ -434,7 +434,7 @@ def query_click_rage_by_period(project_id, start_time, end_time,
     for n in names_:
         if n is None:
             continue
-        data_ = {'category': schemas_ee.InsightCategories.rage, 'name': n,
+        data_ = {'category': schemas.InsightCategories.rage, 'name': n,
                  'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True}
         for n_, v in ratio:
             if n == n_:
@@ -453,26 +453,26 @@ def query_click_rage_by_period(project_id, start_time, end_time,
     return results


-def fetch_selected(project_id, data: schemas_ee.GetInsightsSchema):
+def fetch_selected(project_id, data: schemas.GetInsightsSchema):
     output = list()
     if data.metricValue is None or len(data.metricValue) == 0:
         data.metricValue = []
-        for v in schemas_ee.InsightCategories:
+        for v in schemas.InsightCategories:
             data.metricValue.append(v)
     filters = None
     if len(data.series) > 0:
         filters = data.series[0].filter
-    if schemas_ee.InsightCategories.errors in data.metricValue:
+    if schemas.InsightCategories.errors in data.metricValue:
         output += query_most_errors_by_period(project_id=project_id, start_time=data.startTimestamp,
                                               end_time=data.endTimestamp, filters=filters)
-    if schemas_ee.InsightCategories.network in data.metricValue:
+    if schemas.InsightCategories.network in data.metricValue:
         output += query_requests_by_period(project_id=project_id, start_time=data.startTimestamp,
                                            end_time=data.endTimestamp, filters=filters)
-    if schemas_ee.InsightCategories.rage in data.metricValue:
+    if schemas.InsightCategories.rage in data.metricValue:
         output += query_click_rage_by_period(project_id=project_id, start_time=data.startTimestamp,
                                              end_time=data.endTimestamp, filters=filters)
-    if schemas_ee.InsightCategories.resources in data.metricValue:
+    if schemas.InsightCategories.resources in data.metricValue:
         output += query_cpu_memory_by_period(project_id=project_id, start_time=data.startTimestamp,
                                              end_time=data.endTimestamp, filters=filters)
     return output


@@ -88,7 +88,8 @@ def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNote
         query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public)
                                 VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s)
                                 RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND users.tenant_id=%(tenant_id)s) AS user_name;""",
-                            {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict(),
+                            {"user_id": user_id, "project_id": project_id, "session_id": session_id,
+                             **data.model_dump(),
                              "tenant_id": tenant_id})
         cur.execute(query)
         result = helper.dict_to_camel_case(cur.fetchone())
@@ -118,7 +119,7 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot
                                   AND note_id = %(note_id)s
                                   AND deleted_at ISNULL
                                 RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND users.tenant_id=%(tenant_id)s) AS user_name;""",
-                        {"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.dict(),
+                        {"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.model_dump(),
                          "tenant_id": tenant_id})
         )
         row = helper.dict_to_camel_case(cur.fetchone())


@@ -1,5 +1,5 @@
 import schemas
-import schemas_ee
+import schemas
 from chalicelib.core import events, metadata, events_ios, \
     sessions_mobs, issues, resources, assist, sessions_devtool, sessions_notes
 from chalicelib.utils import errors_helper
@@ -17,7 +17,7 @@ def __group_metadata(session, project_metadata):
 # for backward compatibility
 # This function should not use Clickhouse because it doesn't have `file_key`
-def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False,
+def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False,
                   include_fav_viewed=False, group_metadata=False, live=True):
     with pg_client.PostgresClient() as cur:
         extra_query = []


@@ -1,9 +1,9 @@
-import schemas_ee
+import schemas
 import logging
 from chalicelib.utils import events_queue


-def handle_frontend_signals_queued(project_id: int, user_id: int, data: schemas_ee.SignalsSchema):
+def handle_frontend_signals_queued(project_id: int, user_id: int, data: schemas.SignalsSchema):
     try:
         events_queue.global_queue.put((project_id, user_id, data))
         return {'data': 'insertion succeded'}
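
Signals are not written synchronously: the handler only enqueues a tuple and returns, leaving a worker to drain the queue later. A hedged sketch of that producer/consumer shape; the real events_queue implementation is not shown in this diff, so this is an assumed equivalent using the standard library:

import queue
import threading

global_queue: "queue.Queue[tuple]" = queue.Queue()  # stand-in for events_queue.global_queue

def handle_signal(project_id: int, user_id: int, data: dict):
    global_queue.put((project_id, user_id, data))  # cheap and non-blocking for the caller
    return {"data": "insertion succeeded"}

def worker():
    while True:
        project_id, user_id, data = global_queue.get()
        # ... batch-insert into storage here ...
        global_queue.task_done()

threading.Thread(target=worker, daemon=True).start()
handle_signal(1, 2, {"action": "click"})
global_queue.join()  # wait until the worker has processed the signal
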

View file

@@ -30,17 +30,19 @@ T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.36
             21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042}


-def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
+def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) -> List[RealDictRow]:
     """
     Add minimal timestamp
     :param filter_d: dict contains events&filters&...
     :return:
     """
-    stages: [dict] = filter_d.get("events", [])
-    filters: [dict] = filter_d.get("filters", [])
-    filter_issues = filter_d.get("issueTypes")
-    if filter_issues is None or len(filter_issues) == 0:
-        filter_issues = []
+    stages: [dict] = filter_d.events
+    filters: [dict] = filter_d.filters
+    filter_issues = []
+    # TODO: enable this if needed by an endpoint
+    # filter_issues = filter_d.get("issueTypes")
+    # if filter_issues is None or len(filter_issues) == 0:
+    #     filter_issues = []
     stage_constraints = ["main.timestamp <= %(endTimestamp)s"]
     first_stage_extra_constraints = ["s.project_id=%(project_id)s", "s.start_ts >= %(startTimestamp)s",
                                      "s.start_ts <= %(endTimestamp)s"]
@@ -126,22 +128,22 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
     i = -1
     for s in stages:
-        if s.get("operator") is None:
-            s["operator"] = "is"
-        if not isinstance(s["value"], list):
-            s["value"] = [s["value"]]
-        is_any = sh.isAny_opreator(s["operator"])
-        if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0:
+        if s.operator is None:
+            s.operator = schemas.SearchEventOperator._is
+        if not isinstance(s.value, list):
+            s.value = [s.value]
+        is_any = sh.isAny_opreator(s.operator)
+        if not is_any and isinstance(s.value, list) and len(s.value) == 0:
             continue
         i += 1
         if i == 0:
             extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
         else:
             extra_from = []
-        op = sh.get_sql_operator(s["operator"])
+        op = sh.get_sql_operator(s.operator)
         # event_type = s["type"].upper()
-        event_type = s["type"]
+        event_type = s.type
         if event_type == events.EventType.CLICK.ui_type:
             next_table = events.EventType.CLICK.table
             next_col_name = events.EventType.CLICK.column
@@ -171,16 +173,16 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
             print(f"=================UNDEFINED:{event_type}")
             continue
-        values = {**values, **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
+        values = {**values, **sh.multi_values(helper.values_for_operator(value=s.value, op=s.operator),
                                               value_key=f"value{i + 1}")}
-        if sh.is_negation_operator(s["operator"]) and i > 0:
+        if sh.is_negation_operator(s.operator) and i > 0:
             op = sh.reverse_sql_operator(op)
             main_condition = "left_not.session_id ISNULL"
             extra_from.append(f"""LEFT JOIN LATERAL (SELECT session_id
                                                      FROM {next_table} AS s_main
                                                      WHERE
                                                      {sh.multi_conditions(f"s_main.{next_col_name} {op} %(value{i + 1})s",
-                                                                          values=s["value"], value_key=f"value{i + 1}")}
+                                                                          values=s.value, value_key=f"value{i + 1}")}
                                                      AND s_main.timestamp >= T{i}.stage{i}_timestamp
                                                      AND s_main.session_id = T1.session_id) AS left_not ON (TRUE)""")
         else:
@@ -188,7 +190,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
                 main_condition = "TRUE"
             else:
                 main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
-                                                     values=s["value"], value_key=f"value{i + 1}")
+                                                     values=s.value, value_key=f"value{i + 1}")
         n_stages_query.append(f"""
         (SELECT main.session_id,
                 {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp
@@ -231,7 +233,8 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
     """
     # LIMIT 10000
-    params = {"project_id": project_id, "startTimestamp": filter_d["startDate"], "endTimestamp": filter_d["endDate"],
+    params = {"project_id": project_id, "startTimestamp": filter_d.startTimestamp,
+              "endTimestamp": filter_d.endTimestamp,
               "issueTypes": tuple(filter_issues), **values}
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(n_stages_query, params)
@@ -245,7 +248,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
         print("--------- FUNNEL SEARCH QUERY EXCEPTION -----------")
         print(query.decode('UTF-8'))
         print("--------- PAYLOAD -----------")
-        print(filter_d)
+        print(filter_d.model_dump_json())
         print("--------------------")
         raise err
     return rows
@ -550,9 +553,9 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
return n_critical_issues, issues_dict, total_drop_due_to_issues return n_critical_issues, issues_dict, total_drop_due_to_issues
def get_top_insights(filter_d, project_id): def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id):
output = [] output = []
stages = filter_d.get("events", []) stages = filter_d.events
# TODO: handle 1 stage alone # TODO: handle 1 stage alone
if len(stages) == 0: if len(stages) == 0:
print("no stages found") print("no stages found")
@ -560,17 +563,24 @@ def get_top_insights(filter_d, project_id):
elif len(stages) == 1: elif len(stages) == 1:
# TODO: count sessions, and users for single stage # TODO: count sessions, and users for single stage
output = [{ output = [{
"type": stages[0]["type"], "type": stages[0].type,
"value": stages[0]["value"], "value": stages[0].value,
"dropPercentage": None, "dropPercentage": None,
"operator": stages[0]["operator"], "operator": stages[0].operator,
"sessionsCount": 0, "sessionsCount": 0,
"dropPct": 0, "dropPct": 0,
"usersCount": 0, "usersCount": 0,
"dropDueToIssues": 0 "dropDueToIssues": 0
}] }]
counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d), # original
# counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d),
# project_id=project_id, user_id=None, count_only=True)
# first change
# counts = sessions.search_sessions(data=schemas.FlatSessionsSearchPayloadSchema.parse_obj(filter_d),
# project_id=project_id, user_id=None, count_only=True)
# last change
counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.model_validate(filter_d),
project_id=project_id, user_id=None, count_only=True) project_id=project_id, user_id=None, count_only=True)
output[0]["sessionsCount"] = counts["countSessions"] output[0]["sessionsCount"] = counts["countSessions"]
output[0]["usersCount"] = counts["countUsers"] output[0]["usersCount"] = counts["countUsers"]
@ -589,9 +599,9 @@ def get_top_insights(filter_d, project_id):
return stages_list, total_drop_due_to_issues return stages_list, total_drop_due_to_issues
def get_issues_list(filter_d, project_id, first_stage=None, last_stage=None): def get_issues_list(filter_d: schemas.CardSeriesFilterSchema, project_id, first_stage=None, last_stage=None):
output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []}) output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []})
stages = filter_d.get("events", []) stages = filter_d.events
# The result of the multi-stage query # The result of the multi-stage query
rows = get_stages_and_events(filter_d=filter_d, project_id=project_id) rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
# print(json.dumps(rows[0],indent=4)) # print(json.dumps(rows[0],indent=4))
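Most of the churn in this file is the pydantic v1-to-v2 migration: the funnel filter stops being a plain dict and arrives as a validated model, so `filter_d["startDate"]` / `filter_d.get("events", [])` become attribute access, and `parse_obj()` / `dict()` / `json()` become `model_validate()` / `model_dump()` / `model_dump_json()`. A minimal sketch of the pattern, with illustrative field sets (the real `CardSeriesFilterSchema` lives in the refactored `schemas` package and has more fields than shown):

    # Minimal sketch; StageSchema/CardSeriesFilterSchema field sets are assumptions.
    from typing import List, Optional, Union

    from pydantic import BaseModel


    class StageSchema(BaseModel):
        type: str
        operator: Optional[str] = None
        value: Union[str, List[str]]


    class CardSeriesFilterSchema(BaseModel):
        events: List[StageSchema] = []
        filters: list = []
        startTimestamp: int
        endTimestamp: int


    filter_d = CardSeriesFilterSchema(
        events=[StageSchema(type="click", operator="is", value="#checkout")],
        startTimestamp=1_693_000_000_000,
        endTimestamp=1_693_086_400_000,
    )

    # Before (dict payload):  stages = filter_d.get("events", [])
    # After (pydantic model): attribute access, validated at construction time.
    stages = filter_d.events
    assert stages[0].operator == "is"

    # v1 -> v2 renames used throughout this commit:
    #   parse_obj() -> model_validate(), dict() -> model_dump(), json() -> model_dump_json()
    print(filter_d.model_dump_json())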

View file

@@ -1,4 +1,3 @@
-__author__ = "AZNAUROV David"
 __maintainer__ = "KRAIEM Taha Yassine"

 from decouple import config
@@ -30,17 +29,19 @@ T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.36
             21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042}


-def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
+def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) -> List[RealDictRow]:
     """
     Add minimal timestamp
     :param filter_d: dict contains events&filters&...
     :return:
     """
-    stages: [dict] = filter_d.get("events", [])
-    filters: [dict] = filter_d.get("filters", [])
-    filter_issues = filter_d.get("issueTypes")
-    if filter_issues is None or len(filter_issues) == 0:
-        filter_issues = []
+    stages: [dict] = filter_d.events
+    filters: [dict] = filter_d.filters
+    filter_issues = []
+    # TODO: enable this if needed by an endpoint
+    # filter_issues = filter_d.get("issueTypes")
+    # if filter_issues is None or len(filter_issues) == 0:
+    #     filter_issues = []
     stage_constraints = ["main.timestamp <= %(endTimestamp)s"]
     first_stage_extra_constraints = ["s.project_id=%(project_id)s", "s.start_ts >= %(startTimestamp)s",
                                      "s.start_ts <= %(endTimestamp)s"]
@@ -126,22 +127,22 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
     i = -1
     for s in stages:
-        if s.get("operator") is None:
-            s["operator"] = "is"
-        if not isinstance(s["value"], list):
-            s["value"] = [s["value"]]
-        is_any = sh.isAny_opreator(s["operator"])
-        if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0:
+        if s.operator is None:
+            s.operator = schemas.SearchEventOperator._is
+        if not isinstance(s.value, list):
+            s.value = [s.value]
+        is_any = sh.isAny_opreator(s.operator)
+        if not is_any and isinstance(s.value, list) and len(s.value) == 0:
             continue
         i += 1
         if i == 0:
             extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
         else:
             extra_from = []
-        op = sh.get_sql_operator(s["operator"])
+        op = sh.get_sql_operator(s.operator)
         # event_type = s["type"].upper()
-        event_type = s["type"]
+        event_type = s.type
         if event_type == events.EventType.CLICK.ui_type:
             next_table = events.EventType.CLICK.table
             next_col_name = events.EventType.CLICK.column
@@ -171,16 +172,16 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
             print(f"=================UNDEFINED:{event_type}")
             continue

-        values = {**values, **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
+        values = {**values, **sh.multi_values(helper.values_for_operator(value=s.value, op=s.operator),
                                               value_key=f"value{i + 1}")}
-        if sh.is_negation_operator(s["operator"]) and i > 0:
+        if sh.is_negation_operator(s.operator) and i > 0:
             op = sh.reverse_sql_operator(op)
             main_condition = "left_not.session_id ISNULL"
             extra_from.append(f"""LEFT JOIN LATERAL (SELECT session_id
                                                      FROM {next_table} AS s_main
                                                      WHERE
                                                      {sh.multi_conditions(f"s_main.{next_col_name} {op} %(value{i + 1})s",
-                                                                          values=s["value"], value_key=f"value{i + 1}")}
+                                                                          values=s.value, value_key=f"value{i + 1}")}
                                                      AND s_main.timestamp >= T{i}.stage{i}_timestamp
                                                      AND s_main.session_id = T1.session_id) AS left_not ON (TRUE)""")
         else:
@@ -188,7 +189,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
                 main_condition = "TRUE"
             else:
                 main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
-                                                     values=s["value"], value_key=f"value{i + 1}")
+                                                     values=s.value, value_key=f"value{i + 1}")
             n_stages_query.append(f"""
             (SELECT main.session_id,
                     {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp
@@ -231,7 +232,8 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
     """
     # LIMIT 10000
-    params = {"project_id": project_id, "startTimestamp": filter_d["startDate"], "endTimestamp": filter_d["endDate"],
+    params = {"project_id": project_id, "startTimestamp": filter_d.startTimestamp,
+              "endTimestamp": filter_d.endTimestamp,
               "issueTypes": tuple(filter_issues), **values}
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(n_stages_query, params)
@@ -245,7 +247,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
             print("--------- FUNNEL SEARCH QUERY EXCEPTION -----------")
             print(query.decode('UTF-8'))
             print("--------- PAYLOAD -----------")
-            print(filter_d)
+            print(filter_d.model_dump_json())
             print("--------------------")
             raise err
     return rows
@@ -550,9 +552,9 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
     return n_critical_issues, issues_dict, total_drop_due_to_issues


-def get_top_insights(filter_d, project_id):
+def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id):
     output = []
-    stages = filter_d.get("events", [])
+    stages = filter_d.events
     # TODO: handle 1 stage alone
     if len(stages) == 0:
         print("no stages found")
@@ -560,17 +562,24 @@ def get_top_insights(filter_d, project_id):
     elif len(stages) == 1:
         # TODO: count sessions, and users for single stage
         output = [{
-            "type": stages[0]["type"],
-            "value": stages[0]["value"],
+            "type": stages[0].type,
+            "value": stages[0].value,
             "dropPercentage": None,
-            "operator": stages[0]["operator"],
+            "operator": stages[0].operator,
             "sessionsCount": 0,
             "dropPct": 0,
             "usersCount": 0,
             "dropDueToIssues": 0
         }]
-        counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d),
+        # original
+        # counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d),
+        #                                   project_id=project_id, user_id=None, count_only=True)
+        # first change
+        # counts = sessions.search_sessions(data=schemas.FlatSessionsSearchPayloadSchema.parse_obj(filter_d),
+        #                                   project_id=project_id, user_id=None, count_only=True)
+        # last change
+        counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.model_validate(filter_d),
                                           project_id=project_id, user_id=None, count_only=True)
         output[0]["sessionsCount"] = counts["countSessions"]
         output[0]["usersCount"] = counts["countUsers"]
@@ -589,9 +598,9 @@ def get_top_insights(filter_d, project_id):
     return stages_list, total_drop_due_to_issues


-def get_issues_list(filter_d, project_id, first_stage=None, last_stage=None):
+def get_issues_list(filter_d: schemas.CardSeriesFilterSchema, project_id, first_stage=None, last_stage=None):
     output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []})
-    stages = filter_d.get("events", [])
+    stages = filter_d.events
     # The result of the multi-stage query
     rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
     # print(json.dumps(rows[0],indent=4))

View file

@@ -3,7 +3,6 @@ import json
 from decouple import config

 import schemas
-import schemas_ee
 from chalicelib.core import users, telemetry, tenants
 from chalicelib.utils import captcha
 from chalicelib.utils import helper
@@ -19,7 +18,7 @@ def create_tenant(data: schemas.UserSignupSchema):
     email = data.email
     print(f"=====================> {email}")
-    password = data.password
+    password = data.password.get_secret_value()

     if email is None or len(email) < 5:
         errors.append("Invalid email address.")
@@ -52,7 +51,7 @@ def create_tenant(data: schemas.UserSignupSchema):
     params = {
         "email": email, "password": password, "fullname": fullname, "projectName": project_name,
         "data": json.dumps({"lastAnnouncementView": TimeUTC.now()}), "organizationName": organization_name,
-        "permissions": [p.value for p in schemas_ee.Permissions]
+        "permissions": [p.value for p in schemas.Permissions]
     }
     query = """WITH t AS (
                 INSERT INTO public.tenants (name)
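The `data.password` to `data.password.get_secret_value()` change follows from declaring the password as a pydantic `SecretStr`, which masks the value in reprs, logs, and serialized output until it is requested explicitly. A small sketch (any `UserSignupSchema` fields beyond `email`/`password` are omitted as assumptions):

    from pydantic import BaseModel, EmailStr, SecretStr


    class UserSignupSchema(BaseModel):
        email: EmailStr
        password: SecretStr


    data = UserSignupSchema(email="owner@example.com", password="hunter2")
    print(data)                              # password=SecretStr('**********')
    print(data.password.get_secret_value())  # "hunter2" -- only on explicit request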

View file

@@ -10,7 +10,7 @@ from starlette.background import BackgroundTask
 import app as main_app
 import schemas
-import schemas_ee
+import schemas
 from chalicelib.utils import pg_client, helper
 from chalicelib.utils.TimeUTC import TimeUTC
 from schemas import CurrentContext
@@ -64,7 +64,7 @@ class TraceSchema(BaseModel):

 def __process_trace(trace: TraceSchema):
-    data = trace.dict()
+    data = trace.model_dump()
     data["parameters"] = json.dumps(trace.parameters) if trace.parameters is not None and len(
         trace.parameters.keys()) > 0 else None
     data["payload"] = json.dumps(trace.payload) if trace.payload is not None and len(trace.payload.keys()) > 0 else None
@@ -158,7 +158,7 @@ async def process_traces_queue():
     await write_traces_batch(traces)


-def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema):
+def get_all(tenant_id, data: schemas.TrailSearchPayloadSchema):
     with pg_client.PostgresClient() as cur:
         conditions = ["traces.tenant_id=%(tenant_id)s",
                       "traces.created_at>=%(startDate)s",
@@ -168,7 +168,7 @@ def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema):
                   "endDate": data.endDate,
                   "p_start": (data.page - 1) * data.limit,
                   "p_end": data.page * data.limit,
-                  **data.dict()}
+                  **data.model_dump()}
         if data.user_id is not None:
             conditions.append("user_id=%(user_id)s")
         if data.action is not None:
@@ -184,10 +184,10 @@ def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema):
                     COALESCE(JSONB_AGG(full_traces ORDER BY rn)
                              FILTER (WHERE rn > %(p_start)s AND rn <= %(p_end)s), '[]'::JSONB) AS sessions
             FROM (SELECT traces.*,users.email,users.name AS username,
-                         ROW_NUMBER() OVER (ORDER BY traces.created_at {data.order.value}) AS rn
+                         ROW_NUMBER() OVER (ORDER BY traces.created_at {data.order}) AS rn
                  FROM traces LEFT JOIN users USING (user_id)
                  WHERE {" AND ".join(conditions)}
-                 ORDER BY traces.created_at {data.order.value}) AS full_traces;""", params)
+                 ORDER BY traces.created_at {data.order}) AS full_traces;""", params)
         )
         rows = cur.fetchone()
     return helper.dict_to_camel_case(rows)
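Dropping `.value` in the interpolated SQL works because the order field is a str-mixin enum: the member compares equal to its raw value, so `{data.order}` and `{data.order.value}` render the same keyword. A sketch with assumed enum/field names (the real schema lives in the refactored `schemas` package):

    from enum import Enum

    from pydantic import BaseModel


    class SortOrderSchema(str, Enum):
        asc = "ASC"
        desc = "DESC"


    class TrailSearchPayloadSchema(BaseModel):
        page: int = 1
        limit: int = 50
        order: SortOrderSchema = SortOrderSchema.desc


    data = TrailSearchPayloadSchema()
    # str-mixin enums compare (and, before the Python 3.11 format() change,
    # interpolate) as their raw value.
    assert data.order == "DESC"
    print(f"ORDER BY traces.created_at {data.order}")
    # v2 serializer rename used in the same hunk: .dict() -> .model_dump()
    print(data.model_dump())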

View file

@@ -2,15 +2,17 @@ import json
 import secrets

 from decouple import config
-from fastapi import BackgroundTasks
+from fastapi import BackgroundTasks, HTTPException
+from starlette import status

 import schemas
-import schemas_ee
-from chalicelib.core import authorizers, metadata, projects, roles
+from chalicelib.core import authorizers, metadata, projects
 from chalicelib.core import tenants, assist
-from chalicelib.utils import helper, email_helper, smtp
+from chalicelib.utils import email_helper, smtp
+from chalicelib.utils import helper
 from chalicelib.utils import pg_client
 from chalicelib.utils.TimeUTC import TimeUTC
+from chalicelib.core import roles


 def __generate_invitation_token():
@@ -210,33 +212,33 @@ def update(tenant_id, user_id, changes, output=True):
     return get(user_id=user_id, tenant_id=tenant_id)


-def create_member(tenant_id, user_id, data, background_tasks: BackgroundTasks):
+def create_member(tenant_id, user_id, data: schemas.CreateMemberSchema, background_tasks: BackgroundTasks):
     admin = get(tenant_id=tenant_id, user_id=user_id)
     if not admin["admin"] and not admin["superAdmin"]:
         return {"errors": ["unauthorized"]}
-    if data.get("userId") is not None:
+    if data.user_id is not None:
         return {"errors": ["please use POST/PUT /client/members/{memberId} for update"]}
-    user = get_by_email_only(email=data["email"])
+    user = get_by_email_only(email=data.email)
     if user:
         return {"errors": ["user already exists"]}
-    name = data.get("name", None)
-    if name is None or len(name) == 0:
-        name = data["email"]
+    if data.name is None or len(data.name) == 0:
+        data.name = data.email
     role_id = data.get("roleId")
     if role_id is None:
         role_id = roles.get_role_by_name(tenant_id=tenant_id, name="member").get("roleId")
     invitation_token = __generate_invitation_token()
-    user = get_deleted_user_by_email(email=data["email"])
+    user = get_deleted_user_by_email(email=data.email)
     if user is not None and user["tenantId"] == tenant_id:
-        new_member = restore_member(tenant_id=tenant_id, email=data["email"], invitation_token=invitation_token,
-                                    admin=data.get("admin", False), name=name, user_id=user["userId"], role_id=role_id)
+        new_member = restore_member(tenant_id=tenant_id, email=data.email, invitation_token=invitation_token,
+                                    admin=data.admin, name=data.name, user_id=user["userId"], role_id=role_id)
     elif user is not None:
         __hard_delete_user(user_id=user["userId"])
         new_member = create_new_member(tenant_id=tenant_id, email=data["email"], invitation_token=invitation_token,
-                                       admin=data.get("admin", False), name=name, role_id=role_id)
+                                       admin=data.get("admin", False), name=data.name, role_id=role_id)
     else:
         new_member = create_new_member(tenant_id=tenant_id, email=data["email"], invitation_token=invitation_token,
-                                       admin=data.get("admin", False), name=name, role_id=role_id)
+                                       admin=data.get("admin", False), name=data.name, role_id=role_id)
     new_member["invitationLink"] = __get_invitation_link(new_member.pop("invitationToken"))
     background_tasks.add_task(email_helper.send_team_invitation, **{
         "recipient": data["email"],
@@ -282,7 +284,8 @@ def get(user_id, tenant_id):
                            roles.name AS role_name,
                            roles.permissions,
                            roles.all_projects,
-                           basic_authentication.password IS NOT NULL AS has_password
+                           basic_authentication.password IS NOT NULL AS has_password,
+                           users.service_account
                     FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
                         LEFT JOIN public.roles USING (role_id)
                     WHERE
@@ -351,7 +354,7 @@ def edit_account(user_id, tenant_id, changes: schemas.EditAccountSchema):
     return {"data": __get_account_info(tenant_id=tenant_id, user_id=user_id)}


-def edit_member(user_id_to_update, tenant_id, changes: schemas_ee.EditMemberSchema, editor_id):
+def edit_member(user_id_to_update, tenant_id, changes: schemas.EditMemberSchema, editor_id):
     user = get_member(user_id=user_id_to_update, tenant_id=tenant_id)
     _changes = {}
     if editor_id != user_id_to_update:
@@ -472,7 +475,9 @@ def get_members(tenant_id):
                     FROM public.users
                         LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
                         LEFT JOIN public.roles USING (role_id)
-                    WHERE users.tenant_id = %(tenant_id)s AND users.deleted_at IS NULL
+                    WHERE users.tenant_id = %(tenant_id)s
+                      AND users.deleted_at IS NULL
+                      AND NOT users.service_account
                     ORDER BY name, user_id""",
                 {"tenant_id": tenant_id})
         )
@@ -626,17 +631,24 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
     with pg_client.PostgresClient() as cur:
         cur.execute(
             cur.mogrify(
-                f"SELECT user_id AS id,jwt_iat, changed_at FROM public.users INNER JOIN public.basic_authentication USING(user_id) WHERE user_id = %(userId)s AND tenant_id = %(tenant_id)s AND deleted_at IS NULL LIMIT 1;",
+                f"""SELECT user_id,
+                           jwt_iat,
+                           changed_at,
+                           service_account,
+                           basic_authentication.user_id IS NOT NULL AS has_basic_auth
+                    FROM public.users
+                        LEFT JOIN public.basic_authentication USING(user_id)
+                    WHERE user_id = %(userId)s
+                      AND tenant_id = %(tenant_id)s
+                      AND deleted_at IS NULL
+                    LIMIT 1;""",
                 {"userId": user_id, "tenant_id": tenant_id})
         )
         r = cur.fetchone()
         return r is not None \
-               and r.get("jwt_iat") is not None \
-               and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
-                    or (jwt_aud.startswith("plugin") \
-                        and (r["changed_at"] is None \
-                             or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
-                    )
+               and (r["service_account"] and not r["has_basic_auth"]
+                    or r.get("jwt_iat") is not None \
+                    and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1))


 def change_jwt_iat(user_id):
@@ -665,7 +677,8 @@ def authenticate(email, password, for_change_password=False) -> dict | None:
                        users.origin,
                        users.role_id,
                        roles.name AS role_name,
-                       roles.permissions
+                       roles.permissions,
+                       users.service_account
                     FROM public.users AS users INNER JOIN public.basic_authentication USING(user_id)
                         LEFT JOIN public.roles ON (roles.role_id = users.role_id AND roles.tenant_id = users.tenant_id)
                     WHERE users.email = %(email)s
@@ -694,7 +707,10 @@ def authenticate(email, password, for_change_password=False) -> dict | None:
         if for_change_password:
             return True
         r = helper.dict_to_camel_case(r)
-        if config("enforce_SSO", cast=bool, default=False) and helper.is_saml2_available():
+        if r["serviceAccount"]:
+            raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
+                                detail="service account is not authorized to login")
+        elif config("enforce_SSO", cast=bool, default=False) and helper.is_saml2_available():
             return {"errors": ["must sign-in with SSO, enforced by admin"]}

         jwt_iat = change_jwt_iat(r['userId'])
@@ -710,33 +726,27 @@ def authenticate(email, password, for_change_password=False) -> dict | None:
     return None


-def authenticate_sso(email, internal_id, exp=None):
+def get_user_role(tenant_id, user_id):
     with pg_client.PostgresClient() as cur:
-        query = cur.mogrify(
-            f"""SELECT
-                    users.user_id,
-                    users.tenant_id,
-                    users.role,
-                    users.name,
-                    (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
-                    (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
-                    (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
-                    origin,
-                    role_id
-                FROM public.users AS users
-                WHERE users.email = %(email)s AND internal_id = %(internal_id)s;""",
-            {"email": email, "internal_id": internal_id})
-
-        cur.execute(query)
-        r = cur.fetchone()
-
-    if r is not None:
-        r = helper.dict_to_camel_case(r)
-        jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['userId']))
-        return authorizers.generate_jwt(r['userId'], r['tenantId'],
-                                        iat=jwt_iat, aud=f"front:{helper.get_stage_name()}",
-                                        exp=(exp + jwt_iat // 1000) if exp is not None else None)
-    return None
+        cur.execute(
+            cur.mogrify(
+                f"""SELECT
+                        users.user_id,
+                        users.email,
+                        users.role,
+                        users.name,
+                        users.created_at,
+                        (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
+                        (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
+                        (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member
+                    FROM public.users
+                    WHERE users.deleted_at IS NULL
+                      AND users.user_id=%(user_id)s
+                      AND users.tenant_id=%(tenant_id)s
+                    LIMIT 1""",
+                {"tenant_id": tenant_id, "user_id": user_id})
+        )
+        return helper.dict_to_camel_case(cur.fetchone())


 def create_sso_user(tenant_id, email, admin, name, origin, role_id, internal_id=None):
@@ -772,6 +782,48 @@ def create_sso_user(tenant_id, email, admin, name, origin, role_id, internal_id=
         return helper.dict_to_camel_case(cur.fetchone())


+def __hard_delete_user(user_id):
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(
+            f"""DELETE FROM public.users
+                WHERE users.user_id = %(user_id)s AND users.deleted_at IS NOT NULL ;""",
+            {"user_id": user_id})
+        cur.execute(query)
+
+
+def authenticate_sso(email, internal_id, exp=None):
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(
+            f"""SELECT
+                    users.user_id,
+                    users.tenant_id,
+                    users.role,
+                    users.name,
+                    (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
+                    (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
+                    (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
+                    origin,
+                    role_id,
+                    service_account
+                FROM public.users AS users
+                WHERE users.email = %(email)s AND internal_id = %(internal_id)s;""",
+            {"email": email, "internal_id": internal_id})
+
+        cur.execute(query)
+        r = cur.fetchone()
+
+    if r is not None:
+        r = helper.dict_to_camel_case(r)
+        if r["serviceAccount"]:
+            raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
+                                detail="service account is not authorized to login")
+        jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['userId']))
+        return authorizers.generate_jwt(r['userId'], r['tenantId'],
+                                        iat=jwt_iat, aud=f"front:{helper.get_stage_name()}",
+                                        exp=(exp + jwt_iat // 1000) if exp is not None else None)
+    return None
+
+
 def restore_sso_user(user_id, tenant_id, email, admin, name, origin, role_id, internal_id=None):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(f"""\
@@ -822,35 +874,3 @@ def restore_sso_user(user_id, tenant_id, email, admin, name, origin, role_id, in
                 query
             )
             return helper.dict_to_camel_case(cur.fetchone())
-
-
-def __hard_delete_user(user_id):
-    with pg_client.PostgresClient() as cur:
-        query = cur.mogrify(
-            f"""DELETE FROM public.users
-                WHERE users.user_id = %(user_id)s AND users.deleted_at IS NOT NULL ;""",
-            {"user_id": user_id})
-        cur.execute(query)
-
-
-def get_user_role(tenant_id, user_id):
-    with pg_client.PostgresClient() as cur:
-        cur.execute(
-            cur.mogrify(
-                f"""SELECT
-                        users.user_id,
-                        users.email,
-                        users.role,
-                        users.name,
-                        users.created_at,
-                        (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
-                        (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
-                        (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member
-                    FROM public.users
-                    WHERE users.deleted_at IS NULL
-                      AND users.user_id=%(user_id)s
-                      AND users.tenant_id=%(tenant_id)s
-                    LIMIT 1""",
-                {"tenant_id": tenant_id, "user_id": user_id})
-        )
-        return helper.dict_to_camel_case(cur.fetchone())
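The behavioral core of this file is the new `auth_exists()` rule: a service account has no `basic_authentication` row, so there is no stored `jwt_iat` to compare against, and the token is accepted on identity alone; regular users still need a matching `iat`. A condensed restatement, with a plain dict standing in for the fetched row (illustrative, not the real cursor API):

    # Condensed sketch of the acceptance rule introduced above.
    def token_still_valid(row: dict | None, jwt_iat_s: int) -> bool:
        if row is None:
            return False
        # Service accounts: no basic-auth row means no stored jwt_iat; accept.
        if row["service_account"] and not row["has_basic_auth"]:
            return True
        # Regular users: the token's iat must match the stored one (1s tolerance).
        return (row.get("jwt_iat") is not None
                and abs(jwt_iat_s - row["jwt_iat"]) <= 1)


    assert token_still_valid({"service_account": True, "has_basic_auth": False}, 0)
    assert not token_still_valid({"service_account": False, "has_basic_auth": True,
                                  "jwt_iat": 100}, 200)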

View file

@@ -83,6 +83,8 @@ def update(tenant_id, webhook_id, changes, replace_none=False):
                 {"tenant_id": tenant_id, "id": webhook_id, **changes})
         )
         w = helper.dict_to_camel_case(cur.fetchone())
+    if w is None:
+        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"webhook not found.")
     w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
     if replace_none:
         for k in w.keys():
@@ -128,20 +130,22 @@ def exists_by_name(tenant_id: int, name: str, exclude_id: Optional[int],
     return row["exists"]


-def add_edit(tenant_id, data, replace_none=None):
-    if "name" in data and len(data["name"]) > 0 \
-            and exists_by_name(name=data["name"], exclude_id=data.get("webhookId"), tenant_id=tenant_id):
+def add_edit(tenant_id, data: schemas.WebhookSchema, replace_none=None):
+    if len(data.name) > 0 \
+            and exists_by_name(name=data.name, exclude_id=data.webhook_id):
         raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
-    if data.get("webhookId") is not None:
-        return update(tenant_id=tenant_id, webhook_id=data["webhookId"],
-                      changes={"endpoint": data["endpoint"],
-                               "authHeader": None if "authHeader" not in data else data["authHeader"],
-                               "name": data["name"] if "name" in data else ""}, replace_none=replace_none)
+    if data.webhook_id is not None:
+        return update(tenant_id=tenant_id, webhook_id=data.webhook_id,
+                      changes={"endpoint": data.endpoint,
+                               "authHeader": data.auth_header,
+                               "name": data.name},
+                      replace_none=replace_none)
     else:
         return add(tenant_id=tenant_id,
-                   endpoint=data["endpoint"],
-                   auth_header=None if "authHeader" not in data else data["authHeader"],
-                   name=data["name"] if "name" in data else "", replace_none=replace_none)
+                   endpoint=data.endpoint,
+                   auth_header=data.auth_header,
+                   name=data.name,
+                   replace_none=replace_none)


 def delete(tenant_id, webhook_id):
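`add_edit()` can drop all the `"x" in data` guards because the schema now supplies defaults and a nullable id; `webhook_id is None` selects the insert branch, and the update branch now surfaces a 404 instead of crashing on a missing row. A sketch of the schema shape that makes this work (field names follow the diff; the aliases and `populate_by_name` config are assumptions about how camelCase payloads map in):

    from typing import Optional

    from pydantic import BaseModel, Field


    class WebhookSchema(BaseModel):
        webhook_id: Optional[int] = Field(default=None, alias="webhookId")
        endpoint: str
        auth_header: Optional[str] = Field(default=None, alias="authHeader")
        name: str = ""

        model_config = {"populate_by_name": True}


    payload = WebhookSchema.model_validate({"endpoint": "https://hooks.example.com/or",
                                            "name": "alerts"})
    # webhook_id is None -> add_edit() takes the INSERT branch; otherwise UPDATE,
    # which now raises HTTP 404 when no row comes back.
    assert payload.webhook_id is None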

View file

@@ -5,9 +5,11 @@ from urllib.parse import urlparse

 from decouple import config
 from fastapi import Request
-from onelogin.saml2.auth import OneLogin_Saml2_Auth
 from starlette.datastructures import FormData

+if config("ENABLE_SSO", cast=bool, default=True):
+    from onelogin.saml2.auth import OneLogin_Saml2_Auth
+
 SAML2 = {
     "strict": config("saml_strict", cast=bool, default=True),
     "debug": config("saml_debug", cast=bool, default=True),

View file

@@ -18,9 +18,10 @@ if config('ch_receive_timeout', cast=int, default=-1) > 0:
 class ClickHouseClient:
     __client = None

-    def __init__(self):
+    def __init__(self, database=None):
         self.__client = clickhouse_driver.Client(host=config("ch_host"),
-                                                 database=config("ch_database", default="default"),
+                                                 database=database if database else config("ch_database",
+                                                                                           default="default"),
                                                  user=config("ch_user", default="default"),
                                                  password=config("ch_password", default=""),
                                                  port=config("ch_port", cast=int),
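Usage sketch for the new constructor parameter: callers can now target a specific ClickHouse database per call while everything else keeps falling back to the `ch_database` setting. The import path and context-manager usage are assumptions based on how the client is used elsewhere in the codebase, and "experimental" is a made-up database name:

    from chalicelib.utils.ch_client import ClickHouseClient

    with ClickHouseClient() as ch:                        # ch_database / "default"
        ch.execute("SELECT 1")

    with ClickHouseClient(database="experimental") as ch:  # per-call override
        ch.execute("SELECT count() FROM sessions")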

View file

@@ -25,7 +25,7 @@ class EventQueue():
             project_id, user_id, element = self.events.get()
             params[f'project_id_{i}'] = project_id
             params[f'user_id_{i}'] = user_id
-            for _key, _val in element.dict().items():
+            for _key, _val in element.model_dump().items():
                 if _key == 'data':
                     params[f'{_key}_{i}'] = json.dumps(_val)
                     if 'sessionId' in _val.keys():
@@ -77,7 +77,7 @@ async def terminate():
     logging.info('> queue fulshed')

 # def __process_schema(trace):
-#     data = trace.dict()
+#     data = trace.model_dump()
 #     data["parameters"] = json.dumps(trace.parameters) if trace.parameters is not None and len(
 #         trace.parameters.keys()) > 0 else None
 #     data["payload"] = json.dumps(trace.payload) if trace.payload is not None and len(trace.payload.keys()) > 0 else None

View file

@@ -82,7 +82,6 @@ rm -rf ./db_changes.sql
 rm -rf ./Dockerfile_bundle
 rm -rf ./entrypoint.bundle.sh
 rm -rf ./chalicelib/core/heatmaps.py
-rm -rf ./schemas.py
 rm -rf ./routers/subs/v1_api.py
 #exp rm -rf ./chalicelib/core/custom_metrics.py
 rm -rf ./chalicelib/core/performance_event.py
@@ -91,3 +90,5 @@ rm -rf ./app_alerts.py
 rm -rf ./build_alerts.sh
 rm -rf ./run-dev.sh
 rm -rf ./run-alerts-dev.sh
+rm -rf ./schemas/overrides.py
+rm -rf ./schemas/schemas.py

View file

@@ -10,12 +10,12 @@ from starlette.exceptions import HTTPException
 from starlette.requests import Request
 from starlette.responses import Response, JSONResponse

-import schemas_ee
+import schemas
 from chalicelib.utils import helper
 from chalicelib.core import traces


-async def OR_context(request: Request) -> schemas_ee.CurrentContext:
+async def OR_context(request: Request) -> schemas.CurrentContext:
     if hasattr(request.state, "currentContext"):
         return request.state.currentContext
     else:
@@ -55,11 +55,20 @@ class ORRoute(APIRoute):
         return custom_route_handler


-def __check(security_scopes: SecurityScopes, context: schemas_ee.CurrentContext = Depends(OR_context)):
+def __check(security_scopes: SecurityScopes, context: schemas.CurrentContext = Depends(OR_context)):
+    s_p = 0
     for scope in security_scopes.scopes:
+        if isinstance(scope, schemas.ServicePermissions):
+            s_p += 1
+        if context.service_account and not isinstance(scope, schemas.ServicePermissions) \
+                or not context.service_account and not isinstance(scope, schemas.Permissions):
+            continue
        if scope not in context.permissions:
            raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
                                detail="Not enough permissions")
+    if context.service_account and s_p == 0:
+        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
+                            detail="Not enough permissions (service account)")


 def OR_scope(*scopes):
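The new `__check()` routes each account type through its own scope family: service accounts are only judged against `ServicePermissions`, human users only against `Permissions`, and a service account is rejected outright on routes that declare no service-level scope. The same logic reduced to plain data (the enums here are illustrative stand-ins for the real ones in the refactored `schemas` package):

    from enum import Enum


    class Permissions(str, Enum):
        session_replay = "SESSION_REPLAY"
        dev_tools = "DEV_TOOLS"


    class ServicePermissions(str, Enum):
        session_replay = "SERVICE_SESSION_REPLAY"


    def check(scopes, granted, service_account: bool) -> bool:
        service_scopes = [s for s in scopes if isinstance(s, ServicePermissions)]
        for scope in scopes:
            # Each account type is only judged against its own scope family.
            if service_account and not isinstance(scope, ServicePermissions) \
                    or not service_account and not isinstance(scope, Permissions):
                continue
            if scope not in granted:
                return False
        # A service account needs at least one service-level scope on the route.
        return not (service_account and len(service_scopes) == 0)


    # A human user with replay rights passes a mixed-scope route:
    assert check([Permissions.session_replay, ServicePermissions.session_replay],
                 granted=[Permissions.session_replay], service_account=False)
    # A service account fails a route that declares no service scopes:
    assert not check([Permissions.dev_tools], granted=[], service_account=True)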

View file

@@ -1,20 +1,20 @@
-requests==2.31.0
+# Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
-boto3==1.26.148
-pyjwt==2.7.0
-psycopg2-binary==2.9.6
-elasticsearch==8.8.0
-jira==3.5.1
-fastapi==0.97.0
-uvicorn[standard]==0.22.0
+requests==2.31.0
+boto3==1.28.35
+pyjwt==2.8.0
+psycopg2-binary==2.9.7
+elasticsearch==8.9.0
+jira==3.5.2
+fastapi==0.103.0
+uvicorn[standard]==0.23.2
 python-decouple==3.8
-pydantic[email]==1.10.8
-apscheduler==3.10.1
-clickhouse-driver==0.2.6
+pydantic[email]==2.3.0
+apscheduler==3.10.4
 clickhouse-driver[lz4]==0.2.6
-python-multipart==0.0.5
-azure-storage-blob==12.16.0
+python-multipart==0.0.6
+azure-storage-blob==12.17.0

View file

@@ -1,18 +1,19 @@
-requests==2.31.0
+# Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
-boto3==1.26.148
-pyjwt==2.7.0
-psycopg2-binary==2.9.6
-elasticsearch==8.8.0
-jira==3.5.1
-fastapi==0.97.0
+requests==2.31.0
+boto3==1.28.35
+pyjwt==2.8.0
+psycopg2-binary==2.9.7
+elasticsearch==8.9.0
+jira==3.5.2
+fastapi==0.103.0
 python-decouple==3.8
-pydantic[email]==1.10.8
-apscheduler==3.10.1
-clickhouse-driver==0.2.5
-clickhouse-driver[lz4]==0.2.5
-redis==4.5.5
-azure-storage-blob==12.16.0
+pydantic[email]==2.3.0
+apscheduler==3.10.4
+clickhouse-driver[lz4]==0.2.6
+redis==5.0.0
+azure-storage-blob==12.17.0

View file

@@ -1,21 +1,21 @@
-requests==2.31.0
+# Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
-boto3==1.26.148
-pyjwt==2.7.0
-psycopg2-binary==2.9.6
-elasticsearch==8.8.0
-jira==3.5.1
-fastapi==0.97.0
-uvicorn[standard]==0.22.0
-gunicorn==20.1.0
+requests==2.31.0
+boto3==1.28.40
+pyjwt==2.8.0
+psycopg2-binary==2.9.7
+elasticsearch==8.9.0
+jira==3.5.2
+fastapi==0.103.1
+uvicorn[standard]==0.23.2
+gunicorn==21.2.0
 python-decouple==3.8
-pydantic[email]==1.10.8
-apscheduler==3.10.1
-clickhouse-driver==0.2.6
+pydantic[email]==2.3.0
+apscheduler==3.10.4
 clickhouse-driver[lz4]==0.2.6
 # TODO: enable after xmlsec fix https://github.com/xmlsec/python-xmlsec/issues/252
 #--no-binary is used to avoid libxml2 library version incompatibilities between xmlsec and lxml
@@ -23,6 +23,6 @@ clickhouse-driver[lz4]==0.2.6
 python3-saml==1.15.0
 python-multipart==0.0.6
-redis==4.5.5
+redis==5.0.0
 #confluent-kafka==2.1.0
-azure-storage-blob==12.16.0
+azure-storage-blob==12.17.0

View file

@@ -6,7 +6,6 @@ from fastapi import HTTPException, status
 from starlette.responses import RedirectResponse, FileResponse

 import schemas
-import schemas_ee
 from chalicelib.core import sessions, assist, heatmaps, sessions_favorite, sessions_assignments, errors, errors_viewed, \
     errors_favorite, sessions_notes, click_maps, sessions_replay, signup, feature_flags
 from chalicelib.core import sessions_viewed
@@ -18,9 +17,11 @@ from chalicelib.utils import captcha
 from chalicelib.utils import helper
 from chalicelib.utils.TimeUTC import TimeUTC
 from or_dependencies import OR_context, OR_scope
-from routers import saml
 from routers.base import get_routers
-from schemas_ee import Permissions
+from schemas import Permissions, ServicePermissions
+
+if config("ENABLE_SSO", cast=bool, default=True):
+    from routers import saml

 public_app, app, app_apikey = get_routers()
@@ -49,7 +50,7 @@ def login_user(data: schemas.UserLoginSchema = Body(...)):
             detail="Invalid captcha."
         )

-    r = users.authenticate(data.email, data.password)
+    r = users.authenticate(data.email, data.password.get_secret_value())
     if r is None:
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,
@@ -132,9 +133,9 @@ def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSc

 @app.post('/client/members', tags=["client"])
-def add_member(background_tasks: BackgroundTasks, data: schemas_ee.CreateMemberSchema = Body(...),
+def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...),
                context: schemas.CurrentContext = Depends(OR_context)):
-    return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(),
+    return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data,
                                background_tasks=background_tasks)
@@ -167,11 +168,12 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema =
     if user["expiredChange"]:
         return {"errors": ["expired change, please re-use the invitation link"]}

-    return users.set_password_invitation(new_password=data.password, user_id=user["userId"], tenant_id=user["tenantId"])
+    return users.set_password_invitation(new_password=data.password.get_secret_value(), user_id=user["userId"],
+                                         tenant_id=user["tenantId"])


 @app.put('/client/members/{memberId}', tags=["client"])
-def edit_member(memberId: int, data: schemas_ee.EditMemberSchema,
+def edit_member(memberId: int, data: schemas.EditMemberSchema,
                 context: schemas.CurrentContext = Depends(OR_context)):
     return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data,
                              user_id_to_update=memberId)
@@ -203,11 +205,13 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
 # for backward compatibility
 @app.get('/{projectId}/sessions/{sessionId}', tags=["sessions", "replay"],
-         dependencies=[OR_scope(Permissions.session_replay)])
+         dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)])
 def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
                 context: schemas.CurrentContext = Depends(OR_context)):
-    if isinstance(sessionId, str):
+    if not sessionId.isnumeric():
         return {"errors": ["session not found"]}
+    else:
+        sessionId = int(sessionId)
     data = sessions_replay.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True,
                                          include_fav_viewed=True, group_metadata=True, context=context)
     if data is None:
@@ -220,12 +224,30 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba
     }


+@app.post('/{projectId}/sessions/search', tags=["sessions"],
+          dependencies=[OR_scope(Permissions.session_replay)])
+def sessions_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...),
+                    context: schemas.CurrentContext = Depends(OR_context)):
+    data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id)
+    return {'data': data}
+
+
+@app.post('/{projectId}/sessions/search/ids', tags=["sessions"],
+          dependencies=[OR_scope(Permissions.session_replay)])
+def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...),
+                       context: schemas.CurrentContext = Depends(OR_context)):
+    data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True)
+    return {'data': data}
+
+
 @app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"],
-         dependencies=[OR_scope(Permissions.session_replay)])
+         dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)])
 def get_session_events(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
                        context: schemas.CurrentContext = Depends(OR_context)):
-    if isinstance(sessionId, str):
+    if not sessionId.isnumeric():
         return {"errors": ["session not found"]}
+    else:
+        sessionId = int(sessionId)
     data = sessions_replay.get_replay(project_id=projectId, session_id=sessionId, full_data=True,
                                       include_fav_viewed=True, group_metadata=True, context=context)
     if data is None:
@@ -239,11 +261,13 @@ def get_session_events(projectId: int, sessionId: Union[int, str], background_ta

 @app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"],
-         dependencies=[OR_scope(Permissions.session_replay)])
+         dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)])
 def get_session_events(projectId: int, sessionId: Union[int, str],
                        context: schemas.CurrentContext = Depends(OR_context)):
-    if isinstance(sessionId, str):
+    if not sessionId.isnumeric():
         return {"errors": ["session not found"]}
+    else:
+        sessionId = int(sessionId)
     data = sessions_replay.get_events(project_id=projectId, session_id=sessionId)
     if data is None:
         return {"errors": ["session not found"]}
@@ -265,18 +289,6 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str,
     }


-@app.post('/{projectId}/errors/search', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)])
-def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...),
-                  context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": errors.search(data, projectId, user_id=context.user_id)}
-
-
-@app.get('/{projectId}/errors/stats', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)])
-def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int,
-                 context: schemas.CurrentContext = Depends(OR_context)):
-    return errors.stats(projectId, user_id=context.user_id, startTimestamp=startTimestamp, endTimestamp=endTimestamp)
-
-
 @app.get('/{projectId}/errors/{errorId}', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)])
 def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24,
                        density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)):
@@ -288,15 +300,6 @@ def errors_get_details(projectId: int, errorId: str, background_tasks: Backgroun
     return data


-@app.get('/{projectId}/errors/{errorId}/stats', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)])
-def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7),
-                                    endDate: int = TimeUTC.now(), density: int = 7,
-                                    context: schemas.CurrentContext = Depends(OR_context)):
-    data = errors.get_details_chart(project_id=projectId, user_id=context.user_id, error_id=errorId,
-                                    **{"startDate": startDate, "endDate": endDate, "density": density})
-    return data
-
-
 @app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)])
 def errors_get_details_sourcemaps(projectId: int, errorId: str,
                                   context: schemas.CurrentContext = Depends(OR_context)):
@@ -326,9 +329,10 @@ def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDa
     return {"errors": ["undefined action"]}


-@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"], dependencies=[OR_scope(Permissions.assist_live)])
+@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"],
+         dependencies=[OR_scope(Permissions.assist_live, ServicePermissions.assist_live)])
 def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks,
-                     context: schemas_ee.CurrentContext = Depends(OR_context)):
+                     context: schemas.CurrentContext = Depends(OR_context)):
     data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId)
     if data is None:
         data = sessions_replay.get_replay(context=context, project_id=projectId, session_id=sessionId,
@@ -342,13 +346,15 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun

 @app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"],
-         dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay)])
+         dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay,
+                                ServicePermissions.assist_live, ServicePermissions.session_replay)])
 def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
                                  context: schemas.CurrentContext = Depends(OR_context)):
     not_found = {"errors": ["Replay file not found"]}
-    if isinstance(sessionId, str):
+    if not sessionId.isnumeric():
+        print(f"{sessionId} not a valid number.")
         return not_found
+    else:
+        sessionId = int(sessionId)
     if not sessions.session_exists(project_id=projectId, session_id=sessionId):
         print(f"{projectId}/{sessionId} not found in DB.")
         if not assist.session_exists(project_id=projectId, session_id=sessionId):
@@ -363,13 +369,16 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],

 @app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"],
-         dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools)])
+         dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools,
+                                ServicePermissions.assist_live, ServicePermissions.session_replay,
+                                ServicePermissions.dev_tools)])
 def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
                                    context: schemas.CurrentContext = Depends(OR_context)):
     not_found = {"errors": ["Devtools file not found"]}
-    if isinstance(sessionId, str):
+    if not sessionId.isnumeric():
+        print(f"{sessionId} not a valid number.")
         return not_found
+    else:
+        sessionId = int(sessionId)
     if not sessions.session_exists(project_id=projectId, session_id=sessionId):
         print(f"{projectId}/{sessionId} not found in DB.")
         if not assist.session_exists(project_id=projectId, session_id=sessionId):
@@ -392,7 +401,7 @@ def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema =
 @app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"],
          dependencies=[OR_scope(Permissions.session_replay)])
 def add_remove_favorite_session2(projectId: int, sessionId: int,
-                                 context: schemas_ee.CurrentContext = Depends(OR_context)):
+                                 context: schemas.CurrentContext = Depends(OR_context)):
     return sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId)
@@ -511,7 +520,7 @@ def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),

 @app.post('/{projectId}/click_maps/search', tags=["click maps"], dependencies=[OR_scope(Permissions.session_replay)])
-def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...),
+def click_map_search(projectId: int, data: schemas.ClickMapSessionsSearch = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
     return {"data": click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)}
@@ -547,7 +556,7 @@ def update_feature_flag(project_id: int, feature_flag_id: int, data: schemas.Fea
 @app.delete('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"],
             dependencies=[OR_scope(Permissions.feature_flags)])
 def delete_feature_flag(project_id: int, feature_flag_id: int, _=Body(None)):
-    return feature_flags.delete_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id)
+    return {"data": feature_flags.delete_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id)}


 @app.post('/{project_id}/feature-flags/{feature_flag_id}/status', tags=["feature flags"],
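The recurring `isinstance(sessionId, str)` to `not sessionId.isnumeric()` change in the replay endpoints swaps a type check for a content check before the `int()` cast, so numeric-looking strings are accepted and everything else still answers "session not found". A stand-alone restatement of the guard (illustrative helper; the real endpoints inline it, and the `str()` coercion here sidesteps the case where the framework already parsed the path parameter to int):

    from typing import Optional, Union


    def parse_session_id(session_id: Union[int, str]) -> Optional[int]:
        session_id = str(session_id)
        if not session_id.isnumeric():
            return None  # caller answers {"errors": ["session not found"]}
        return int(session_id)


    assert parse_session_id("123") == 123
    assert parse_session_id("abc") is None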

View file

@@ -7,14 +7,14 @@ unlock.check()
 from or_dependencies import OR_context
 from routers.base import get_routers
-import schemas_ee
+import schemas
 from fastapi import Depends, Body
 public_app, app, app_apikey = get_routers()
 @app.get('/client/roles', tags=["client", "roles"])
-def get_roles(context: schemas_ee.CurrentContext = Depends(OR_context)):
+def get_roles(context: schemas.CurrentContext = Depends(OR_context)):
     return {
         'data': roles.get_roles(tenant_id=context.tenant_id)
     }
@@ -22,8 +22,8 @@ def get_roles(context: schemas_ee.CurrentContext = Depends(OR_context)):
 @app.post('/client/roles', tags=["client", "roles"])
 @app.put('/client/roles', tags=["client", "roles"])
-def add_role(data: schemas_ee.RolePayloadSchema = Body(...),
-             context: schemas_ee.CurrentContext = Depends(OR_context)):
+def add_role(data: schemas.RolePayloadSchema = Body(...),
+             context: schemas.CurrentContext = Depends(OR_context)):
     data = roles.create(tenant_id=context.tenant_id, user_id=context.user_id, data=data)
     if "errors" in data:
         return data
@@ -35,8 +35,8 @@ def add_role(data: schemas_ee.RolePayloadSchema = Body(...),
 @app.post('/client/roles/{roleId}', tags=["client", "roles"])
 @app.put('/client/roles/{roleId}', tags=["client", "roles"])
-def edit_role(roleId: int, data: schemas_ee.RolePayloadSchema = Body(...),
-              context: schemas_ee.CurrentContext = Depends(OR_context)):
+def edit_role(roleId: int, data: schemas.RolePayloadSchema = Body(...),
+              context: schemas.CurrentContext = Depends(OR_context)):
     data = roles.update(tenant_id=context.tenant_id, user_id=context.user_id, role_id=roleId, data=data)
     if "errors" in data:
         return data
@@ -47,7 +47,7 @@ def edit_role(roleId: int, data: schemas_ee.RolePayloadSchema = Body(...),
 @app.delete('/client/roles/{roleId}', tags=["client", "roles"])
-def delete_role(roleId: int, _=Body(None), context: schemas_ee.CurrentContext = Depends(OR_context)):
+def delete_role(roleId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
     data = roles.delete(tenant_id=context.tenant_id, user_id=context.user_id, role_id=roleId)
     if "errors" in data:
         return data
@@ -63,48 +63,48 @@ def get_assist_credentials():
 @app.post('/trails', tags=["traces", "trails"])
-def get_trails(data: schemas_ee.TrailSearchPayloadSchema = Body(...),
-               context: schemas_ee.CurrentContext = Depends(OR_context)):
+def get_trails(data: schemas.TrailSearchPayloadSchema = Body(...),
+               context: schemas.CurrentContext = Depends(OR_context)):
     return {
         'data': traces.get_all(tenant_id=context.tenant_id, data=data)
     }
 @app.post('/trails/actions', tags=["traces", "trails"])
-def get_available_trail_actions(context: schemas_ee.CurrentContext = Depends(OR_context)):
+def get_available_trail_actions(context: schemas.CurrentContext = Depends(OR_context)):
     return {'data': traces.get_available_actions(tenant_id=context.tenant_id)}
 @app.put('/{projectId}/assist/save', tags=["assist"])
-def sign_record_for_upload(projectId: int, data: schemas_ee.AssistRecordPayloadSchema = Body(...),
-                           context: schemas_ee.CurrentContext = Depends(OR_context)):
+def sign_record_for_upload(projectId: int, data: schemas.AssistRecordPayloadSchema = Body(...),
+                           context: schemas.CurrentContext = Depends(OR_context)):
     if not sessions.session_exists(project_id=projectId, session_id=data.session_id):
         return {"errors": ["Session not found"]}
     return {"data": assist_records.presign_record(project_id=projectId, data=data, context=context)}
 @app.put('/{projectId}/assist/save/done', tags=["assist"])
-def save_record_after_upload(projectId: int, data: schemas_ee.AssistRecordSavePayloadSchema = Body(...),
-                             context: schemas_ee.CurrentContext = Depends(OR_context)):
+def save_record_after_upload(projectId: int, data: schemas.AssistRecordSavePayloadSchema = Body(...),
+                             context: schemas.CurrentContext = Depends(OR_context)):
     if not sessions.session_exists(project_id=projectId, session_id=data.session_id):
         return {"errors": ["Session not found"]}
     return {"data": {"URL": assist_records.save_record(project_id=projectId, data=data, context=context)}}
 @app.post('/{projectId}/assist/records', tags=["assist"])
-def search_records(projectId: int, data: schemas_ee.AssistRecordSearchPayloadSchema = Body(...),
-                   context: schemas_ee.CurrentContext = Depends(OR_context)):
+def search_records(projectId: int, data: schemas.AssistRecordSearchPayloadSchema = Body(...),
+                   context: schemas.CurrentContext = Depends(OR_context)):
     return {"data": assist_records.search_records(project_id=projectId, data=data, context=context)}
 @app.get('/{projectId}/assist/records/{recordId}', tags=["assist"])
-def get_record(projectId: int, recordId: int, context: schemas_ee.CurrentContext = Depends(OR_context)):
+def get_record(projectId: int, recordId: int, context: schemas.CurrentContext = Depends(OR_context)):
     return {"data": assist_records.get_record(project_id=projectId, record_id=recordId, context=context)}
 @app.post('/{projectId}/assist/records/{recordId}', tags=["assist"])
-def update_record(projectId: int, recordId: int, data: schemas_ee.AssistRecordUpdatePayloadSchema = Body(...),
-                  context: schemas_ee.CurrentContext = Depends(OR_context)):
+def update_record(projectId: int, recordId: int, data: schemas.AssistRecordUpdatePayloadSchema = Body(...),
+                  context: schemas.CurrentContext = Depends(OR_context)):
     result = assist_records.update_record(project_id=projectId, record_id=recordId, data=data, context=context)
     if "errors" in result:
         return result
@@ -113,7 +113,7 @@ def update_record(projectId: int, recordId: int, data: schemas_ee.AssistRecordUp
 @app.delete('/{projectId}/assist/records/{recordId}', tags=["assist"])
 def delete_record(projectId: int, recordId: int, _=Body(None),
-                  context: schemas_ee.CurrentContext = Depends(OR_context)):
+                  context: schemas.CurrentContext = Depends(OR_context)):
     result = assist_records.delete_record(project_id=projectId, record_id=recordId, context=context)
     if "errors" in result:
         return result
@@ -121,8 +121,8 @@ def delete_record(projectId: int, recordId: int, _=Body(None),
 @app.post('/{projectId}/signals', tags=['signals'])
-def send_interactions(projectId: int, data: schemas_ee.SignalsSchema = Body(...),
-                      context: schemas_ee.CurrentContext = Depends(OR_context)):
+def send_interactions(projectId: int, data: schemas.SignalsSchema = Body(...),
+                      context: schemas.CurrentContext = Depends(OR_context)):
     data = signals.handle_frontend_signals_queued(project_id=projectId, user_id=context.user_id, data=data)
     if "errors" in data:
@@ -132,6 +132,6 @@ def send_interactions(projectId: int, data: schemas_ee.SignalsSchema = Body(...)
 @app.post('/{projectId}/dashboard/insights', tags=["insights"])
 @app.post('/{projectId}/dashboard/insights', tags=["insights"])
-def sessions_search(projectId: int, data: schemas_ee.GetInsightsSchema = Body(...),
-                    context: schemas_ee.CurrentContext = Depends(OR_context)):
+def sessions_search(projectId: int, data: schemas.GetInsightsSchema = Body(...),
+                    context: schemas.CurrentContext = Depends(OR_context)):
     return {'data': sessions_insights.fetch_selected(data=data, project_id=projectId)}

View file

@@ -4,9 +4,8 @@ import schemas
 from chalicelib.core import product_analytics
 from or_dependencies import OR_scope
 from routers.base import get_routers
-from schemas_ee import Permissions
-public_app, app, app_apikey = get_routers([OR_scope(Permissions.metrics)])
+public_app, app, app_apikey = get_routers([OR_scope(schemas.Permissions.metrics)])
 @app.post('/{projectId}/insights/journey', tags=["insights"])
@@ -14,7 +13,6 @@ public_app, app, app_apikey = get_routers([OR_scope(Permissions.metrics)])
 async def get_insights_journey(projectId: int, data: schemas.PathAnalysisSchema = Body(...)):
     return {"data": product_analytics.path_analysis(project_id=projectId, **data.dict())}
 # @app.post('/{projectId}/insights/users_acquisition', tags=["insights"])
 # @app.get('/{projectId}/insights/users_acquisition', tags=["insights"])
 # async def get_users_acquisition(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
@@ -81,7 +79,6 @@ async def get_insights_journey(projectId: int, data: schemas.PathAnalysisSchema
 # return {"data": product_analytics.users_slipping(project_id=projectId, **data.dict())}
 #
 #
 # @app.route('/{projectId}/dashboard/{widget}/search', methods=['GET'])

View file

@@ -3,17 +3,15 @@ from typing import Union
 from fastapi import Body, Depends, Request
 import schemas
-import schemas_ee
 from chalicelib.core import dashboards, custom_metrics, funnels
 from or_dependencies import OR_context, OR_scope
 from routers.base import get_routers
-from schemas_ee import Permissions
-public_app, app, app_apikey = get_routers([OR_scope(Permissions.metrics)])
+public_app, app, app_apikey = get_routers([OR_scope(schemas.Permissions.metrics)])
 @app.post('/{projectId}/dashboards', tags=["dashboard"])
-@app.put('/{projectId}/dashboards', tags=["dashboard"])
+# @app.put('/{projectId}/dashboards', tags=["dashboard"])
 def create_dashboards(projectId: int, data: schemas.CreateDashboardSchema = Body(...),
                       context: schemas.CurrentContext = Depends(OR_context)):
     return dashboards.create_dashboard(project_id=projectId, user_id=context.user_id, data=data)
@@ -32,7 +30,7 @@ def get_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentCont
     return {"data": data}
-@app.post('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
+# @app.post('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
 @app.put('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
 def update_dashboard(projectId: int, dashboardId: int, data: schemas.EditDashboardSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
@@ -52,8 +50,8 @@ def pin_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentCont
 @app.post('/{projectId}/dashboards/{dashboardId}/cards', tags=["cards"])
-@app.post('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
-@app.put('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
+# @app.post('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
+# @app.put('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"])
 def add_card_to_dashboard(projectId: int, dashboardId: int,
                           data: schemas.AddWidgetToDashboardPayloadSchema = Body(...),
                           context: schemas.CurrentContext = Depends(OR_context)):
@@ -62,16 +60,16 @@ def add_card_to_dashboard(projectId: int, dashboardId: int,
 @app.post('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
-@app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
+# @app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
 def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int,
-                                       data: schemas_ee.CardSchema = Body(...),
+                                       data: schemas.CardSchema = Body(...),
                                        context: schemas.CurrentContext = Depends(OR_context)):
     return {"data": dashboards.create_metric_add_widget(project_id=projectId, user_id=context.user_id,
                                                         dashboard_id=dashboardId, data=data)}
 @app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
-@app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
+# @app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
 def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int,
                                data: schemas.UpdateWidgetPayloadSchema = Body(...),
                                context: schemas.CurrentContext = Depends(OR_context)):
@@ -98,18 +96,18 @@ def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int
 @app.post('/{projectId}/cards/try', tags=["cards"])
-@app.post('/{projectId}/metrics/try', tags=["dashboard"])
-@app.put('/{projectId}/metrics/try', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"])
-@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
-def try_card(projectId: int, data: schemas_ee.CardSchema = Body(...),
+# @app.post('/{projectId}/metrics/try', tags=["dashboard"])
+# @app.put('/{projectId}/metrics/try', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"])
+# @app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
+def try_card(projectId: int, data: schemas.CardSchema = Body(...),
             context: schemas.CurrentContext = Depends(OR_context)):
     return {"data": custom_metrics.merged_live(project_id=projectId, data=data, user_id=context.user_id)}
 @app.post('/{projectId}/cards/try/sessions', tags=["cards"])
-@app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"])
 def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(...),
                       context: schemas.CurrentContext = Depends(OR_context)):
     data = custom_metrics.try_sessions(project_id=projectId, user_id=context.user_id, data=data)
@@ -117,48 +115,50 @@ def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(..
 @app.post('/{projectId}/cards/try/issues', tags=["cards"])
-@app.post('/{projectId}/metrics/try/issues', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/try/issues', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"])
 def try_card_funnel_issues(projectId: int, data: schemas.CardSessionsSchema = Body(...),
                            context: schemas.CurrentContext = Depends(OR_context)):
     if len(data.series) == 0:
         return {"data": []}
-    data.series[0].filter.startDate = data.startTimestamp
-    data.series[0].filter.endDate = data.endTimestamp
+    data.series[0].filter.startTimestamp = data.startTimestamp
+    data.series[0].filter.endTimestamp = data.endTimestamp
     data = funnels.get_issues_on_the_fly_widget(project_id=projectId, data=data.series[0].filter)
     return {"data": data}
 @app.get('/{projectId}/cards', tags=["cards"])
-@app.get('/{projectId}/metrics', tags=["dashboard"])
-@app.get('/{projectId}/custom_metrics', tags=["customMetrics"])
+# @app.get('/{projectId}/metrics', tags=["dashboard"])
+# @app.get('/{projectId}/custom_metrics', tags=["customMetrics"])
 def get_cards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
     return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)}
 @app.post('/{projectId}/cards', tags=["cards"])
-@app.post('/{projectId}/metrics', tags=["dashboard"])
-@app.put('/{projectId}/metrics', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics', tags=["customMetrics"])
-@app.put('/{projectId}/custom_metrics', tags=["customMetrics"])
-def create_card(projectId: int, data: schemas_ee.CardSchema = Body(...),
+# @app.post('/{projectId}/metrics', tags=["dashboard"])
+# @app.put('/{projectId}/metrics', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics', tags=["customMetrics"])
+# @app.put('/{projectId}/custom_metrics', tags=["customMetrics"])
+def create_card(projectId: int, data: schemas.CardSchema = Body(...),
                 context: schemas.CurrentContext = Depends(OR_context)):
-    return custom_metrics.create(project_id=projectId, user_id=context.user_id, data=data)
+    return custom_metrics.create_card(project_id=projectId, user_id=context.user_id, data=data)
 @app.post('/{projectId}/cards/search', tags=["cards"])
-@app.post('/{projectId}/metrics/search', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/search', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/search', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/search', tags=["customMetrics"])
 def search_cards(projectId: int, data: schemas.SearchCardsSchema = Body(...),
                  context: schemas.CurrentContext = Depends(OR_context)):
     return {"data": custom_metrics.search_all(project_id=projectId, user_id=context.user_id, data=data)}
 @app.get('/{projectId}/cards/{metric_id}', tags=["cards"])
-@app.get('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
-@app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
+# @app.get('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
+# @app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
 def get_card(projectId: int, metric_id: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)):
-    if not isinstance(metric_id, int):
+    if metric_id.isnumeric():
+        metric_id = int(metric_id)
+    else:
         return {"errors": ["invalid card_id"]}
     data = custom_metrics.get_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id)
     if data is None:
@@ -175,8 +175,8 @@ def get_card(projectId: int, metric_id: Union[int, str], context: schemas.Curren
 @app.post('/{projectId}/cards/{metric_id}/sessions', tags=["cards"])
-@app.post('/{projectId}/metrics/{metric_id}/sessions', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/{metric_id}/sessions', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"])
 def get_card_sessions(projectId: int, metric_id: int,
                       data: schemas.CardSessionsSchema = Body(...),
                       context: schemas.CurrentContext = Depends(OR_context)):
@@ -187,13 +187,15 @@ def get_card_sessions(projectId: int, metric_id: int,
 @app.post('/{projectId}/cards/{metric_id}/issues', tags=["cards"])
-@app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"])
 def get_card_funnel_issues(projectId: int, metric_id: Union[int, str],
                            data: schemas.CardSessionsSchema = Body(...),
                            context: schemas.CurrentContext = Depends(OR_context)):
-    if not isinstance(metric_id, int):
-        return {"errors": [f"invalid card_id: {metric_id}"]}
+    if metric_id.isnumeric():
+        metric_id = int(metric_id)
+    else:
+        return {"errors": ["invalid card_id"]}
     data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
                                             data=data)
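The same digit-check-then-convert block now appears in several handlers (sessionId earlier, metric_id in get_card and get_card_funnel_issues). A shared helper, hypothetical and not part of this diff, would keep the coercion in one place:

```python
from typing import Optional, Union

def parse_id(value: Union[int, str]) -> Optional[int]:
    """Coerce a path parameter to int, or return None if it is not numeric."""
    if isinstance(value, int):
        return value
    return int(value) if value.isnumeric() else None

# Usage sketch inside an endpoint:
# if (metric_id := parse_id(metric_id)) is None:
#     return {"errors": ["invalid card_id"]}
```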
@@ -203,8 +205,8 @@ def get_card_funnel_issues(projectId: int, metric_id: Union[int, str],
 @app.post('/{projectId}/cards/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
-@app.post('/{projectId}/metrics/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/{metric_id}/issues/{issueId}/sessions', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/{metric_id}/issues/{issueId}/sessions', tags=["customMetrics"])
 def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str,
                                      data: schemas.CardSessionsSchema = Body(...),
                                      context: schemas.CurrentContext = Depends(OR_context)):
@@ -216,22 +218,22 @@ def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: st
 @app.post('/{projectId}/cards/{metric_id}/errors', tags=["dashboard"])
-@app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"])
 def get_custom_metric_errors_list(projectId: int, metric_id: int,
                                   data: schemas.CardSessionsSchema = Body(...),
                                   context: schemas.CurrentContext = Depends(OR_context)):
-    data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
-                                          data=data)
+    data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id,
+                                          metric_id=metric_id, data=data)
     if data is None:
         return {"errors": ["custom metric not found"]}
     return {"data": data}
 @app.post('/{projectId}/cards/{metric_id}/chart', tags=["card"])
-@app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"])
-def get_card_chart(projectId: int, metric_id: int, request: Request, data: schemas.CardChartSchema = Body(...),
+# @app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"])
+def get_card_chart(projectId: int, metric_id: int, request: Request, data: schemas.CardSessionsSchema = Body(...),
                    context: schemas.CurrentContext = Depends(OR_context)):
     data = custom_metrics.make_chart_from_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
                                                data=data)
@@ -239,25 +241,25 @@ def get_card_chart(projectId: int, metric_id: int, request: Request, data: schem
 @app.post('/{projectId}/cards/{metric_id}', tags=["dashboard"])
-@app.post('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
-@app.put('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
-@app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
-def update_custom_metric(projectId: int, metric_id: int, data: schemas_ee.UpdateCardSchema = Body(...),
+# @app.post('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
+# @app.put('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
+# @app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
+def update_custom_metric(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...),
                          context: schemas.CurrentContext = Depends(OR_context)):
-    data = custom_metrics.update(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
+    data = custom_metrics.update_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
     if data is None:
         return {"errors": ["custom metric not found"]}
     return {"data": data}
 @app.post('/{projectId}/cards/{metric_id}/status', tags=["dashboard"])
-@app.post('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
-@app.put('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
-@app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
-@app.put('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
+# @app.post('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
+# @app.put('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"])
+# @app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
+# @app.put('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
 def update_custom_metric_state(projectId: int, metric_id: int,
-                               data: schemas.UpdateCustomMetricsStatusSchema = Body(...),
+                               data: schemas.UpdateCardStatusSchema = Body(...),
                                context: schemas.CurrentContext = Depends(OR_context)):
     return {
         "data": custom_metrics.change_state(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
@@ -265,8 +267,8 @@ def update_custom_metric_state(projectId: int, metric_id: int,
 @app.delete('/{projectId}/cards/{metric_id}', tags=["dashboard"])
-@app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
-@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
+# @app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
+# @app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
 def delete_custom_metric(projectId: int, metric_id: int, _=Body(None),
                          context: schemas.CurrentContext = Depends(OR_context)):
-    return {"data": custom_metrics.delete(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}
+    return {"data": custom_metrics.delete_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}

View file

@@ -0,0 +1,3 @@
+from .schemas import *
+from .schemas_ee import *
+from . import overrides as _overrides
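The new schemas package relies on star-import ordering: FOSS names load first, then EE definitions of the same names shadow them, and importing overrides runs any import-time patching. A toy illustration of the shadowing rule (module names here are made up, and `__all__` is ignored for brevity):

```python
import types

foss = types.ModuleType("foss")
exec("class CurrentContext:\n    edition = 'foss'", foss.__dict__)
ee = types.ModuleType("ee")
exec("class CurrentContext:\n    edition = 'ee'", ee.__dict__)

namespace = {}
for module in (foss, ee):  # later star-imports win for shared names
    namespace.update({k: v for k, v in module.__dict__.items()
                      if not k.startswith("_")})
print(namespace["CurrentContext"].edition)  # 'ee'
```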

View file

@@ -1,11 +1,12 @@
-from enum import Enum
 from typing import Optional, List, Union, Literal
-from pydantic import BaseModel, Field, EmailStr
-from pydantic import root_validator, validator
-import schemas
+from pydantic import Field, EmailStr, field_validator, model_validator
+from . import schemas
 from chalicelib.utils.TimeUTC import TimeUTC
+from .overrides import BaseModel, Enum
+from .overrides import transform_email, remove_whitespace, remove_duplicate_values, \
+    single_to_list, ORUnion
 class Permissions(str, Enum):
@@ -18,8 +19,16 @@ class Permissions(str, Enum):
     feature_flags = "FEATURE_FLAGS"
+class ServicePermissions(str, Enum):
+    session_replay = "SERVICE_SESSION_REPLAY"
+    dev_tools = "SERVICE_DEV_TOOLS"
+    assist_live = "SERVICE_ASSIST_LIVE"
+    assist_call = "SERVICE_ASSIST_CALL"
 class CurrentContext(schemas.CurrentContext):
-    permissions: List[Optional[Permissions]] = Field(...)
+    permissions: List[Union[Permissions, ServicePermissions]] = Field(...)
+    service_account: bool = Field(default=False)
 class RolePayloadSchema(BaseModel):
@@ -28,10 +37,7 @@ class RolePayloadSchema(BaseModel):
     permissions: List[Permissions] = Field(...)
     all_projects: bool = Field(default=True)
     projects: List[int] = Field(default=[])
-    _transform_name = validator('name', pre=True, allow_reuse=True)(schemas.remove_whitespace)
-    class Config:
-        alias_generator = schemas.attribute_to_camel_case
+    _transform_name = field_validator('name', mode="before")(remove_whitespace)
 class SignalsSchema(BaseModel):
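These hunks are the pydantic v1 to v2 migration in miniature: `validator(..., pre=True)` becomes `field_validator(..., mode="before")`, `root_validator` becomes `model_validator`, and the per-class `class Config` with `alias_generator` moves into `model_config` (here presumably centralized in the shared `overrides.BaseModel`). A self-contained sketch of the v2 equivalents, with a stand-in whitespace cleaner:

```python
from pydantic import BaseModel, ConfigDict, field_validator, model_validator
from pydantic.alias_generators import to_camel

def remove_whitespace(value):
    # Stand-in for the shared transformer imported from overrides.
    return " ".join(value.split()) if isinstance(value, str) else value

class RolePayload(BaseModel):
    # Replaces `class Config: alias_generator = attribute_to_camel_case`.
    model_config = ConfigDict(alias_generator=to_camel, populate_by_name=True)

    name: str
    all_projects: bool = True

    # v1: validator('name', pre=True, allow_reuse=True)(remove_whitespace)
    _clean_name = field_validator("name", mode="before")(remove_whitespace)

    # v1: @root_validator(pre=True)
    @model_validator(mode="before")
    @classmethod
    def fill_defaults(cls, values):
        values.setdefault("allProjects", True)
        return values

print(RolePayload(name="  admin  role ", allProjects=False).name)  # 'admin role'
```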
@@ -55,9 +61,6 @@ class GetInsightsSchema(schemas._TimedSchema):
     metricValue: List[InsightCategories] = Field(default=[])
     series: List[schemas.CardSeriesSchema] = Field(default=[])
-    class Config:
-        alias_generator = schemas.attribute_to_camel_case
 class CreateMemberSchema(schemas.CreateMemberSchema):
     roleId: Optional[int] = Field(None)
@@ -79,7 +82,7 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema):
     action: Optional[str] = Field(default=None)
     order: schemas.SortOrderType = Field(default=schemas.SortOrderType.desc)
-    @root_validator(pre=True)
+    @model_validator(mode="before")
     def transform_order(cls, values):
         if values.get("order") is None:
             values["order"] = schemas.SortOrderType.desc
@@ -87,9 +90,6 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema):
         values["order"] = values["order"].upper()
         return values
-    class Config:
-        alias_generator = schemas.attribute_to_camel_case
 class SessionModel(BaseModel):
     viewed: bool = Field(default=False)
@@ -119,16 +119,13 @@ class SessionModel(BaseModel):
 class AssistRecordUpdatePayloadSchema(BaseModel):
     name: str = Field(..., min_length=1)
-    _transform_name = validator('name', pre=True, allow_reuse=True)(schemas.remove_whitespace)
+    _transform_name = field_validator('name', mode="before")(remove_whitespace)
 class AssistRecordPayloadSchema(AssistRecordUpdatePayloadSchema):
     duration: int = Field(...)
     session_id: int = Field(...)
-    class Config:
-        alias_generator = schemas.attribute_to_camel_case
 class AssistRecordSavePayloadSchema(AssistRecordPayloadSchema):
     key: str = Field(...)
@@ -139,41 +136,14 @@ class AssistRecordSearchPayloadSchema(schemas._PaginatedSchema, schemas._TimedSc
     query: Optional[str] = Field(default=None)
     order: Literal["asc", "desc"] = Field(default="desc")
-    class Config:
-        alias_generator = schemas.attribute_to_camel_case
 # TODO: move these to schema when Insights is supported on PG
-class MetricOfInsights(str, Enum):
-    issue_categories = "issueCategories"
-class CardSchema(schemas.CardSchema):
-    metric_of: Union[schemas.MetricOfTimeseries, schemas.MetricOfTable, \
-                     schemas.MetricOfErrors, schemas.MetricOfPerformance, \
-                     schemas.MetricOfResources, schemas.MetricOfWebVitals, \
-                     schemas.MetricOfClickMap, MetricOfInsights] = Field(default=schemas.MetricOfTable.user_id)
-    metric_value: List[Union[schemas.IssueType, InsightCategories]] = Field(default=[])
-    @root_validator
+class CardInsights(schemas.CardInsights):
+    metric_value: List[InsightCategories] = Field(default=[])
+    @model_validator(mode='after')
     def restrictions(cls, values):
         return values
-    @root_validator
-    def validator(cls, values):
-        values = super().validator(values)
-        if values.get("metric_type") == schemas.MetricType.insights:
-            assert values.get("view_type") == schemas.MetricOtherViewType.list_chart, \
-                f"viewType must be 'list' for metricOf:{values.get('metric_of')}"
-            assert isinstance(values.get("metric_of"), MetricOfInsights), \
-                f"metricOf must be of type {MetricOfInsights} for metricType:{schemas.MetricType.insights}"
-        if values.get("metric_value") is not None and len(values.get("metric_value")) > 0:
-            for i in values.get("metric_value"):
-                assert isinstance(i, InsightCategories), \
-                    f"metricValue should be of type [InsightCategories] for metricType:{schemas.MetricType.insights}"
-        return values
-class UpdateCardSchema(CardSchema):
-    series: List[schemas.CardUpdateSeriesSchema] = Field(...)
+CardSchema = ORUnion(Union[schemas.__cards_union_base, CardInsights], discriminator='metric_type')
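The notable change here: the v1 `root_validator` that dispatched on `metric_type` with hand-written asserts is replaced by a tagged union, with `ORUnion` (a project helper) apparently building a pydantic v2 discriminated union so the insights-specific `CardInsights` variant is selected by its discriminator. A minimal discriminated-union sketch with stand-in card variants:

```python
from typing import Annotated, List, Literal, Union
from pydantic import BaseModel, Field, TypeAdapter

class TimeseriesCard(BaseModel):
    metric_type: Literal["timeseries"]
    view_type: str = "lineChart"

class InsightsCard(BaseModel):
    metric_type: Literal["insights"]
    metric_value: List[str] = []

# The discriminator picks the right model from the tag, replacing the asserts.
CardSchema = Annotated[Union[TimeseriesCard, InsightsCard],
                       Field(discriminator="metric_type")]

card = TypeAdapter(CardSchema).validate_python(
    {"metric_type": "insights", "metric_value": ["issueCategories"]})
print(type(card).__name__)  # 'InsightsCard'
```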

View file

@@ -18,13 +18,13 @@
   },
   "homepage": "https://github.com/openreplay/openreplay#readme",
   "dependencies": {
-    "@maxmind/geoip2-node": "^3.5.0",
-    "@socket.io/redis-adapter": "^8.1.0",
+    "@maxmind/geoip2-node": "^4.2.0",
+    "@socket.io/redis-adapter": "^8.2.1",
     "express": "^4.18.2",
-    "jsonwebtoken": "^9.0.0",
-    "redis": "^4.6.6",
-    "socket.io": "^4.6.1",
+    "jsonwebtoken": "^9.0.1",
+    "redis": "^4.6.7",
+    "socket.io": "^4.7.2",
     "ua-parser-js": "^1.0.35",
-    "uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.23.0"
+    "uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.31.0"
   }
 }

View file

@@ -0,0 +1,7 @@
+CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.15.0-ee';
+ALTER TABLE experimental.events
+    ADD COLUMN IF NOT EXISTS transfer_size Nullable(UInt32);
+ALTER TABLE experimental.sessions
+    ADD COLUMN IF NOT EXISTS timezone LowCardinality(Nullable(String));
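A quick way to smoke-test this ClickHouse migration from Python; the clickhouse-driver client and the connection parameters are assumptions, not part of this diff (the ALTERs are idempotent thanks to IF NOT EXISTS):

```python
from clickhouse_driver import Client

client = Client(host="localhost")  # host/credentials are placeholders
client.execute("ALTER TABLE experimental.events "
               "ADD COLUMN IF NOT EXISTS transfer_size Nullable(UInt32)")
client.execute("ALTER TABLE experimental.sessions "
               "ADD COLUMN IF NOT EXISTS timezone LowCardinality(Nullable(String))")
print(client.execute("SELECT openreplay_version()"))  # [('v1.15.0-ee',)]
```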

View file

@@ -79,6 +79,7 @@ CREATE TABLE IF NOT EXISTS experimental.events
     issue_id Nullable(String),
     error_tags_keys Array(String),
     error_tags_values Array(Nullable(String)),
+    transfer_size Nullable(UInt32),
     message_id UInt64 DEFAULT 0,
     _timestamp DateTime DEFAULT now()
 ) ENGINE = ReplacingMergeTree(_timestamp)
@@ -131,6 +132,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions
     user_state LowCardinality(String),
     platform Enum8('web'=1,'ios'=2,'android'=3) DEFAULT 'web',
     datetime DateTime,
+    timezone LowCardinality(Nullable(String)),
     duration UInt32,
     pages_count UInt16,
     events_count UInt16,
@@ -273,6 +275,7 @@ SELECT session_id,
        issue_id,
        error_tags_keys,
        error_tags_values,
+       transfer_size,
        message_id,
        _timestamp
 FROM experimental.events

View file

@@ -71,6 +71,14 @@ UPDATE public.roles
 SET permissions = (SELECT array_agg(distinct e) FROM unnest(permissions || '{FEATURE_FLAGS}') AS e)
 where not permissions @> '{FEATURE_FLAGS}';
+ALTER TYPE public.user_role ADD VALUE IF NOT EXISTS 'service';
+ALTER TABLE IF EXISTS public.users
+    ADD COLUMN IF NOT EXISTS service_account bool NOT NULL DEFAULT FALSE;
+ALTER TABLE IF EXISTS public.roles
+    ADD COLUMN IF NOT EXISTS service_role bool NOT NULL DEFAULT FALSE;
 COMMIT;
 \elif :is_next

View file

@@ -0,0 +1,36 @@
+\set previous_version 'v1.14.0-ee'
+\set next_version 'v1.15.0-ee'
+SELECT openreplay_version() AS current_version,
+       openreplay_version() = :'previous_version' AS valid_previous,
+       openreplay_version() = :'next_version' AS is_next
+\gset
+\if :valid_previous
+\echo valid previous DB version :'previous_version', starting DB upgrade to :'next_version'
+BEGIN;
+SELECT format($fn_def$
+CREATE OR REPLACE FUNCTION openreplay_version()
+    RETURNS text AS
+$$
+SELECT '%1$s'
+$$ LANGUAGE sql IMMUTABLE;
+$fn_def$, :'next_version')
+\gexec
+--
+ALTER TABLE IF EXISTS events_common.requests
+    ADD COLUMN transfer_size bigint NULL;
+ALTER TABLE IF EXISTS public.sessions
+    ADD COLUMN IF NOT EXISTS timezone text NULL;
+ALTER TABLE IF EXISTS public.projects
+    ADD COLUMN IF NOT EXISTS platform public.platform NOT NULL DEFAULT 'web';
+COMMIT;
+\elif :is_next
+\echo new version detected :'next_version', nothing to do
+\else
+\warn skipping DB upgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
+\endif
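The \gset / \if guard above only runs the ALTERs when the database reports exactly the previous version. The same gate expressed in Python (a sketch; the psycopg2 connection string and any surrounding tooling are assumptions):

```python
import psycopg2

PREVIOUS, NEXT = "v1.14.0-ee", "v1.15.0-ee"

with psycopg2.connect("dbname=openreplay") as conn, conn.cursor() as cur:
    cur.execute("SELECT openreplay_version()")
    current = cur.fetchone()[0]
    if current == PREVIOUS:
        print(f"valid previous DB version {current}, upgrading to {NEXT}")
        # run the ALTER TABLE statements here, inside one transaction
    elif current == NEXT:
        print("new version detected, nothing to do")
    else:
        print(f"skipping upgrade: expected {PREVIOUS}, found {current}")
```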

View file

@@ -8,7 +8,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto;
 CREATE OR REPLACE FUNCTION openreplay_version()
     RETURNS text AS
 $$
-SELECT 'v1.14.0-ee'
+SELECT 'v1.15.0-ee'
 $$ LANGUAGE sql IMMUTABLE;
@@ -107,6 +107,7 @@ $$
             ('dashboards'),
             ('dashboard_widgets'),
             ('errors'),
+            ('errors_tags'),
             ('integrations'),
             ('issues'),
             ('jira_cloud'),
@@ -172,32 +173,34 @@ $$
     protected bool NOT NULL DEFAULT FALSE,
     all_projects bool NOT NULL DEFAULT TRUE,
     created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
-    deleted_at timestamp NULL DEFAULT NULL
+    deleted_at timestamp NULL DEFAULT NULL,
+    service_role bool NOT NULL DEFAULT FALSE
 );
 IF NOT EXISTS(SELECT *
               FROM pg_type typ
               WHERE typ.typname = 'user_role') THEN
-    CREATE TYPE user_role AS ENUM ('owner','admin','member');
+    CREATE TYPE user_role AS ENUM ('owner','admin','member','service');
 END IF;
 CREATE TABLE IF NOT EXISTS users
 (
     user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
     tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE,
     email text NOT NULL UNIQUE,
     role user_role NOT NULL DEFAULT 'member',
     name text NOT NULL,
     created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
     deleted_at timestamp without time zone NULL DEFAULT NULL,
     api_key text UNIQUE DEFAULT generate_api_key(20) NOT NULL,
     jwt_iat timestamp without time zone NULL DEFAULT NULL,
     data jsonb NOT NULL DEFAULT'{}'::jsonb,
     weekly_report boolean NOT NULL DEFAULT TRUE,
     origin text NULL DEFAULT NULL,
     role_id integer REFERENCES roles (role_id) ON DELETE SET NULL,
-    internal_id text NULL DEFAULT NULL
+    internal_id text NULL DEFAULT NULL,
+    service_account bool NOT NULL DEFAULT FALSE
 );
 CREATE INDEX IF NOT EXISTS users_tenant_id_deleted_at_N_idx ON users (tenant_id) WHERE deleted_at ISNULL;
 CREATE INDEX IF NOT EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops);
@@ -230,12 +233,19 @@ $$
 );
 CREATE UNIQUE INDEX IF NOT EXISTS oauth_authentication_unique_user_id_provider_idx ON oauth_authentication (user_id, provider);
+IF NOT EXISTS(SELECT *
+              FROM pg_type typ
+              WHERE typ.typname = 'platform') THEN
+    CREATE TYPE platform AS ENUM ('web','ios','android');
+END IF;
 CREATE TABLE IF NOT EXISTS projects
 (
     project_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
     project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20),
     tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE,
     name text NOT NULL,
+    platform platform NOT NULL DEFAULT 'web',
     active boolean NOT NULL,
     sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100),
     created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
@@ -487,11 +497,20 @@ $$
 CREATE INDEX IF NOT EXISTS user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id);
 CREATE INDEX IF NOT EXISTS user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id);
-IF NOT EXISTS(SELECT *
-              FROM pg_type typ
-              WHERE typ.typname = 'platform') THEN
-    CREATE TYPE platform AS ENUM ('web','ios','android');
-END IF;
+CREATE TABLE IF NOT EXISTS errors_tags
+(
+    key text NOT NULL,
+    value text NOT NULL,
+    created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
+    error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE,
+    session_id bigint NOT NULL,
+    message_id bigint NOT NULL,
+    FOREIGN KEY (session_id, message_id) REFERENCES events.errors (session_id, message_id) ON DELETE CASCADE
+);
+CREATE INDEX IF NOT EXISTS errors_tags_error_id_idx ON errors_tags (error_id);
+CREATE INDEX IF NOT EXISTS errors_tags_session_id_idx ON errors_tags (session_id);
+CREATE INDEX IF NOT EXISTS errors_tags_message_id_idx ON errors_tags (message_id);
 IF NOT EXISTS(SELECT *
               FROM pg_type typ
               WHERE typ.typname = 'http_method') THEN
@@ -512,6 +531,7 @@ $$
     project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
     tracker_version text NOT NULL,
     start_ts bigint NOT NULL,
+    timezone text NULL,
     duration integer NULL,
     rev_id text DEFAULT NULL,
     platform platform NOT NULL DEFAULT 'web',
@@ -1086,21 +1106,6 @@ $$
 CREATE INDEX IF NOT EXISTS errors_error_id_timestamp_session_id_idx ON events.errors (error_id, timestamp, session_id);
 CREATE INDEX IF NOT EXISTS errors_error_id_idx ON events.errors (error_id);
-CREATE TABLE IF NOT EXISTS errors_tags
-(
-    key text NOT NULL,
-    value text NOT NULL,
-    created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
-    error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE,
-    session_id bigint NOT NULL,
-    message_id bigint NOT NULL,
-    FOREIGN KEY (session_id, message_id) REFERENCES events.errors (session_id, message_id) ON DELETE CASCADE
-);
-CREATE INDEX IF NOT EXISTS errors_tags_error_id_idx ON errors_tags (error_id);
-CREATE INDEX IF NOT EXISTS errors_tags_session_id_idx ON errors_tags (session_id);
-CREATE INDEX IF NOT EXISTS errors_tags_message_id_idx ON errors_tags (message_id);
 IF NOT EXISTS(SELECT *
               FROM pg_type typ
               WHERE typ.typname = 'http_method') THEN
@@ -1280,6 +1285,7 @@ $$
     host text NULL,
     path text NULL,
     query text NULL,
+    transfer_size bigint NULL,
     PRIMARY KEY (session_id, timestamp, seq_index)
 );
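Note that errors_tags moves out of the events section into the public part of the schema with the same columns and the same composite foreign key into events.errors. A usage sketch for the table; all identifiers and values below are placeholders, and the referenced error and event rows must already exist:

```python
import psycopg2

with psycopg2.connect("dbname=openreplay") as conn, conn.cursor() as cur:
    cur.execute("""INSERT INTO public.errors_tags
                       (key, value, error_id, session_id, message_id)
                   VALUES (%s, %s, %s, %s, %s)""",
                ("browser", "Firefox", "error-123", 42, 7))
```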

View file

@@ -0,0 +1,36 @@
+\set previous_version 'v1.14.0'
+\set next_version 'v1.15.0'
+SELECT openreplay_version() AS current_version,
+       openreplay_version() = :'previous_version' AS valid_previous,
+       openreplay_version() = :'next_version' AS is_next
+\gset
+\if :valid_previous
+\echo valid previous DB version :'previous_version', starting DB upgrade to :'next_version'
+BEGIN;
+SELECT format($fn_def$
+CREATE OR REPLACE FUNCTION openreplay_version()
+    RETURNS text AS
+$$
+SELECT '%1$s'
+$$ LANGUAGE sql IMMUTABLE;
+$fn_def$, :'next_version')
+\gexec
+--
+ALTER TABLE IF EXISTS events_common.requests
+    ADD COLUMN transfer_size bigint NULL;
+ALTER TABLE IF EXISTS public.sessions
+    ADD COLUMN IF NOT EXISTS timezone text NULL;
+ALTER TABLE IF EXISTS public.projects
+    ADD COLUMN IF NOT EXISTS platform public.platform NOT NULL DEFAULT 'web';
+COMMIT;
+\elif :is_next
+\echo new version detected :'next_version', nothing to do
+\else
+\warn skipping DB upgrade of :'next_version', expected previous version :'previous_version', found :'current_version'
+\endif

View file

@@ -8,7 +8,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto;
 CREATE OR REPLACE FUNCTION openreplay_version()
     RETURNS text AS
 $$
-SELECT 'v1.14.0'
+SELECT 'v1.15.0'
 $$ LANGUAGE sql IMMUTABLE;
@@ -163,6 +163,7 @@ $$
 );
 CREATE UNIQUE INDEX oauth_authentication_unique_user_id_provider_idx ON oauth_authentication (user_id, provider);
+CREATE TYPE platform AS ENUM ('web','ios','android');
 CREATE TABLE projects
 (
@@ -171,6 +172,7 @@ $$
     name text NOT NULL,
     active boolean NOT NULL,
     sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100),
+    platform platform NOT NULL DEFAULT 'web',
     created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
     deleted_at timestamp without time zone NULL DEFAULT NULL,
     max_session_duration integer NOT NULL DEFAULT 7200000,
@@ -375,10 +377,23 @@ $$
 CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id);
 CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id);
+CREATE TABLE errors_tags
+(
+    key text NOT NULL,
+    value text NOT NULL,
+    created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
+    error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE,
+    session_id bigint NOT NULL,
+    message_id bigint NOT NULL,
+    FOREIGN KEY (session_id, message_id) REFERENCES events.errors (session_id, message_id) ON DELETE CASCADE
+);
+CREATE INDEX errors_tags_error_id_idx ON errors_tags (error_id);
+CREATE INDEX errors_tags_session_id_idx ON errors_tags (session_id);
+CREATE INDEX errors_tags_message_id_idx ON errors_tags (message_id);
 CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other');
 CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS','AC','AN','BU','CP','CS','CT','DD','DG','DY','EA','FQ','FX','HV','IC','JT','MI','NH','NQ','NT','PC','PU','PZ','RH','SU','TA','TP','VD','WK','YD','YU','ZR');
-CREATE TYPE platform AS ENUM ('web','ios','android');
 CREATE TABLE sessions
 (
@@ -386,6 +401,7 @@ $$
     project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
     tracker_version text NOT NULL,
     start_ts bigint NOT NULL,
+    timezone text NULL,
     duration integer NULL,
     rev_id text DEFAULT NULL,
     platform platform NOT NULL DEFAULT 'web',
@@ -560,6 +576,7 @@ $$
     host text NULL,
     path text NULL,
     query text NULL,
+    transfer_size bigint NULL,
     PRIMARY KEY (session_id, timestamp, seq_index)
 );
@@ -687,20 +704,6 @@ $$
 CREATE INDEX errors_error_id_timestamp_session_id_idx ON events.errors (error_id, timestamp, session_id);
 CREATE INDEX errors_error_id_idx ON events.errors (error_id);
-CREATE TABLE errors_tags
-(
-    key text NOT NULL,
-    value text NOT NULL,
-    created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
-    error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE,
-    session_id bigint NOT NULL,
-    message_id bigint NOT NULL,
-    FOREIGN KEY (session_id, message_id) REFERENCES events.errors (session_id, message_id) ON DELETE CASCADE
-);
-CREATE INDEX errors_tags_error_id_idx ON errors_tags (error_id);
-CREATE INDEX errors_tags_session_id_idx ON errors_tags (session_id);
-CREATE INDEX errors_tags_message_id_idx ON errors_tags (message_id);
 CREATE TABLE events.graphql
 (

View file

@@ -18,8 +18,8 @@
   },
   "homepage": "https://github.com/openreplay/openreplay#readme",
   "dependencies": {
-    "@azure/storage-blob": "^12.14.0",
-    "aws-sdk": "^2.1385.0",
+    "@azure/storage-blob": "^12.15.0",
+    "aws-sdk": "^2.1440.0",
     "express": "^4.18.2",
     "source-map": "^0.7.4"
   }