diff --git a/api/auth/auth_jwt.py b/api/auth/auth_jwt.py
index fd4d145b1..2e30e6975 100644
--- a/api/auth/auth_jwt.py
+++ b/api/auth/auth_jwt.py
@@ -45,8 +45,6 @@ class JWTAuth(HTTPBearer):
                 raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
                                     detail="Invalid authentication scheme.")
             jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials)
-            logger.info("------ jwt_payload ------")
-            logger.info(jwt_payload)
             auth_exists = jwt_payload is not None and users.auth_exists(user_id=jwt_payload.get("userId", -1),
                                                                         jwt_iat=jwt_payload.get("iat", 100))
             if jwt_payload is None \
@@ -120,8 +118,7 @@ class JWTAuth(HTTPBearer):
             jwt_payload = None
         else:
             jwt_payload = authorizers.jwt_refresh_authorizer(scheme="Bearer", token=request.cookies["spotRefreshToken"])
-            logger.info("__process_spot_refresh_call")
-            logger.info(jwt_payload)
+
         if jwt_payload is None or jwt_payload.get("jti") is None:
             logger.warning("Null spotRefreshToken's payload, or null JTI.")
             raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py
index c3ac00b69..01f04803a 100644
--- a/api/chalicelib/core/custom_metrics.py
+++ b/api/chalicelib/core/custom_metrics.py
@@ -12,20 +12,6 @@ from chalicelib.utils.TimeUTC import TimeUTC
 logger = logging.getLogger(__name__)
 
 
-# TODO: refactor this to split
-#       timeseries /
-#       table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs
-#       remove "table of" calls from this function
-def __try_live(project_id, data: schemas.CardSchema):
-    results = []
-    for i, s in enumerate(data.series):
-        results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
-                                               view_type=data.view_type, metric_type=data.metric_type,
-                                               metric_of=data.metric_of, metric_value=data.metric_value))
-
-    return results
-
-
 def __get_table_of_series(project_id, data: schemas.CardSchema):
     results = []
     for i, s in enumerate(data.series):
@@ -43,9 +29,6 @@ def __get_funnel_chart(project: schemas.ProjectContext, data: schemas.CardFunnel
             "totalDropDueToIssues": 0
         }
 
-    # return funnels.get_top_insights_on_the_fly_widget(project_id=project_id,
-    #                                                   data=data.series[0].filter,
-    #                                                   metric_format=data.metric_format)
     return funnels.get_simple_funnel(project=project, data=data.series[0].filter,
                                      metric_format=data.metric_format)
 
@@ -93,7 +76,12 @@ def __get_path_analysis_chart(project: schemas.ProjectContext, user_id: int, dat
 
 
 def __get_timeseries_chart(project: schemas.ProjectContext, data: schemas.CardTimeSeries, user_id: int = None):
-    series_charts = __try_live(project_id=project.project_id, data=data)
+    series_charts = []
+    for i, s in enumerate(data.series):
+        series_charts.append(sessions.search2_series(data=s.filter, project_id=project.project_id, density=data.density,
+                                                     view_type=data.view_type, metric_type=data.metric_type,
+                                                     metric_of=data.metric_of, metric_value=data.metric_value))
+
     results = [{}] * len(series_charts[0])
     for i in range(len(results)):
         for j, series_chart in enumerate(series_charts):
@@ -179,12 +167,6 @@ def get_chart(project: schemas.ProjectContext, data: schemas.CardSchema, user_id
 
 
 def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
-    # No need for this because UI is sending the full payload
-    # card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
-    # if card is None:
-    #     return None
-    # metric: schemas.CardSchema = schemas.CardSchema(**card)
-    # metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
     if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id):
         return None
     results = []
@@ -553,17 +535,7 @@ def change_state(project_id, metric_id, user_id, status):
 
 
 def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
-                                 data: schemas.CardSessionsSchema
-                                 # , range_value=None, start_date=None, end_date=None
-                                 ):
-    # No need for this because UI is sending the full payload
-    # card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
-    # if card is None:
-    #     return None
-    # metric: schemas.CardSchema = schemas.CardSchema(**card)
-    # metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
-    # if metric is None:
-    #     return None
+                                 data: schemas.CardSessionsSchema):
     if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id):
         return None
     for s in data.series:
diff --git a/api/requirements-alerts.txt b/api/requirements-alerts.txt
index 4f356e46c..ee8bbc950 100644
--- a/api/requirements-alerts.txt
+++ b/api/requirements-alerts.txt
@@ -1,5 +1,4 @@
-# Keep this version to not have conflicts between requests and boto3
-urllib3==1.26.16
+urllib3==2.2.3
 requests==2.32.3
 boto3==1.35.76
 pyjwt==2.10.1
diff --git a/api/requirements.txt b/api/requirements.txt
index 7cbe41468..d643061f1 100644
--- a/api/requirements.txt
+++ b/api/requirements.txt
@@ -1,4 +1,3 @@
-# Keep this version to not have conflicts between requests and boto3
 urllib3==2.2.3
 requests==2.32.3
 boto3==1.35.76
diff --git a/ee/api/Pipfile b/ee/api/Pipfile
index ee2617767..a32b99a8e 100644
--- a/ee/api/Pipfile
+++ b/ee/api/Pipfile
@@ -4,12 +4,12 @@ verify_ssl = true
 name = "pypi"
 
 [packages]
-urllib3 = "==1.26.16"
+urllib3 = "==2.2.3"
 requests = "==2.32.3"
 boto3 = "==1.35.76"
 pyjwt = "==2.10.1"
 psycopg2-binary = "==2.9.10"
-psycopg = {extras = ["binary", "pool"], version = "==3.2.3"}
+psycopg = {extras = ["pool", "binary"], version = "==3.2.3"}
 clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
 clickhouse-connect = "==0.8.9"
 elasticsearch = "==8.16.0"
@@ -21,10 +21,10 @@ gunicorn = "==23.0.0"
 python-decouple = "==3.8"
 pydantic = {extras = ["email"], version = "==2.10.3"}
 apscheduler = "==3.11.0"
+redis = "==5.2.1"
 python3-saml = "==1.16.0"
 python-multipart = "==0.0.17"
-redis = "==5.2.0"
-azure-storage-blob = "==12.23.1"
+azure-storage-blob = "==12.24.0"
 
 
 [dev-packages]
diff --git a/ee/api/requirements-alerts.txt b/ee/api/requirements-alerts.txt
index ebf1aea0d..adfc42bba 100644
--- a/ee/api/requirements-alerts.txt
+++ b/ee/api/requirements-alerts.txt
@@ -1,5 +1,4 @@
-# Keep this version to not have conflicts between requests and boto3
-urllib3==1.26.16
+urllib3==2.2.3
 requests==2.32.3
 boto3==1.35.76
 pyjwt==2.10.1
@@ -19,4 +18,4 @@ python-decouple==3.8
 pydantic[email]==2.10.3
 apscheduler==3.11.0
 
-azure-storage-blob==12.23.1
\ No newline at end of file
+azure-storage-blob==12.24.0
\ No newline at end of file
diff --git a/ee/api/requirements-crons.txt b/ee/api/requirements-crons.txt
index 6aa2370d8..68c115f1c 100644
--- a/ee/api/requirements-crons.txt
+++ b/ee/api/requirements-crons.txt
@@ -1,5 +1,4 @@
-# Keep this version to not have conflicts between requests and boto3
-urllib3==1.26.16
+urllib3==2.2.3
 requests==2.32.3
 boto3==1.35.76
 pyjwt==2.10.1
@@ -19,4 +18,4 @@ pydantic[email]==2.10.3
 apscheduler==3.11.0
 redis==5.2.0
 
-azure-storage-blob==12.23.1
+azure-storage-blob==12.24.0
diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt
index 0c0b2902a..3e9076049 100644
--- a/ee/api/requirements.txt
+++ b/ee/api/requirements.txt
@@ -1,5 +1,4 @@
-# Keep this version to not have conflicts between requests and boto3
-urllib3==1.26.16
+urllib3==2.2.3
 requests==2.32.3
 boto3==1.35.76
 pyjwt==2.10.1
@@ -20,12 +19,13 @@ python-decouple==3.8
 pydantic[email]==2.10.3
 apscheduler==3.11.0
+redis==5.2.1
+
 
 # TODO: enable after xmlsec fix https://github.com/xmlsec/python-xmlsec/issues/252
 #--no-binary is used to avoid libxml2 library version incompatibilities between xmlsec and lxml
 python3-saml==1.16.0 --no-binary=lxml
 python-multipart==0.0.17
-redis==5.2.0
 
 #confluent-kafka==2.1.0
 
-azure-storage-blob==12.23.1
+azure-storage-blob==12.24.0