* fix(chalice): fixed Math-operators validation
refactor(chalice): search for sessions that have events for heatmaps

* refactor(chalice): search for sessions that have at least 1 location event for heatmaps

* fix(chalice): fixed Math-operators validation
refactor(chalice): search for sessions that have events for heatmaps

* refactor(chalice): search for sessions that have at least 1 location event for heatmaps

* feat(chalice): autocomplete return top 10 with stats

* fix(chalice): fixed autocomplete top 10 meta-filters

* refactor(chalice): refactored authorize

* refactor(chalice): upgraded dependencies
refactor(alerts): upgraded dependencies
refactor(crons): upgraded dependencies

* refactor(chalice): refactored custom_metrics
This commit is contained in:
Kraiem Taha Yassine 2024-12-09 16:05:54 +01:00 committed by GitHub
parent d0ef617e40
commit d35837416b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 21 additions and 56 deletions

View file

@@ -45,8 +45,6 @@ class JWTAuth(HTTPBearer):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
detail="Invalid authentication scheme.")
jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials)
logger.info("------ jwt_payload ------")
logger.info(jwt_payload)
auth_exists = jwt_payload is not None and users.auth_exists(user_id=jwt_payload.get("userId", -1),
jwt_iat=jwt_payload.get("iat", 100))
if jwt_payload is None \
@@ -120,8 +118,7 @@ class JWTAuth(HTTPBearer):
jwt_payload = None
else:
jwt_payload = authorizers.jwt_refresh_authorizer(scheme="Bearer", token=request.cookies["spotRefreshToken"])
logger.info("__process_spot_refresh_call")
logger.info(jwt_payload)
if jwt_payload is None or jwt_payload.get("jti") is None:
logger.warning("Null spotRefreshToken's payload, or null JTI.")
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,

View file

@@ -12,20 +12,6 @@ from chalicelib.utils.TimeUTC import TimeUTC
logger = logging.getLogger(__name__)
# TODO: refactor this to split
# timeseries /
# table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs
# remove "table of" calls from this function
def __try_live(project_id, data: schemas.CardSchema):
results = []
for i, s in enumerate(data.series):
results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
view_type=data.view_type, metric_type=data.metric_type,
metric_of=data.metric_of, metric_value=data.metric_value))
return results
def __get_table_of_series(project_id, data: schemas.CardSchema):
results = []
for i, s in enumerate(data.series):
@@ -43,9 +29,6 @@ def __get_funnel_chart(project: schemas.ProjectContext, data: schemas.CardFunnel
"totalDropDueToIssues": 0
}
# return funnels.get_top_insights_on_the_fly_widget(project_id=project_id,
# data=data.series[0].filter,
# metric_format=data.metric_format)
return funnels.get_simple_funnel(project=project,
data=data.series[0].filter,
metric_format=data.metric_format)
@@ -93,7 +76,12 @@ def __get_path_analysis_chart(project: schemas.ProjectContext, user_id: int, dat
def __get_timeseries_chart(project: schemas.ProjectContext, data: schemas.CardTimeSeries, user_id: int = None):
series_charts = __try_live(project_id=project.project_id, data=data)
series_charts = []
for i, s in enumerate(data.series):
series_charts.append(sessions.search2_series(data=s.filter, project_id=project.project_id, density=data.density,
view_type=data.view_type, metric_type=data.metric_type,
metric_of=data.metric_of, metric_value=data.metric_value))
results = [{}] * len(series_charts[0])
for i in range(len(results)):
for j, series_chart in enumerate(series_charts):
@@ -179,12 +167,6 @@ def get_chart(project: schemas.ProjectContext, data: schemas.CardSchema, user_id
def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
# No need for this because UI is sending the full payload
# card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
# if card is None:
# return None
# metric: schemas.CardSchema = schemas.CardSchema(**card)
# metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id):
return None
results = []
@@ -553,17 +535,7 @@ def change_state(project_id, metric_id, user_id, status):
def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
data: schemas.CardSessionsSchema
# , range_value=None, start_date=None, end_date=None
):
# No need for this because UI is sending the full payload
# card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
# if card is None:
# return None
# metric: schemas.CardSchema = schemas.CardSchema(**card)
# metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
# if metric is None:
# return None
data: schemas.CardSessionsSchema):
if not card_exists(metric_id=metric_id, project_id=project_id, user_id=user_id):
return None
for s in data.series:

View file

@@ -1,5 +1,4 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
urllib3==2.2.3
requests==2.32.3
boto3==1.35.76
pyjwt==2.10.1

View file

@@ -1,4 +1,3 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==2.2.3
requests==2.32.3
boto3==1.35.76

View file

@@ -4,12 +4,12 @@ verify_ssl = true
name = "pypi"
[packages]
urllib3 = "==1.26.16"
urllib3 = "==2.2.3"
requests = "==2.32.3"
boto3 = "==1.35.76"
pyjwt = "==2.10.1"
psycopg2-binary = "==2.9.10"
psycopg = {extras = ["binary", "pool"], version = "==3.2.3"}
psycopg = {extras = ["pool", "binary"], version = "==3.2.3"}
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
clickhouse-connect = "==0.8.9"
elasticsearch = "==8.16.0"
@@ -21,10 +21,10 @@ gunicorn = "==23.0.0"
python-decouple = "==3.8"
pydantic = {extras = ["email"], version = "==2.10.3"}
apscheduler = "==3.11.0"
redis = "==5.2.1"
python3-saml = "==1.16.0"
python-multipart = "==0.0.17"
redis = "==5.2.0"
azure-storage-blob = "==12.23.1"
azure-storage-blob = "==12.24.0"
[dev-packages]

View file

@@ -1,5 +1,4 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
urllib3==2.2.3
requests==2.32.3
boto3==1.35.76
pyjwt==2.10.1
@@ -19,4 +18,4 @@ python-decouple==3.8
pydantic[email]==2.10.3
apscheduler==3.11.0
azure-storage-blob==12.23.1
azure-storage-blob==12.24.0

View file

@@ -1,5 +1,4 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
urllib3==2.2.3
requests==2.32.3
boto3==1.35.76
pyjwt==2.10.1
@@ -19,4 +18,4 @@ pydantic[email]==2.10.3
apscheduler==3.11.0
redis==5.2.0
azure-storage-blob==12.23.1
azure-storage-blob==12.24.0

View file

@@ -1,5 +1,4 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
urllib3==2.2.3
requests==2.32.3
boto3==1.35.76
pyjwt==2.10.1
@@ -20,12 +19,13 @@ python-decouple==3.8
pydantic[email]==2.10.3
apscheduler==3.11.0
redis==5.2.1
# TODO: enable after xmlsec fix https://github.com/xmlsec/python-xmlsec/issues/252
#--no-binary is used to avoid libxml2 library version incompatibilities between xmlsec and lxml
python3-saml==1.16.0
--no-binary=lxml
python-multipart==0.0.17
redis==5.2.0
#confluent-kafka==2.1.0
azure-storage-blob==12.23.1
azure-storage-blob==12.24.0