* refactor(chalice): upgraded dependencies
refactor(crons): upgraded dependencies
refactor(alerts): upgraded dependencies

* fix(chalice): fixed boarding

* fix(chalice): fixed assign session

* refactor(assist-stats): upgraded dependencies

* fix(assist-stats): fixed import issue

* fix(chalice): changed env vars

* fix(chalice): fixed search sessions for EE
Authored by Kraiem Taha Yassine on 2025-02-04 19:06:10 +01:00, committed by GitHub
parent 0e5fe14dc2
commit cb8d87e367
26 changed files with 80 additions and 71 deletions

View file

@@ -6,7 +6,7 @@ name = "pypi"
[packages]
urllib3 = "==2.3.0"
requests = "==2.32.3"
boto3 = "==1.36.10"
boto3 = "==1.36.12"
pyjwt = "==2.10.1"
psycopg2-binary = "==2.9.10"
psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
@@ -14,7 +14,7 @@ clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
clickhouse-connect = "==0.8.15"
elasticsearch = "==8.17.1"
jira = "==3.8.0"
cachetools = "==5.5.0"
cachetools = "==5.5.1"
fastapi = "==0.115.8"
uvicorn = {extras = ["standard"], version = "==0.34.0"}
python-decouple = "==3.8"
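
The chalice Pipfile bumps boto3 to 1.36.12 and cachetools to 5.5.1 while leaving the other pins untouched. A quick, generic sanity check (not part of this commit) for confirming that a re-locked environment actually picked up the new pins:

from importlib.metadata import version

# Print the installed versions of the two packages whose pins changed.
for pkg in ("boto3", "cachetools"):
    print(pkg, version(pkg))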

View file

@@ -1,6 +1,5 @@
from chalicelib.core import log_tools
import requests
from chalicelib.core.log_tools import log_tools
from schemas import schemas
IN_TY = "bugsnag"

View file

@@ -1,5 +1,5 @@
import boto3
from chalicelib.core import log_tools
from chalicelib.core.log_tools import log_tools
from schemas import schemas
IN_TY = "cloudwatch"

View file

@@ -1,4 +1,4 @@
from chalicelib.core import log_tools
from chalicelib.core.log_tools import log_tools
from schemas import schemas
IN_TY = "datadog"

View file

@@ -1,8 +1,7 @@
import logging
from chalicelib.core.log_tools import log_tools
from elasticsearch import Elasticsearch
from chalicelib.core import log_tools
from schemas import schemas
logger = logging.getLogger(__name__)

View file

@@ -1,6 +1,7 @@
from chalicelib.utils import pg_client, helper
import json
from chalicelib.core.modules import TENANT_CONDITION
from chalicelib.utils import pg_client, helper
EXCEPT = ["jira_server", "jira_cloud"]

View file

@@ -1,4 +1,4 @@
from chalicelib.core import log_tools
from chalicelib.core.log_tools import log_tools
from schemas import schemas
IN_TY = "newrelic"

View file

@@ -1,4 +1,4 @@
from chalicelib.core import log_tools
from chalicelib.core.log_tools import log_tools
from schemas import schemas
IN_TY = "rollbar"

View file

@@ -1,5 +1,5 @@
import requests
from chalicelib.core import log_tools
from chalicelib.core.log_tools import log_tools
from schemas import schemas
IN_TY = "sentry"

View file

@@ -1,4 +1,4 @@
from chalicelib.core import log_tools
from chalicelib.core.log_tools import log_tools
from schemas import schemas
IN_TY = "stackdriver"

View file

@@ -1,4 +1,4 @@
from chalicelib.core import log_tools
from chalicelib.core.log_tools import log_tools
from schemas import schemas
IN_TY = "sumologic"

View file

@@ -39,8 +39,8 @@ def create_new_assignment(tenant_id, project_id, session_id, creator_id, assigne
issue = integration.issue_handler.create_new_assignment(title=title, assignee=assignee, description=description,
issue_type=issue_type,
integration_project_id=integration_project_id)
except integration_base_issue.RequestException as e:
return integration_base_issue.proxy_issues_handler(e)
except base_issue.RequestException as e:
return base_issue.proxy_issues_handler(e)
if issue is None or "id" not in issue:
return {"errors": ["something went wrong while creating the issue"]}
with pg_client.PostgresClient() as cur:

View file

@@ -63,4 +63,9 @@ sessions_region=us-east-1
SITE_URL=http://127.0.0.1:3333
sourcemaps_bucket=
sourcemaps_reader=http://127.0.0.1:3000/sourcemaps
TZ=UTC
TZ=UTC
EXP_CH_DRIVER=true
EXP_AUTOCOMPLETE=true
EXP_ALERTS=true
EXP_ERRORS_SEARCH=true
EXP_METRICS=true
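
The chalice env template gains five EXP_* flags. A hedged example of reading them with python-decouple, which is already pinned in this service's dependencies; the defaults and the boolean cast are assumptions, not taken from the code:

from decouple import config

# Experimental-feature switches added in this commit (defaults are illustrative).
EXP_CH_DRIVER = config("EXP_CH_DRIVER", default=False, cast=bool)
EXP_AUTOCOMPLETE = config("EXP_AUTOCOMPLETE", default=False, cast=bool)
EXP_METRICS = config("EXP_METRICS", default=False, cast=bool)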

View file

@@ -1,6 +1,6 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.10
boto3==1.36.12
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
@@ -8,7 +8,7 @@ clickhouse-driver[lz4]==0.2.9
clickhouse-connect==0.8.15
elasticsearch==8.17.1
jira==3.8.0
cachetools==5.5.0
cachetools==5.5.1
fastapi==0.115.8
uvicorn[standard]==0.34.0

View file

@@ -1,6 +1,6 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.10
boto3==1.36.12
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
@@ -8,7 +8,7 @@ clickhouse-driver[lz4]==0.2.9
clickhouse-connect==0.8.15
elasticsearch==8.17.1
jira==3.8.0
cachetools==5.5.0
cachetools==5.5.1
fastapi==0.115.8
uvicorn[standard]==0.34.0

View file

@@ -6,15 +6,15 @@ name = "pypi"
[packages]
urllib3 = "==2.3.0"
requests = "==2.32.3"
boto3 = "==1.36.10"
boto3 = "==1.36.12"
pyjwt = "==2.10.1"
psycopg2-binary = "==2.9.10"
psycopg = {extras = ["pool", "binary"], version = "==3.2.4"}
psycopg = {extras = ["binary", "pool"], version = "==3.2.4"}
clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"}
clickhouse-connect = "==0.8.15"
elasticsearch = "==8.17.1"
jira = "==3.8.0"
cachetools = "==5.5.0"
cachetools = "==5.5.1"
fastapi = "==0.115.8"
uvicorn = {extras = ["standard"], version = "==0.34.0"}
gunicorn = "==23.0.0"

View file

@@ -1,13 +1,10 @@
import ast
import logging
from typing import List, Union
import schemas
from chalicelib.core import events, metadata, projects
from chalicelib.core.metrics import metrics
from chalicelib.core.sessions import sessions_favorite, performance_event, sessions_legacy, sessions
from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
from chalicelib.utils import sql_helper as sh
from chalicelib.core import metadata, projects
from chalicelib.core.sessions import sessions_favorite, sessions_legacy, sessions
from chalicelib.utils import pg_client, helper, ch_client, exp_ch_helper
logger = logging.getLogger(__name__)
@@ -167,7 +164,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
logging.debug(main_query)
logging.debug("--------------------")
try:
sessions = cur.execute(main_query)
sessions_list = cur.execute(main_query)
except Exception as err:
logging.warning("--------- SESSIONS-CH SEARCH QUERY EXCEPTION -----------")
logging.warning(main_query)
@@ -176,27 +173,27 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
logging.warning("--------------------")
raise err
if errors_only or ids_only:
return helper.list_to_camel_case(sessions)
return helper.list_to_camel_case(sessions_list)
if len(sessions) > 0:
sessions = sessions[0]
if len(sessions_list) > 0:
sessions_list = sessions_list[0]
total = sessions["count"]
sessions = sessions["sessions"]
total = sessions_list["count"]
sessions_list = sessions_list["sessions"]
if data.group_by_user:
for i, s in enumerate(sessions):
sessions[i] = {**s.pop("last_session")[0], **s}
sessions[i].pop("rn")
sessions[i]["metadata"] = ast.literal_eval(sessions[i]["metadata"])
for i, s in enumerate(sessions_list):
sessions_list[i] = {**s.pop("last_session")[0], **s}
sessions_list[i].pop("rn")
sessions_list[i]["metadata"] = ast.literal_eval(sessions_list[i]["metadata"])
else:
for i in range(len(sessions)):
sessions[i]["metadata"] = ast.literal_eval(sessions[i]["metadata"])
sessions[i] = schemas.SessionModel.parse_obj(helper.dict_to_camel_case(sessions[i]))
for i in range(len(sessions_list)):
sessions_list[i]["metadata"] = ast.literal_eval(sessions_list[i]["metadata"])
sessions_list[i] = schemas.SessionModel.parse_obj(helper.dict_to_camel_case(sessions_list[i]))
return {
'total': total,
'sessions': sessions
'sessions': sessions_list
}
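
Most of this hunk renames the local query result from sessions to sessions_list, so it no longer shadows the sessions module imported at the top of the file. A stripped-down illustration of the hazard the rename removes (the cursor and query are placeholders):

from chalicelib.core.sessions import sessions  # module-level import used elsewhere in the file

def run_search(cur, main_query):
    # Before: `sessions = cur.execute(main_query)` made `sessions` a local name,
    # hiding the imported module everywhere inside this function.
    sessions_list = cur.execute(main_query)  # After: the module name stays reachable.
    return sessions_list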

View file

@@ -25,14 +25,16 @@ EMAIL_USE_TLS=true
EMAIL_USER=
ENABLE_SSO=false
EXP_7D_MV=false
EXP_ALERTS=false
EXP_AUTOCOMPLETE=true
EXP_ERRORS_GET=false
EXP_ERRORS_SEARCH=false
EXP_FUNNELS=false
EXP_RESOURCES=true
EXP_SESSIONS_SEARCH=false
EXP_SESSIONS_SEARCH_METRIC=true
EXP_CH_DRIVER=true
EXP_AUTOCOMPLETE=true
EXP_ALERTS=true
EXP_ERRORS_SEARCH=true
EXP_METRICS=true
FS_DIR=
idp_entityId=
idp_name=okta

View file

@@ -1,6 +1,6 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.10
boto3==1.36.12
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
@@ -8,7 +8,7 @@ clickhouse-driver[lz4]==0.2.9
clickhouse-connect==0.8.15
elasticsearch==8.17.1
jira==3.8.0
cachetools==5.5.0
cachetools==5.5.1
fastapi==0.115.8
uvicorn[standard]==0.34.0

View file

@@ -1,6 +1,6 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.10
boto3==1.36.12
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
@@ -8,7 +8,7 @@ clickhouse-driver[lz4]==0.2.9
clickhouse-connect==0.8.15
elasticsearch==8.17.1
jira==3.8.0
cachetools==5.5.0
cachetools==5.5.1
fastapi==0.115.8
python-decouple==3.8

View file

@@ -1,6 +1,6 @@
urllib3==2.3.0
requests==2.32.3
boto3==1.36.10
boto3==1.36.12
pyjwt==2.10.1
psycopg2-binary==2.9.10
psycopg[pool,binary]==3.2.4
@@ -8,7 +8,7 @@ clickhouse-driver[lz4]==0.2.9
clickhouse-connect==0.8.15
elasticsearch==8.17.1
jira==3.8.0
cachetools==5.5.0
cachetools==5.5.1
fastapi==0.115.8
uvicorn[standard]==0.34.0

View file

@@ -12,7 +12,7 @@ from chalicelib.core import assist, signup, feature_flags
from chalicelib.core.errors import errors
from chalicelib.core.metrics import heatmaps
from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_assignments, \
sessions_viewed, unprocessed_sessions
sessions_viewed, unprocessed_sessions, sessions_search
from chalicelib.core import tenants, users, projects, license
from chalicelib.core import webhook
from chalicelib.core.collaborations.collaboration_slack import Slack
@@ -266,10 +266,10 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
@app.post('/{projectId}/sessions/search', tags=["sessions"],
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def sessions_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...),
def search_sessions(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id,
platform=context.project.platform)
data = sessions_search.search_sessions(data=data, project_id=projectId, user_id=context.user_id,
platform=context.project.platform)
return {'data': data}
@@ -277,8 +277,8 @@ def sessions_search(projectId: int, data: schemas.SessionsSearchPayloadSchema =
dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True,
platform=context.project.platform)
data = sessions_search.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True,
platform=context.project.platform)
return {'data': data}
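
The EE router now imports the sessions_search module, so the POST /{projectId}/sessions/search handler is renamed to search_sessions: a def with the old name would rebind sessions_search at import time and shadow the module it needs to call. A small sketch of the resulting shape, using the names from the diff (route decorator and dependency wiring omitted):

from chalicelib.core.sessions import sessions_search  # module object

# def sessions_search(...):   # keeping the old handler name would hide the module
#     ...

def search_sessions(project_id, data, context):
    # Dispatch to the dedicated search module instead of the generic sessions module.
    return sessions_search.search_sessions(data=data, project_id=project_id,
                                           user_id=context.user_id,
                                           platform=context.project.platform)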

View file

@@ -1,4 +1,4 @@
FROM python:3.11-alpine
FROM python:3.12-alpine
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="Shekar Sirikonda<sshekarsiri@gmail.com>"
ARG GIT_SHA

View file

@@ -4,13 +4,14 @@ verify_ssl = true
name = "pypi"
[packages]
fastapi = "*"
sqlalchemy = "==2.0.21"
uvicorn = "==0.23.2"
fastapi = "==0.115.8"
sqlalchemy = "==2.0.37"
uvicorn = "==0.34.0"
python-decouple = "==3.8"
psycopg2-binary = "==2.9.7"
psycopg2-binary = "==2.9.10"
[dev-packages]
[requires]
python_version = "3.11"
python_version = "3.12"
python_full_version = "3.12.8"

View file

@@ -9,4 +9,10 @@ MAX_OVERFLOW=10
POOL_TIMEOUT=30
POOL_RECYCLE=3600
ACCESS_TOKEN=
ACCESS_TOKEN=
EXP_CH_DRIVER=true
EXP_AUTOCOMPLETE=true
EXP_ALERTS=true
EXP_ERRORS_SEARCH=true
EXP_METRICS=true
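
This FastAPI/SQLAlchemy service's env file keeps its connection-pool settings and gains the same EXP_* flags. A hedged sketch of wiring the pool values into an engine via python-decouple (pinned in this service's dependencies); the database URL is not part of this hunk, so the variable name and default below are placeholders:

from decouple import config
from sqlalchemy import create_engine

# Pool settings come straight from the env file; the DSN name is a placeholder.
engine = create_engine(
    config("DATABASE_URL", default="postgresql+psycopg2://localhost/postgres"),
    max_overflow=config("MAX_OVERFLOW", default=10, cast=int),
    pool_timeout=config("POOL_TIMEOUT", default=30, cast=int),
    pool_recycle=config("POOL_RECYCLE", default=3600, cast=int),
)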

View file

@@ -1,6 +1,5 @@
fastapi
SQLAlchemy==2.0.21
uvicorn==0.23.2
fastapi==0.115.8
SQLAlchemy==2.0.37
uvicorn==0.34.0
python-decouple==3.8
psycopg2-binary==2.9.7
psycopg2-binary==2.9.10