API v1.15.0 (#1536)

* feat(chalice): upgraded dependencies

* feat(chalice): changed path analysis schema

* feat(DB): click coordinate support

* feat(chalice): changed path analysis issues schema
feat(chalice): upgraded dependencies

* fix(chalice): fixed pydantic issue

* refactor(chalice): refresh token validator

* feat(chalice): role restrictions

* feat(chalice): EE path analysis changes

* refactor(DB): changed creation queries
refactor(DB): changed delete queries
feat(DB): support new path analysis payload

* feat(chalice): save path analysis card

* feat(chalice): restrict access

* feat(chalice): restrict access

* feat(chalice): EE save new path analysis card

* refactor(chalice): path analysis

* feat(chalice): path analysis new query

* fix(chalice): configurable CH config

* fix(chalice): assist autocomplete

* refactor(chalice): refactored permissions

* refactor(chalice): changed log level

* refactor(chalice): upgraded dependencies

* refactor(chalice): changed path analysis query

* refactor(chalice): changed path analysis query

* refactor(chalice): upgraded dependencies
refactor(alerts): upgraded dependencies
refactor(crons): upgraded dependencies

* feat(chalice): path analysis ignore start point

* feat(chalice): path analysis in progress

* refactor(chalice): path analysis changed link sort

* refactor(chalice): path analysis changed link sort

* refactor(chalice): path analysis changed link sort

* refactor(chalice): path analysis new query
refactor(chalice): authorizers

* refactor(chalice): refactored authorizer

* fix(chalice): fixed create card of PathAnalysis

* refactor(chalice): compute link-percentage for Path Analysis

* refactor(chalice): remove null starting point from Path Analysis

* feat(chalice): path analysis CH query

* refactor(chalice): changed Path Analysis links-value
fix(chalice): fixed search notes for EE

* feat(chalice): path analysis enhanced query results

* feat(chalice): include timezone in search sessions response

* refactor(chalice): refactored logs

* refactor(chalice): refactored logs
feat(chalice): get path analysis issues

* fix(chalice): fixed path analysis issues pagination

* fix(chalice): sessions-search handle null values

* feat(chalice): PathAnalysis start event support middle-event matching

* feat(chalice): PathAnalysis start event support middle-event matching

* feat(chalice): PathAnalysis support mixed events with start-point

* fix(chalice): PathAnalysis fixed eventType value when metricValue is missing

* fix(chalice): PathAnalysis fixed wrong super-class model for update card

* fix(chalice): PathAnalysis fixed search issues
refactor(chalice): upgraded dependencies

* fix(chalice): enforce isEvent if missing

* fix(chalice): enforce isEvent if missing

* refactor(chalice): refactored custom-metrics

* refactor(chalice): small changes

* feat(chalice): path analysis EE new query

* fix(chalice): fixed hide-excess state for Path Analysis

* fix(chalice): fixed update start point and excludes for Path Analysis

* fix(chalice): fix payload validation
fix(chalice): fix update widget endpoint

* fix(chalice): fix payload validation
fix(chalice): fix update widget endpoint

* fix(chalice): fix add member

* refactor(chalice): upgraded dependencies
refactor!(chalice): upgraded SAML dependencies

* feat(chalice): ios-project support 1/5
Authored by Kraiem Taha Yassine on 2023-10-20 18:16:15 +02:00; committed via GitHub
parent 4730cf7b06
commit d97034ccda
18 changed files with 84 additions and 73 deletions

View file

@@ -5,12 +5,12 @@ name = "pypi"
[packages]
requests = "==2.31.0"
boto3 = "==1.28.64"
boto3 = "==1.28.67"
pyjwt = "==2.8.0"
psycopg2-binary = "==2.9.9"
elasticsearch = "==8.10.1"
jira = "==3.5.2"
fastapi = "==0.103.2"
fastapi = "==0.104.0"
python-decouple = "==3.8"
apscheduler = "==3.10.4"
redis = "==5.0.1"

View file

@@ -20,11 +20,19 @@ class ProjectAuthorizer:
return
current_user: schemas.CurrentContext = await OR_context(request)
value = request.path_params[self.project_identifier]
-if (self.project_identifier == "projectId" \
-and (not (isinstance(value, int) or isinstance(value, str) and value.isnumeric())
-or projects.get_project(project_id=value, tenant_id=current_user.tenant_id) is None)) \
-or (self.project_identifier == "projectKey" \
-and projects.get_internal_project_id(project_key=value) is None):
+current_project = None
+if self.project_identifier == "projectId" \
+and isinstance(value, int) or isinstance(value, str) and value.isnumeric():
+current_project = projects.get_project(project_id=value, tenant_id=current_user.tenant_id)
+elif self.project_identifier == "projectKey":
+current_project = projects.get_by_project_key(project_key=value)
+if current_project is None:
+logger.debug("project not found")
+logger.debug(value)
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="project not found.")
+else:
+current_project = schemas.CurrentProjectContext(projectId=current_project["projectId"],
+projectKey=current_project["projectKey"],
+platform=current_project["platform"])
request.state.currentContext.project = current_project
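
For context (not part of the diff), a minimal sketch of the lookup order the rewritten authorizer follows: numeric projectId values go through a by-id lookup, projectKey values through a by-key lookup, and a miss becomes a 404 upstream. The helpers and data below are illustrative stand-ins for the projects module, not OpenReplay code.

    # In-memory stand-ins for projects.get_project / projects.get_by_project_key
    _PROJECTS = {1: {"projectId": 1, "projectKey": "a1b2c3", "platform": "web"}}

    def _get_project(project_id):
        return _PROJECTS.get(int(project_id))

    def _get_by_project_key(project_key):
        return next((p for p in _PROJECTS.values() if p["projectKey"] == project_key), None)

    def resolve_project(identifier_name, value):
        """Return the project row for a projectId or projectKey path param, or None."""
        current_project = None
        if identifier_name == "projectId" and str(value).isnumeric():
            current_project = _get_project(value)
        elif identifier_name == "projectKey":
            current_project = _get_by_project_key(value)
        return current_project  # the authorizer raises 404 when this is None

    print(resolve_project("projectId", "1"))      # the project row
    print(resolve_project("projectKey", "nope"))  # None -> 404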

View file

@@ -119,7 +119,8 @@ def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=
query = cur.mogrify(f"""SELECT s.project_id,
s.project_key,
s.name,
-s.save_request_payloads
+s.save_request_payloads,
+s.platform
{extra_select}
FROM public.projects AS s
WHERE s.project_id =%(project_id)s
@@ -131,28 +132,6 @@ def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=
return helper.dict_to_camel_case(row)
-def get_project_by_key(tenant_id, project_key, include_last_session=False, include_gdpr=None):
-with pg_client.PostgresClient() as cur:
-extra_select = ""
-if include_last_session:
-extra_select += """,(SELECT max(ss.start_ts)
-FROM public.sessions AS ss
-WHERE ss.project_key = %(project_key)s) AS last_recorded_session_at"""
-if include_gdpr:
-extra_select += ",s.gdpr"
-query = cur.mogrify(f"""SELECT s.project_key,
-s.name
-{extra_select}
-FROM public.projects AS s
-WHERE s.project_key =%(project_key)s
-AND s.deleted_at IS NULL
-LIMIT 1;""",
-{"project_key": project_key})
-cur.execute(query=query)
-row = cur.fetchone()
-return helper.dict_to_camel_case(row)
def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authorization=False):
if __exists_by_name(name=data.name, exclude_id=None):
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
@@ -218,16 +197,18 @@ def edit_gdpr(project_id, gdpr: schemas.GdprSchema):
return row
-def get_internal_project_id(project_key):
+def get_by_project_key(project_key):
with pg_client.PostgresClient() as cur:
query = cur.mogrify("""SELECT project_id
query = cur.mogrify("""SELECT project_id,
project_key,
platform
FROM public.projects
WHERE project_key =%(project_key)s
AND deleted_at ISNULL;""",
{"project_key": project_key})
cur.execute(query=query)
row = cur.fetchone()
return row["project_id"] if row else None
return helper.dict_to_camel_case(row)
def get_project_key(project_id):
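
A side note on the return values above: the rows come back from PostgreSQL with snake_case column names, and helper.dict_to_camel_case is what turns them into the projectId/projectKey keys used by the callers. A simplified, hedged stand-in for that helper (not the actual chalice implementation):

    def dict_to_camel_case(row):
        # convert snake_case keys from the DB row into camelCase keys for the API layer
        def camel(key):
            head, *rest = key.split("_")
            return head + "".join(part.capitalize() for part in rest)
        return {camel(k): v for k, v in row.items()} if row is not None else None

    db_row = {"project_id": 42, "project_key": "a1b2c3", "platform": "web"}
    print(dict_to_camel_case(db_row))  # {'projectId': 42, 'projectKey': 'a1b2c3', 'platform': 'web'}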

View file

@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
-boto3==1.28.64
+boto3==1.28.67
pyjwt==2.8.0
psycopg2-binary==2.9.9
elasticsearch==8.10.1
@@ -9,7 +9,7 @@ jira==3.5.2
-fastapi==0.103.2
+fastapi==0.104.0
uvicorn[standard]==0.23.2
python-decouple==3.8
pydantic[email]==2.3.0

View file

@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
-boto3==1.28.64
+boto3==1.28.67
pyjwt==2.8.0
psycopg2-binary==2.9.9
elasticsearch==8.10.1
@@ -9,7 +9,7 @@ jira==3.5.2
-fastapi==0.103.2
+fastapi==0.104.0
uvicorn[standard]==0.23.2
python-decouple==3.8
pydantic[email]==2.3.0

View file

@@ -565,7 +565,7 @@ def delete_alert(projectId: int, alertId: int, _=Body(None),
@app_apikey.put('/{projectKey}/sourcemaps', tags=["sourcemaps"])
def sign_sourcemap_for_upload(projectKey: str, data: schemas.SourcemapUploadPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
-project_id = projects.get_internal_project_id(projectKey)
+project_id = projects.get_by_project_key(project_key=projectKey)["projectId"]
if project_id is None:
return {"errors": ["Project not found."]}

View file

@@ -68,8 +68,8 @@ def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int,
dashboard_id=dashboardId, data=data)}
-@app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
-# @app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
+# @app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
+@app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int,
data: schemas.UpdateWidgetPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
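
The change above only swaps the active HTTP method on the widget-update route from POST to PUT. A minimal FastAPI sketch of the same idea; UpdateWidgetPayload and the route below are simplified illustrations, not OpenReplay's actual schema or router.

    from fastapi import Body, FastAPI
    from pydantic import BaseModel

    app = FastAPI()

    class UpdateWidgetPayload(BaseModel):
        name: str = "untitled"
        position: int = 0

    # update is now exposed as PUT; the old POST decorator is kept only as a comment upstream
    @app.put("/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}", tags=["dashboard"])
    def update_widget(projectId: int, dashboardId: int, widgetId: int,
                      data: UpdateWidgetPayload = Body(...)):
        return {"data": {"widgetId": widgetId, "name": data.name, "position": data.position}}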

View file

@@ -10,7 +10,7 @@ public_app, app, app_apikey = get_routers()
@app_apikey.get('/v1/{projectKey}/users/{userId}/sessions', tags=["api"])
def get_user_sessions(projectKey: str, userId: str, start_date: int = None, end_date: int = None):
-projectId = projects.get_internal_project_id(projectKey)
+projectId = projects.get_by_project_key(project_key=projectKey)["projectId"]
if projectId is None:
return {"errors": ["invalid projectKey"]}
return {
@@ -25,7 +25,7 @@ def get_user_sessions(projectKey: str, userId: str, start_date: int = None, end_
@app_apikey.get('/v1/{projectKey}/sessions/{sessionId}/events', tags=["api"])
def get_session_events(projectKey: str, sessionId: int):
-projectId = projects.get_internal_project_id(projectKey)
+projectId = projects.get_by_project_key(project_key=projectKey)["projectId"]
if projectId is None:
return {"errors": ["invalid projectKey"]}
return {
@@ -38,7 +38,7 @@ def get_session_events(projectKey: str, sessionId: int):
@app_apikey.get('/v1/{projectKey}/users/{userId}', tags=["api"])
def get_user_details(projectKey: str, userId: str):
-projectId = projects.get_internal_project_id(projectKey)
+projectId = projects.get_by_project_key(project_key=projectKey)["projectId"]
if projectId is None:
return {"errors": ["invalid projectKey"]}
return {
@@ -51,7 +51,7 @@ def get_user_details(projectKey: str, userId: str):
@app_apikey.delete('/v1/{projectKey}/users/{userId}', tags=["api"])
def schedule_to_delete_user_data(projectKey: str, userId: str, _=Body(None)):
-projectId = projects.get_internal_project_id(projectKey)
+projectId = projects.get_by_project_key(project_key=projectKey)["projectId"]
if projectId is None:
return {"errors": ["invalid projectKey"]}
record = jobs.create(project_id=projectId, user_id=userId)
@@ -60,7 +60,7 @@ def schedule_to_delete_user_data(projectKey: str, userId: str, _=Body(None)):
@app_apikey.get('/v1/{projectKey}/jobs', tags=["api"])
def get_jobs(projectKey: str):
-projectId = projects.get_internal_project_id(projectKey)
+projectId = projects.get_by_project_key(project_key=projectKey)["projectId"]
if projectId is None:
return {"errors": ["invalid projectKey"]}
return {"data": jobs.get_all(project_id=projectId)}
@@ -97,7 +97,7 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
@app_apikey.get('/v1/projects/{projectKey}', tags=["api"])
def get_project(projectKey: str, context: schemas.CurrentContext = Depends(OR_context)):
return {
"data": projects.get_project_by_key(tenant_id=context.tenant_id, project_key=projectKey)
"data": projects.get_by_project_key(tenant_id=context.tenant_id, project_key=projectKey)
}
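
Every /v1/{projectKey}/... route above repeats the same three lines: resolve the key, bail out with an error payload, otherwise use the numeric id. A hedged sketch of a small helper that could centralize that pattern; resolve_project_id and the stub lookup are hypothetical, not part of this change.

    def get_by_project_key(project_key):                 # stand-in for projects.get_by_project_key
        return {"projectId": 7} if project_key == "good-key" else None

    def resolve_project_id(project_key):
        """Return (project_id, None) on success or (None, error_payload) on an unknown key."""
        project = get_by_project_key(project_key)
        if project is None:
            return None, {"errors": ["invalid projectKey"]}
        return project["projectId"], None

    project_id, error = resolve_project_id("good-key")
    print(project_id, error)          # 7 None
    print(resolve_project_id("bad"))  # (None, {'errors': ['invalid projectKey']})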

View file

@@ -109,8 +109,15 @@ class CreateProjectSchema(BaseModel):
_transform_name = field_validator('name', mode='before')(remove_whitespace)
+class CurrentProjectContext(BaseModel):
+project_id: int = Field(...)
+project_key: str = Field(...)
+platform: str = Field(...)
class CurrentAPIContext(BaseModel):
tenant_id: int = Field(...)
+project: Optional[CurrentProjectContext] = Field(default=None)
class CurrentContext(CurrentAPIContext):
@@ -797,7 +804,7 @@ class PathAnalysisSubFilterSchema(BaseModel):
@model_validator(mode="before")
def __force_is_event(cls, values):
for v in values.get("filters"):
for v in values.get("filters", []):
if v.get("isEvent") is None:
v["isEvent"] = True
return values
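
The second hunk guards the "before" validator against a payload that has no filters key. A minimal, self-contained pydantic v2 sketch of the same pattern; FilterStub and PathAnalysisStub are simplified stand-ins for the real schemas.

    from typing import List, Optional
    from pydantic import BaseModel, model_validator

    class FilterStub(BaseModel):
        type: str
        isEvent: Optional[bool] = None

    class PathAnalysisStub(BaseModel):
        filters: List[FilterStub] = []

        @model_validator(mode="before")
        @classmethod
        def force_is_event(cls, values):
            # .get("filters", []) keeps the validator from crashing when filters is absent
            for f in values.get("filters", []):
                if f.get("isEvent") is None:
                    f["isEvent"] = True
            return values

    print(PathAnalysisStub(filters=[{"type": "location"}]).filters[0].isEvent)  # True
    print(PathAnalysisStub().filters)                                           # []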

View file

@@ -6,16 +6,16 @@ name = "pypi"
[packages]
urllib3 = "==1.26.16"
requests = "==2.31.0"
boto3 = "==1.28.64"
boto3 = "==1.28.67"
pyjwt = "==2.8.0"
psycopg2-binary = "==2.9.9"
elasticsearch = "==8.10.1"
jira = "==3.5.2"
fastapi = "==0.103.2"
fastapi = "==0.104.0"
gunicorn = "==21.2.0"
python-decouple = "==3.8"
apscheduler = "==3.10.4"
python3-saml = "==1.15.0"
python3-saml = "==1.16.0"
python-multipart = "==0.0.6"
redis = "==5.0.1"
azure-storage-blob = "==12.18.3"

View file

@@ -21,13 +21,26 @@ class ProjectAuthorizer:
current_user: schemas.CurrentContext = await OR_context(request)
value = request.path_params[self.project_identifier]
user_id = current_user.user_id if request.state.authorizer_identity == "jwt" else None
+current_project = None
if (self.project_identifier == "projectId" \
-and not projects.is_authorized(project_id=value, tenant_id=current_user.tenant_id,
-user_id=user_id)) \
-or (self.project_identifier == "projectKey" \
-and not projects.is_authorized(
-project_id=projects.get_internal_project_id(value),
-tenant_id=current_user.tenant_id, user_id=user_id)):
+and isinstance(value, int) or (isinstance(value, str) and value.isnumeric()) \
+and projects.is_authorized(project_id=value, tenant_id=current_user.tenant_id,
+user_id=user_id)):
+current_project = projects.get_project(tenant_id=current_user.tenant_id, project_id=value)
+elif self.project_identifier == "projectKey":
+current_project = projects.get_by_project_key(value)
+if current_project is not None \
+and projects.is_authorized(project_id=current_project["projectId"],
+tenant_id=current_user.tenant_id,
+user_id=user_id):
+current_project = None
+if current_project is None:
+logger.debug("unauthorized project")
+logger.debug(value)
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="unauthorized project.")
+else:
+current_project = schemas.CurrentProjectContext(projectId=current_project["projectId"],
+projectKey=current_project["projectKey"],
+platform=current_project["platform"])
request.state.currentContext.project = current_project
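
For context (not part of the diff), a sketch of the gate this EE authorizer adds on top of the plain lookup: the resolved project is only kept when is_authorized accepts the caller, otherwise it is dropped and the request fails with 401. The stubbed rule and helper names below are illustrative, and the kept/dropped logic reflects my reading of the hunk rather than the exact code.

    def is_authorized(project_id, tenant_id, user_id):
        return (project_id, tenant_id) == (1, 10)        # toy rule for the sketch

    def authorize(project, tenant_id, user_id):
        """Return a trimmed project context, or None (-> 401 upstream) when access is denied."""
        if project is None or not is_authorized(project["projectId"], tenant_id, user_id):
            return None
        return {"projectId": project["projectId"], "projectKey": project["projectKey"],
                "platform": project["platform"]}

    print(authorize({"projectId": 1, "projectKey": "k1", "platform": "web"}, 10, 99))  # kept
    print(authorize({"projectId": 2, "projectKey": "k2", "platform": "web"}, 10, 99))  # None -> 401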

View file

@@ -236,9 +236,11 @@ def edit_gdpr(project_id, gdpr: schemas.GdprSchema):
return row
-def get_internal_project_id(project_key):
+def get_by_project_key(project_key):
with pg_client.PostgresClient() as cur:
query = cur.mogrify("""SELECT project_id
query = cur.mogrify("""SELECT project_id,
project_key,
platform
FROM public.projects
WHERE project_key =%(project_key)s
AND deleted_at ISNULL;""",

View file

@@ -224,7 +224,7 @@ def create_member(tenant_id, user_id, data: schemas.CreateMemberSchema, backgrou
if data.name is None or len(data.name) == 0:
data.name = data.email
role_id = data.get("roleId")
role_id = data.roleId
if role_id is None:
role_id = roles.get_role_by_name(tenant_id=tenant_id, name="member").get("roleId")
invitation_token = __generate_invitation_token()
@@ -237,11 +237,11 @@ def create_member(tenant_id, user_id, data: schemas.CreateMemberSchema, backgrou
new_member = create_new_member(tenant_id=tenant_id, email=data["email"], invitation_token=invitation_token,
admin=data.get("admin", False), name=data.name, role_id=role_id)
else:
new_member = create_new_member(tenant_id=tenant_id, email=data["email"], invitation_token=invitation_token,
admin=data.get("admin", False), name=data.name, role_id=role_id)
new_member = create_new_member(tenant_id=tenant_id, email=data.email, invitation_token=invitation_token,
admin=data.admin, name=data.name, role_id=role_id)
new_member["invitationLink"] = __get_invitation_link(new_member.pop("invitationToken"))
background_tasks.add_task(email_helper.send_team_invitation, **{
"recipient": data["email"],
"recipient": data.email,
"invitation_link": new_member["invitationLink"],
"client_id": tenants.get_by_tenant_id(tenant_id)["name"],
"sender_name": admin["name"]

View file

@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
-boto3==1.28.64
+boto3==1.28.67
pyjwt==2.8.0
psycopg2-binary==2.9.9
elasticsearch==8.10.1
@@ -9,7 +9,7 @@ jira==3.5.2
-fastapi==0.103.2
+fastapi==0.104.0
uvicorn[standard]==0.23.2
python-decouple==3.8
pydantic[email]==2.3.0

View file

@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
-boto3==1.28.64
+boto3==1.28.67
pyjwt==2.8.0
psycopg2-binary==2.9.9
elasticsearch==8.10.1
@@ -9,7 +9,7 @@ jira==3.5.2
-fastapi==0.103.2
+fastapi==0.104.0
python-decouple==3.8
pydantic[email]==2.3.0
apscheduler==3.10.4

View file

@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
-boto3==1.28.64
+boto3==1.28.67
pyjwt==2.8.0
psycopg2-binary==2.9.9
elasticsearch==8.10.1
@@ -9,7 +9,7 @@ jira==3.5.2
-fastapi==0.103.2
+fastapi==0.104.0
uvicorn[standard]==0.23.2
gunicorn==21.2.0
python-decouple==3.8
@@ -20,7 +20,7 @@ clickhouse-driver[lz4]==0.2.6
# TODO: enable after xmlsec fix https://github.com/xmlsec/python-xmlsec/issues/252
#--no-binary is used to avoid libxml2 library version incompatibilities between xmlsec and lxml
#python3-saml==1.15.0 --no-binary=lxml
-python3-saml==1.15.0
+python3-saml==1.16.0
python-multipart==0.0.6
redis==5.0.1

View file

@@ -68,8 +68,8 @@ def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int,
dashboard_id=dashboardId, data=data)}
-@app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
-# @app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
+# @app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
+@app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int,
data: schemas.UpdateWidgetPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):

View file

@@ -20,7 +20,7 @@ def get_assist_credentials():
@app_apikey.get('/v1/{projectKey}/assist/sessions', tags=["api"])
def get_sessions_live(projectKey: str, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)):
-projectId = projects.get_internal_project_id(projectKey)
+projectId = projects.get_by_project_key(projectKey)
if projectId is None:
return {"errors": ["invalid projectKey"]}
return core.get_sessions_live(projectId=projectId, userId=userId, context=context)
@@ -29,7 +29,7 @@ def get_sessions_live(projectKey: str, userId: str = None, context: schemas.Curr
@app_apikey.post('/v1/{projectKey}/assist/sessions', tags=["api"])
def sessions_live(projectKey: str, data: schemas.LiveSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
-projectId = projects.get_internal_project_id(projectKey)
+projectId = projects.get_by_project_key(projectKey)
if projectId is None:
return {"errors": ["invalid projectKey"]}
return core.sessions_live(projectId=projectId, data=data, context=context)
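
One more note on the assist routes above: projects.get_by_project_key now returns a whole row rather than a bare id, so the value held in projectId here is a mapping. A short sketch of both shapes, assuming the row layout from the earlier hunks; the stub lookup is illustrative only.

    row = {"projectId": 3, "projectKey": "known-key", "platform": "web"}

    def get_by_project_key(project_key):               # stand-in lookup
        return row if project_key == row["projectKey"] else None

    project = get_by_project_key("known-key")
    print(project)                 # the whole row, which is what these routes now hold
    print(project["projectId"])    # the bare id that get_internal_project_id used to return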