API v1.15.0 (#1481)

* feat(chalice): upgraded dependencies

* feat(chalice): changed path analysis schema

* feat(DB): click coordinate support

* feat(chalice): changed path analysis issues schema
feat(chalice): upgraded dependencies

* fix(chalice): fixed pydantic issue

* refactor(chalice): refresh token validator

* feat(chalice): role restrictions
Kraiem Taha Yassine authored on 2023-09-28 09:59:31 +01:00, committed by GitHub
parent 1e198c9731
commit 33f5d078dd
31 changed files with 372 additions and 175 deletions


@@ -5,18 +5,18 @@ name = "pypi"
[packages]
requests = "==2.31.0"
boto3 = "==1.28.42"
boto3 = "==1.28.55"
pyjwt = "==2.8.0"
psycopg2-binary = "==2.9.7"
elasticsearch = "==8.9.0"
elasticsearch = "==8.10.0"
jira = "==3.5.2"
fastapi = "==0.103.1"
python-decouple = "==3.8"
apscheduler = "==3.10.4"
redis = "==5.0.0"
redis = "==5.0.1"
urllib3 = "==1.26.16"
uvicorn = {version = "==0.23.2", extras = ["standard"]}
pydantic = {version = "==2.3.0", extras = ["email"]}
uvicorn = {extras = ["standard"], version = "==0.23.2"}
pydantic = {extras = ["email"], version = "==2.3.0"}
[dev-packages]


@@ -18,7 +18,8 @@ def _get_current_auth_context(request: Request, jwt_payload: dict) -> schemas.Cu
request.state.authorizer_identity = "jwt"
request.state.currentContext = schemas.CurrentContext(tenantId=jwt_payload.get("tenantId", -1),
userId=jwt_payload.get("userId", -1),
email=user["email"])
email=user["email"],
role=user["role"])
return request.state.currentContext


@@ -1,5 +1,5 @@
import json
from typing import Union
from typing import Union, List
from decouple import config
from fastapi import HTTPException, status
@@ -103,8 +103,7 @@ def __get_path_analysis_chart(project_id: int, user_id: int, data: schemas.CardP
elif not isinstance(data.series[0].filter, schemas.PathAnalysisSchema):
data.series[0].filter = schemas.PathAnalysisSchema()
return product_analytics.path_analysis(project_id=project_id, data=data.series[0].filter, density=data.density,
selected_event_type=data.metric_value, hide_minor_paths=data.hide_excess)
return product_analytics.path_analysis(project_id=project_id, data=data)
def __get_timeseries_chart(project_id: int, data: schemas.CardTimeSeries, user_id: int = None):
@@ -293,28 +292,28 @@ def __get_funnel_issues(project_id: int, user_id: int, data: schemas.CardFunnel)
def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
if len(data.series) == 0:
return {"data": []}
filters = []
print(data.series[0].filter.filters)
for f in data.series[0].filter.filters:
if schemas.ProductAnalyticsFilterType.has_value(f.type):
for sf in f.filters:
o = sf.model_dump()
o["isEvent"] = True
if f.type == schemas.ProductAnalyticsFilterType.exclude:
o["operator"] = "notOn"
filters.append(o)
card_table = schemas.CardTable(
startTimestamp=data.startTimestamp,
endTimestamp=data.endTimestamp,
metricType=schemas.MetricType.table,
metricOf=schemas.MetricOfTable.issues,
viewType=schemas.MetricTableViewType.table,
series=data.model_dump()["series"])
for s in data.start_point:
if data.start_type == "end":
card_table.series[0].filter.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
operator=s.operator,
value=s.value))
else:
o = f.model_dump()
o["isEvent"] = False
filters.append(o)
return __get_table_of_issues(project_id=project_id, user_id=user_id,
data=schemas.CardTable(
startTimestamp=data.startTimestamp,
endTimestamp=data.endTimestamp,
metricType=schemas.MetricType.table,
metricOf=schemas.MetricOfTable.issues,
viewType=schemas.MetricTableViewType.table,
series=[{"filter": {"filters": filters}}]))
card_table.series[0].filter.filters.insert(0, schemas.SessionSearchEventSchema2(type=s.type,
operator=s.operator,
value=s.value))
for s in data.exclude:
card_table.series[0].filter.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
operator=schemas.SearchEventOperator._not_on,
value=s.value))
return __get_table_of_issues(project_id=project_id, user_id=user_id, data=card_table)
def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):


@@ -63,35 +63,47 @@ JOURNEY_TYPES = {
}
def path_analysis(project_id: int, data: schemas.PathAnalysisSchema,
selected_event_type: List[schemas.ProductAnalyticsSelectedEventType],
density: int = 4, hide_minor_paths: bool = False):
# pg_sub_query_subset = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
# time_constraint=True)
# TODO: check if data=args is required
pg_sub_query_subset = __get_constraints(project_id=project_id, duration=True, main_table="s", time_constraint=True)
def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sub_events = []
start_points_join = ""
start_points_conditions = []
sessions_conditions = ["start_ts>=%(startTimestamp)s", "start_ts<%(endTimestamp)s",
"project_id=%(project_id)s", "events_count > 1", "duration>0"]
if len(selected_event_type) == 0:
selected_event_type.append(schemas.ProductAnalyticsSelectedEventType.location)
if len(data.metric_value) == 0:
data.metric_value.append(schemas.ProductAnalyticsSelectedEventType.location)
sub_events.append({"table": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["table"],
"column": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["column"],
"eventType": schemas.ProductAnalyticsSelectedEventType.location.value})
else:
for v in selected_event_type:
for v in data.metric_value:
if JOURNEY_TYPES.get(v):
sub_events.append({"table": JOURNEY_TYPES[v]["table"],
"column": JOURNEY_TYPES[v]["column"],
"eventType": v})
extra_values = {}
reverse = False
meta_keys = None
reverse = data.start_type == "end"
for i, sf in enumerate(data.start_point):
f_k = f"start_point_{i}"
op = sh.get_sql_operator(sf.operator)
is_not = sh.is_negation_operator(sf.operator)
extra_values = {**extra_values, **sh.multi_values(sf.value, value_key=f_k)}
start_points_conditions.append(f"(event_type='{sf.type}' AND " +
sh.multi_conditions(f'e_value {op} %({f_k})s', sf.value, is_not=is_not,
value_key=f_k)
+ ")")
exclusions = {}
for i, f in enumerate(data.filters):
for i, ef in enumerate(data.exclude):
if ef.type in data.metric_value:
f_k = f"exclude_{i}"
extra_values = {**extra_values, **sh.multi_values(ef.value, value_key=f_k)}
exclusions[ef.type] = [
sh.multi_conditions(f'{JOURNEY_TYPES[ef.type]["column"]} != %({f_k})s', ef.value, is_not=True,
value_key=f_k)]
meta_keys = None
for i, f in enumerate(data.series[0].filter.filters):
op = sh.get_sql_operator(f.operator)
is_any = sh.isAny_opreator(f.operator)
is_not = sh.is_negation_operator(f.operator)
@@ -99,23 +111,6 @@ def path_analysis(project_id: int, data: schemas.PathAnalysisSchema,
f_k = f"f_value_{i}"
extra_values = {**extra_values, **sh.multi_values(f.value, value_key=f_k)}
if f.type in [schemas.ProductAnalyticsFilterType.start_point, schemas.ProductAnalyticsFilterType.end_point]:
for sf in f.filters:
extra_values = {**extra_values, **sh.multi_values(sf.value, value_key=f_k)}
start_points_conditions.append(f"(event_type='{sf.type}' AND " +
sh.multi_conditions(f'e_value {op} %({f_k})s', sf.value, is_not=is_not,
value_key=f_k)
+ ")")
reverse = f.type == schemas.ProductAnalyticsFilterType.end_point
elif f.type == schemas.ProductAnalyticsFilterType.exclude:
for sf in f.filters:
if sf.type in selected_event_type:
extra_values = {**extra_values, **sh.multi_values(sf.value, value_key=f_k)}
exclusions[sf.type] = [
sh.multi_conditions(f'{JOURNEY_TYPES[sf.type]["column"]} != %({f_k})s', sf.value, is_not=True,
value_key=f_k)]
# ---- meta-filters
if f.type == schemas.FilterType.user_browser:
if is_any:
@@ -347,8 +342,8 @@ FROM limited_events
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value, sessions_count
ORDER BY event_number_in_session, e_value, next_value;"""
params = {"project_id": project_id, "startTimestamp": data.startTimestamp,
"endTimestamp": data.endTimestamp, "density": density,
"eventThresholdNumberInGroup": 8 if hide_minor_paths else 6,
"endTimestamp": data.endTimestamp, "density": data.density,
"eventThresholdNumberInGroup": 8 if data.hide_excess else 6,
# TODO: add if data=args is required
# **__get_constraint_values(args),
**extra_values}


@@ -19,7 +19,7 @@ change_password_link=/reset-password?invitation=%s&&pass=%s
invitation_link=/api/users/invitation?token=%s
js_cache_bucket=sessions-assets
jwt_algorithm=HS512
JWT_EXPIRATION=120
JWT_EXPIRATION=1800
JWT_REFRESH_EXPIRATION=604800
JWT_ISSUER=openreplay-oss
jwt_secret="SET A RANDOM STRING HERE"
@@ -60,3 +60,4 @@ REDIS_STRING=redis://redis-master.db.svc.cluster.local:6379
SCH_DELETE_DAYS=30
IOS_BUCKET=mobs
IOS_VIDEO_BUCKET=mobs
TZ=UTC
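Note: JWT_EXPIRATION moves from 120 to 1800 seconds (30 minutes), while JWT_REFRESH_EXPIRATION stays at 604800 seconds (7 days). A minimal sketch of how these values typically feed token issuance with pyjwt 2.8.0 (claim names beyond iat/exp/iss are assumptions, not the exact chalice payload):

import jwt  # pyjwt==2.8.0
from datetime import datetime, timedelta, timezone

JWT_EXPIRATION = 1800  # was 120: access tokens now live 30 minutes

def issue_access_token(user_id: int, tenant_id: int, secret: str) -> str:
    now = datetime.now(timezone.utc)
    payload = {
        "userId": user_id,        # assumed claim names, matching jwt_payload.get(...) above
        "tenantId": tenant_id,
        "iat": now,
        "exp": now + timedelta(seconds=JWT_EXPIRATION),
        "iss": "openreplay-oss",  # JWT_ISSUER from this env file
        "aud": "api",             # assumption: the audience value is deployment-specific
    }
    return jwt.encode(payload, secret, algorithm="HS512")  # jwt_algorithm above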

api/env.dev Normal file (+65 lines)

@@ -0,0 +1,65 @@
EMAIL_FROM=Openreplay-taha<do-not-reply@openreplay.com>
EMAIL_HOST=email-smtp.eu-west-1.amazonaws.com
EMAIL_PASSWORD=password
EMAIL_PORT=587
EMAIL_SSL_CERT=''
EMAIL_SSL_KEY=''
EMAIL_USER=user
EMAIL_USE_SSL=false
EMAIL_USE_TLS=true
S3_HOST=https://foss.openreplay.com:443
S3_KEY=key
S3_SECRET=secret
SITE_URL=http://127.0.0.1:3333
announcement_url=https://asayer-announcements.s3.eu-central-1.amazonaws.com/
captcha_key=
captcha_server=
change_password_link=/changepassword?invitation=%s&&pass=%s
invitation_link=/users/invitation?token=%s
js_cache_bucket=asayer-sessions-assets-staging
jwt_algorithm=HS512
JWT_EXPIRATION=6000
JWT_REFRESH_EXPIRATION=60
JWT_ISSUER=openreplay-local-staging
jwt_secret=secret
JWT_REFRESH_SECRET=another_secret
ASSIST_URL=http://127.0.0.1:9001/assist/%s
assist=/sockets-live
assistList=/sockets-list
# FOSS
pg_dbname=postgres
pg_host=127.0.0.1
pg_password=password
pg_port=5420
pg_user=postgres
PG_TIMEOUT=20
PG_MINCONN=2
PG_MAXCONN=5
PG_RETRY_MAX=50
PG_RETRY_INTERVAL=2
PG_POOL=true
sessions_bucket=mobs
sessions_region=us-east-1
sourcemaps_bucket=asayer-sourcemaps-staging
sourcemaps_reader=http://127.0.0.1:3000/sourcemaps
LOGLEVEL=INFO
FS_DIR=/Users/tahayk/asayer/openreplay/api/.local
ASSIST_KEY=abc
EFS_SESSION_MOB_PATTERN=%(sessionId)s/dom.mob
EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob
SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs
SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe
DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mobs
PRESIGNED_URL_EXPIRATION=3600
ASSIST_JWT_EXPIRATION=14400
ASSIST_JWT_SECRET=secret
REDIS_STRING=redis://127.0.0.1:6379
LOCAL_DEV=true
TZ=UTC
docs_url=/docs
root_path=''
docs_url=/docs
IOS_BUCKET=mobs
IOS_VIDEO_BUCKET=mobs


@@ -6,6 +6,8 @@ from starlette import status
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import Response, JSONResponse
from fastapi.security import SecurityScopes
from fastapi import Depends, Security
import schemas
from chalicelib.utils import helper
@@ -48,3 +50,14 @@ class ORRoute(APIRoute):
return response
return custom_route_handler
def __check_role(required_roles: SecurityScopes, context: schemas.CurrentContext = Depends(OR_context)):
if len(required_roles.scopes) > 0:
if context.role not in required_roles.scopes:
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
detail="You need a different role to access this resource")
def OR_role(*required_roles):
return Security(__check_role, scopes=list(required_roles))
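OR_role rides on FastAPI's SecurityScopes: the role names passed at the route become scopes, and the dependency rejects any authenticated user whose context.role is not among them. A self-contained sketch of the same pattern (everything except SecurityScopes/Security/Depends is illustrative):

from fastapi import Depends, FastAPI, HTTPException, Security, status
from fastapi.security import SecurityScopes

app = FastAPI()

def current_role() -> str:
    # Stand-in for OR_context; the real dependency resolves the JWT-backed context.
    return "member"

def check_role(required: SecurityScopes, role: str = Depends(current_role)):
    if required.scopes and role not in required.scopes:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
                            detail="You need a different role to access this resource")

def role_required(*roles):
    return Security(check_role, scopes=list(roles))

@app.post("/projects", dependencies=[role_required("owner", "admin")])
def create_project():
    return {"ok": True}  # a "member" caller is rejected with 401 before this runs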

api/prepare-dev.sh Executable file (+9 lines)

@@ -0,0 +1,9 @@
#!/bin/bash
DOTENV_FILE=./.env
if [ -f "$DOTENV_FILE" ]; then
echo "$DOTENV_FILE exists, nothing to do."
else
cp env.dev $DOTENV_FILE
echo "$DOTENV_FILE was created, please fill the missing required values."
fi


@@ -1,10 +1,10 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
boto3==1.28.42
boto3==1.28.55
pyjwt==2.8.0
psycopg2-binary==2.9.7
elasticsearch==8.9.0
elasticsearch==8.10.0
jira==3.5.2


@@ -1,10 +1,10 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
boto3==1.28.42
boto3==1.28.55
pyjwt==2.8.0
psycopg2-binary==2.9.7
elasticsearch==8.9.0
elasticsearch==8.10.0
jira==3.5.2
@@ -15,4 +15,4 @@ python-decouple==3.8
pydantic[email]==2.3.0
apscheduler==3.10.4
redis==5.0.0
redis==5.0.1


@@ -13,7 +13,7 @@ from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assig
custom_metrics, saved_search, integrations_global
from chalicelib.core.collaboration_msteams import MSTeams
from chalicelib.core.collaboration_slack import Slack
from or_dependencies import OR_context
from or_dependencies import OR_context, OR_role
from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@@ -609,7 +609,7 @@ def mobile_signe(projectId: int, sessionId: int, data: schemas.MobileSignPayload
return {"data": mobile.sign_keys(project_id=projectId, session_id=sessionId, keys=data.keys)}
@app.post('/projects', tags=['projects'])
@app.post('/projects', tags=['projects'], dependencies=[OR_role("owner", "admin")])
def create_project(data: schemas.CreateProjectSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return projects.create(tenant_id=context.tenant_id, user_id=context.user_id, data=data)
@@ -624,13 +624,13 @@ def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_con
return {"data": data}
@app.put('/projects/{projectId}', tags=['projects'])
@app.put('/projects/{projectId}', tags=['projects'], dependencies=[OR_role("owner", "admin")])
def edit_project(projectId: int, data: schemas.CreateProjectSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return projects.edit(tenant_id=context.tenant_id, user_id=context.user_id, data=data, project_id=projectId)
@app.delete('/projects/{projectId}', tags=['projects'])
@app.delete('/projects/{projectId}', tags=['projects'], dependencies=[OR_role("owner", "admin")])
def delete_project(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
return projects.delete(tenant_id=context.tenant_id, user_id=context.user_id, project_id=projectId)
@@ -731,22 +731,22 @@ def delete_webhook(webhookId: int, _=Body(None), context: schemas.CurrentContext
return webhook.delete(tenant_id=context.tenant_id, webhook_id=webhookId)
@app.get('/client/members', tags=["client"])
@app.get('/client/members', tags=["client"], dependencies=[OR_role("owner", "admin")])
def get_members(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": users.get_members(tenant_id=context.tenant_id)}
@app.get('/client/members/{memberId}/reset', tags=["client"])
@app.get('/client/members/{memberId}/reset', tags=["client"], dependencies=[OR_role("owner", "admin")])
def reset_reinvite_member(memberId: int, context: schemas.CurrentContext = Depends(OR_context)):
return users.reset_member(tenant_id=context.tenant_id, editor_id=context.user_id, user_id_to_update=memberId)
@app.delete('/client/members/{memberId}', tags=["client"])
@app.delete('/client/members/{memberId}', tags=["client"], dependencies=[OR_role("owner", "admin")])
def delete_member(memberId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
return users.delete_member(tenant_id=context.tenant_id, user_id=context.user_id, id_to_delete=memberId)
@app.get('/account/new_api_key', tags=["account"])
@app.get('/account/new_api_key', tags=["account"], dependencies=[OR_role("owner", "admin")])
def generate_new_user_token(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": users.generate_new_api_key(user_id=context.user_id)}


@@ -15,7 +15,7 @@ from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import captcha, smtp
from chalicelib.utils import helper
from chalicelib.utils.TimeUTC import TimeUTC
from or_dependencies import OR_context
from or_dependencies import OR_context, OR_role
from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@@ -148,7 +148,7 @@ def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSc
changes={"name": data.name, "endpoint": data.url})}
@app.post('/client/members', tags=["client"])
@app.post('/client/members', tags=["client"], dependencies=[OR_role("owner", "admin")])
def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data,
@@ -185,7 +185,7 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema =
return users.set_password_invitation(new_password=data.password.get_secret_value(), user_id=user["userId"])
@app.put('/client/members/{memberId}', tags=["client"])
@app.put('/client/members/{memberId}', tags=["client"], dependencies=[OR_role("owner", "admin")])
def edit_member(memberId: int, data: schemas.EditMemberSchema,
context: schemas.CurrentContext = Depends(OR_context)):
return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data,


@@ -6,15 +6,15 @@ from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@app.get('/{projectId}/insights/journey', tags=["insights"])
async def get_insights_journey(projectId: int):
return {"data": product_analytics.path_analysis(project_id=projectId, data=schemas.PathAnalysisSchema())}
@app.post('/{projectId}/insights/journey', tags=["insights"])
async def get_insights_journey(projectId: int, data: schemas.PathAnalysisSchema = Body(...)):
return {"data": product_analytics.path_analysis(project_id=projectId, data=data)}
#
# @app.get('/{projectId}/insights/journey', tags=["insights"])
# async def get_insights_journey(projectId: int):
# return {"data": product_analytics.path_analysis(project_id=projectId, data=schemas.PathAnalysisSchema())}
#
#
# @app.post('/{projectId}/insights/journey', tags=["insights"])
# async def get_insights_journey(projectId: int, data: schemas.PathAnalysisSchema = Body(...)):
# return {"data": product_analytics.path_analysis(project_id=projectId, data=data)}
#
#
# @app.post('/{projectId}/insights/users_acquisition', tags=["insights"])


@@ -123,9 +123,25 @@ class CurrentAPIContext(BaseModel):
class CurrentContext(CurrentAPIContext):
user_id: int = Field(...)
email: EmailStr = Field(...)
role: str = Field(...)
_transform_email = field_validator('email', mode='before')(transform_email)
@computed_field
@property
def is_owner(self) -> bool:
return self.role == "owner"
@computed_field
@property
def is_admin(self) -> bool:
return self.role == "admin"
@computed_field
@property
def is_member(self) -> bool:
return self.role == "member"
class AddCollaborationSchema(BaseModel):
name: str = Field(...)
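The new role field plus the three computed properties mean serialized contexts now carry the role booleans automatically. A standalone illustration of pydantic v2's computed_field behaviour (pydantic==2.3.0), not the real class:

from pydantic import BaseModel, computed_field

class Ctx(BaseModel):
    role: str

    @computed_field
    @property
    def is_owner(self) -> bool:
        return self.role == "owner"

print(Ctx(role="owner").model_dump())  # {'role': 'owner', 'is_owner': True}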
@@ -863,67 +879,19 @@ class PathAnalysisSubFilterSchema(BaseModel):
class ProductAnalyticsFilter(BaseModel):
# The filters attribute will help with startPoint/endPoint/exclude
filters: Optional[List[PathAnalysisSubFilterSchema]] = Field(default=[])
type: Union[ProductAnalyticsFilterType, FilterType]
type: FilterType
operator: Union[SearchEventOperator, ClickEventExtraOperator, MathOperator] = Field(...)
# TODO: support session metadata filters
value: List[Union[IssueType, PlatformType, int, str]] = Field(...)
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
# @model_validator(mode='after')
# def __validator(cls, values):
# if values.type == ProductAnalyticsFilterType.event_type:
# assert values.value is not None and len(values.value) > 0, \
# f"value must be provided for type:{ProductAnalyticsFilterType.event_type}"
# assert ProductAnalyticsEventType.has_value(values.value[0]), \
# f"value must be of type {ProductAnalyticsEventType} for type:{ProductAnalyticsFilterType.event_type}"
#
# return values
class PathAnalysisSchema(_TimedSchema, _PaginatedSchema):
# startTimestamp: int = Field(default=TimeUTC.now(delta_days=-1))
# endTimestamp: int = Field(default=TimeUTC.now())
density: int = Field(default=7)
filters: List[ProductAnalyticsFilter] = Field(default=[])
type: Optional[str] = Field(default=None)
@model_validator(mode='after')
def __validator(cls, values):
filters = []
for f in values.filters:
if ProductAnalyticsFilterType.has_value(f.type) and (f.filters is None or len(f.filters) == 0):
continue
filters.append(f)
values.filters = filters
# Path analysis should have only 1 start-point with multiple values OR 1 end-point with multiple values
# start-point's value and end-point's value should not be excluded
s_e_detected = 0
s_e_values = {}
exclude_values = {}
for f in values.filters:
if f.type in (ProductAnalyticsFilterType.start_point, ProductAnalyticsFilterType.end_point):
s_e_detected += 1
for s in f.filters:
s_e_values[s.type] = s_e_values.get(s.type, []) + s.value
elif f.type in ProductAnalyticsFilterType.exclude:
for s in f.filters:
exclude_values[s.type] = exclude_values.get(s.type, []) + s.value
assert s_e_detected <= 1, f"Only 1 startPoint with multiple values OR 1 endPoint with multiple values is allowed"
for t in exclude_values:
for v in t:
assert v not in s_e_values.get(t, []), f"startPoint and endPoint cannot be excluded, value: {v}"
return values
# class AssistSearchPayloadSchema(BaseModel):
# filters: List[dict] = Field([])
class MobileSignPayloadSchema(BaseModel):
keys: List[str] = Field(...)
@@ -1319,6 +1287,10 @@ class CardPathAnalysis(__CardSchema):
metric_value: List[ProductAnalyticsSelectedEventType] = Field(default=[ProductAnalyticsSelectedEventType.location])
density: int = Field(default=4, ge=2, le=10)
start_type: Literal["start", "end"] = Field(default="start")
start_point: List[PathAnalysisSubFilterSchema] = Field(default=[])
exclude: List[PathAnalysisSubFilterSchema] = Field(default=[])
series: List[CardPathAnalysisSchema] = Field(default=[])
@model_validator(mode="before")
@@ -1331,11 +1303,8 @@ class CardPathAnalysis(__CardSchema):
@model_validator(mode="after")
def __enforce_metric_value(cls, values):
metric_value = []
for s in values.series:
for f in s.filter.filters:
if f.type in (ProductAnalyticsFilterType.start_point, ProductAnalyticsFilterType.end_point):
for ff in f.filters:
metric_value.append(ff.type)
for s in values.start_point:
metric_value.append(s.type)
if len(metric_value) > 0:
metric_value = remove_duplicate_values(metric_value)
@@ -1343,9 +1312,29 @@ class CardPathAnalysis(__CardSchema):
return values
@model_validator(mode="after")
def __transform(cls, values):
# values.metric_of = MetricOfClickMap(values.metric_of)
# @model_validator(mode="after")
# def __transform(cls, values):
# # values.metric_of = MetricOfClickMap(values.metric_of)
# return values
@model_validator(mode='after')
def __validator(cls, values):
# Path analysis should have only 1 start-point with multiple values OR 1 end-point with multiple values
# start-point's value and end-point's value should not be excluded
s_e_values = {}
exclude_values = {}
for f in values.start_point:
s_e_values[f.type] = s_e_values.get(f.type, []) + f.value
for f in values.exclude:
exclude_values[f.type] = exclude_values.get(f.type, []) + f.value
assert len(values.start_point) <= 1, "Only 1 startPoint with multiple values OR 1 endPoint with multiple values is allowed"
for t in exclude_values:
for v in exclude_values[t]:
assert v not in s_e_values.get(t, []), f"startPoint and endPoint cannot be excluded, value: {v}"
return values
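With startPoint, exclude, startType, density, and hideExcess now living directly on CardPathAnalysis, a path-analysis request body looks roughly like this (illustrative values; camelCase aliases and the operator/metricType literals are assumptions drawn from the surrounding schemas):

payload = {
    "metricType": "pathAnalysis",
    "metricValue": ["location"],  # event types to chart; defaults to location
    "density": 4,                 # clamped to 2..10 by the Field constraint
    "startType": "start",         # "end" reverses the walk
    "startPoint": [{"type": "location", "operator": "is", "value": ["/checkout"]}],
    "exclude": [{"type": "location", "operator": "is", "value": ["/logout"]}],
    "hideExcess": True,           # eventThresholdNumberInGroup 8 instead of 6
    "series": [{"filter": {"filters": []}}],  # session filters for the journey subset
}
# The validator above allows at most one startPoint entry and rejects any
# excluded value that is also used as a start/end point.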


@@ -6,10 +6,10 @@ name = "pypi"
[packages]
urllib3 = "==1.26.16"
requests = "==2.31.0"
boto3 = "==1.28.42"
boto3 = "==1.28.55"
pyjwt = "==2.8.0"
psycopg2-binary = "==2.9.7"
elasticsearch = "==8.9.0"
elasticsearch = "==8.10.0"
jira = "==3.5.2"
fastapi = "==0.103.1"
gunicorn = "==21.2.0"
@@ -17,11 +17,11 @@ python-decouple = "==3.8"
apscheduler = "==3.10.4"
python3-saml = "==1.15.0"
python-multipart = "==0.0.6"
redis = "==5.0.0"
azure-storage-blob = "==12.17.0"
uvicorn = {version = "==0.23.2", extras = ["standard"]}
pydantic = {version = "==2.3.0", extras = ["email"]}
clickhouse-driver = {version = "==0.2.6", extras = ["lz4"]}
redis = "==5.0.1"
azure-storage-blob = "==12.18.2"
uvicorn = {extras = ["standard"], version = "==0.23.2"}
pydantic = {extras = ["email"], version = "==2.3.0"}
clickhouse-driver = {extras = ["lz4"], version = "==0.2.6"}
[dev-packages]


@@ -66,8 +66,7 @@ class JWTAuth(HTTPBearer):
auth_exists = jwt_payload is not None \
and users.auth_exists(user_id=jwt_payload.get("userId", -1),
tenant_id=jwt_payload.get("tenantId", -1),
jwt_iat=jwt_payload.get("iat", 100),
jwt_aud=jwt_payload.get("aud", ""))
jwt_iat=jwt_payload.get("iat", 100))
if jwt_payload is None \
or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
or not auth_exists:


@@ -627,7 +627,7 @@ def get_by_invitation_token(token, pass_token=None):
return helper.dict_to_camel_case(r)
def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
def auth_exists(user_id, tenant_id, jwt_iat):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
@@ -651,7 +651,7 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
and (abs(jwt_iat - r["jwt_iat"]) <= 1))
def refresh_auth_exists(user_id, tenant_id, jwt_iat, jwt_aud, jwt_jti=None):
def refresh_auth_exists(user_id, tenant_id, jwt_jti=None):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""SELECT user_id
@@ -852,6 +852,7 @@ def refresh(user_id: int, tenant_id: int) -> dict:
"refreshTokenMaxAge": config("JWT_REFRESH_EXPIRATION", cast=int) - (jwt_iat - jwt_r_iat)
}
def authenticate_sso(email, internal_id, exp=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
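The refreshTokenMaxAge returned by refresh() is the remaining lifetime of the refresh token: JWT_REFRESH_EXPIRATION minus the seconds already elapsed between the refresh token's iat and the newly issued access token's iat. A worked example with the shipped defaults:

JWT_REFRESH_EXPIRATION = 604800  # 7 days, from the env defaults

jwt_r_iat = 1_695_600_000        # refresh token issued (example epoch seconds)
jwt_iat = jwt_r_iat + 2 * 86400  # access token refreshed two days later

refresh_token_max_age = JWT_REFRESH_EXPIRATION - (jwt_iat - jwt_r_iat)
print(refresh_token_max_age)     # 432000 -> 5 days of refresh validity left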


@@ -29,7 +29,7 @@ idp_x509cert=
invitation_link=/api/users/invitation?token=%s
js_cache_bucket=sessions-assets
jwt_algorithm=HS512
JWT_EXPIRATION=120
JWT_EXPIRATION=1800
JWT_REFRESH_EXPIRATION=604800
JWT_ISSUER=openreplay-oss
jwt_secret="SET A RANDOM STRING HERE"
@@ -77,3 +77,4 @@ ASSIST_JWT_SECRET=
KAFKA_SERVERS=kafka.db.svc.cluster.local:9092
KAFKA_USE_SSL=false
SCH_DELETE_DAYS=30
TZ=UTC

ee/api/env.dev Normal file (+87 lines)

@@ -0,0 +1,87 @@
announcement_url=https://asayer-announcements.s3.eu-central-1.amazonaws.com/
captcha_key=
captcha_server=
ch_host=127.0.0.1
## ee.openreplay
ch_port=8141
ch_user=""
ch_password=password
ch_timeout=30
ch_receive_timeout=10
change_password_link=/changepassword?invitation=%s&&pass=%s
EMAIL_FROM=Asayer-local<do-not-reply@asayer.io>
EMAIL_HOST=email-smtp.eu-west-1.amazonaws.com
EMAIL_PASSWORD=password
EMAIL_PORT=587
EMAIL_SSL_CERT=
EMAIL_SSL_KEY=
EMAIL_USE_SSL=false
EMAIL_USE_TLS=true
EMAIL_USER=user
invitation_link=/users/invitation?token=%s
IOS_BUCKET=asayer-mobile-mob-staging
IOS_MIDDLEWARE=https://staging-str.asayer.io
js_cache_bucket=asayer-sessions-assets-staging
jwt_algorithm=HS512
JWT_EXPIRATION=10
JWT_REFRESH_EXPIRATION=60
JWT_ISSUER=openreplay-local-staging
jwt_secret=secret
JWT_REFRESH_SECRET=another_secret
LICENSE_KEY=KEY
# ee.openreplay
pg_dbname=postgres
pg_host=127.0.0.1
pg_password=password
pg_port=5421
pg_user=postgres
PG_TIMEOUT=20
PG_MINCONN=5
PG_MAXCONN=10
PG_RETRY_MAX=50
PG_RETRY_INTERVAL=2
ASSIST_RECORDS_BUCKET=asayer-mobs-staging
sessions_bucket=asayer-mobs-staging
sessions_region=eu-central-1
SITE_URL=http://127.0.0.1:3333
sourcemaps_bucket=asayer-sourcemaps-staging
sourcemaps_reader=http://127.0.0.1:3000/
idp_entityId=
idp_sso_url=
idp_sls_url=''
idp_name=okta
idp_x509cert=
ASSIST_URL=http://127.0.0.1:9001/assist/%s
assist=http://127.0.0.1:9001/assist/%s/sockets-live
assistList=/sockets-list
FS_DIR=/tmp
PG_POOL=true
EXP_SESSIONS_SEARCH=false
EXP_AUTOCOMPLETE=true
EXP_ERRORS_SEARCH=false
EXP_ERRORS_GET=false
EXP_METRICS=true
EXP_7D_MV=false
EXP_ALERTS=false
EXP_FUNNELS=false
EXP_RESOURCES=true
EXP_SESSIONS_SEARCH_METRIC=true
ASSIST_KEY=abc
EFS_SESSION_MOB_PATTERN=%(sessionId)s/dom.mob
EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob
SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs
SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe
DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mobs
PRESIGNED_URL_EXPIRATION=3600
S3_HOST=https://ee.openreplay.com:443
S3_KEY=keys
S3_SECRET=secret
AWS_DEFAULT_REGION=us-east-1
REDIS_STRING=redis://127.0.0.1:6379
KAFKA_SERVERS=127.0.0.1:9092
KAFKA_USE_SSL=false
LOCAL_DEV=true
ENABLE_SSO=false
TZ=UTC


@@ -73,3 +73,14 @@ def __check(security_scopes: SecurityScopes, context: schemas.CurrentContext = D
def OR_scope(*scopes):
return Security(__check, scopes=list(scopes))
def __check_role(required_roles: SecurityScopes, context: schemas_ee.CurrentContext = Depends(OR_context)):
if len(required_roles.scopes) > 0:
if context.role not in required_roles.scopes:
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
detail="You need a different role to access this resource")
def OR_role(*required_roles):
return Security(__check_role, scopes=list(required_roles))


@@ -1,2 +1,11 @@
#!/bin/bash
DOTENV_FILE=./.env
if [ -f "$DOTENV_FILE" ]; then
echo "$DOTENV_FILE exists, nothing to do."
else
cp env.dev $DOTENV_FILE
echo "$DOTENV_FILE was created, please fill the missing required values."
fi
rsync -avr --exclude=".*" --ignore-existing ../../api/* ./


@@ -1,10 +1,10 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
boto3==1.28.42
boto3==1.28.55
pyjwt==2.8.0
psycopg2-binary==2.9.7
elasticsearch==8.9.0
elasticsearch==8.10.0
jira==3.5.2
@@ -17,4 +17,4 @@ apscheduler==3.10.4
clickhouse-driver[lz4]==0.2.6
python-multipart==0.0.6
azure-storage-blob==12.17.0
azure-storage-blob==12.18.2


@@ -1,10 +1,10 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
boto3==1.28.42
boto3==1.28.55
pyjwt==2.8.0
psycopg2-binary==2.9.7
elasticsearch==8.9.0
elasticsearch==8.10.0
jira==3.5.2
@@ -15,5 +15,5 @@ pydantic[email]==2.3.0
apscheduler==3.10.4
clickhouse-driver[lz4]==0.2.6
redis==5.0.0
azure-storage-blob==12.17.0
redis==5.0.1
azure-storage-blob==12.18.2


@@ -1,10 +1,10 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
boto3==1.28.42
boto3==1.28.55
pyjwt==2.8.0
psycopg2-binary==2.9.7
elasticsearch==8.9.0
elasticsearch==8.10.0
jira==3.5.2
@@ -23,6 +23,6 @@ clickhouse-driver[lz4]==0.2.6
python3-saml==1.15.0
python-multipart==0.0.6
redis==5.0.0
redis==5.0.1
#confluent-kafka==2.1.0
azure-storage-blob==12.17.0
azure-storage-blob==12.18.2


@@ -16,7 +16,7 @@ from chalicelib.utils import SAML2_helper, smtp
from chalicelib.utils import captcha
from chalicelib.utils import helper
from chalicelib.utils.TimeUTC import TimeUTC
from or_dependencies import OR_context, OR_scope
from or_dependencies import OR_context, OR_scope, OR_role
from routers.base import get_routers
from schemas import Permissions, ServicePermissions
@@ -154,8 +154,8 @@ def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSc
changes={"name": data.name, "endpoint": data.url})}
@app.post('/client/members', tags=["client"])
def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...),
@app.post('/client/members', tags=["client"], dependencies=[OR_role("owner", "admin")])
def add_member(background_tasks: BackgroundTasks, data: schemas_ee.CreateMemberSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data,
background_tasks=background_tasks)
@@ -194,8 +194,8 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema =
tenant_id=user["tenantId"])
@app.put('/client/members/{memberId}', tags=["client"])
def edit_member(memberId: int, data: schemas.EditMemberSchema,
@app.put('/client/members/{memberId}', tags=["client"], dependencies=[OR_role("owner", "admin")])
def edit_member(memberId: int, data: schemas_ee.EditMemberSchema,
context: schemas.CurrentContext = Depends(OR_context)):
return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data,
user_id_to_update=memberId)


@@ -6,5 +6,8 @@ ALTER TABLE experimental.events
ALTER TABLE experimental.events
ADD COLUMN IF NOT EXISTS selector Nullable(String);
ALTER TABLE experimental.events
ADD COLUMN IF NOT EXISTS coordinate Tuple(x Nullable(UInt16), y Nullable(UInt16));
ALTER TABLE experimental.sessions
ADD COLUMN IF NOT EXISTS timezone LowCardinality(Nullable(String));


@@ -81,6 +81,7 @@ CREATE TABLE IF NOT EXISTS experimental.events
error_tags_values Array(Nullable(String)),
transfer_size Nullable(UInt32),
selector Nullable(String),
coordinate Tuple(x Nullable(UInt16), y Nullable(UInt16)),
message_id UInt64 DEFAULT 0,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
@@ -278,6 +279,7 @@ SELECT session_id,
error_tags_values,
transfer_size,
selector,
coordinate,
message_id,
_timestamp
FROM experimental.events
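The new coordinate column is a named tuple, so ClickHouse can address its members as coordinate.x / coordinate.y. A hedged sketch of reading click positions with clickhouse-driver 0.2.6 (host, port, and the CLICK event-type literal are placeholders for this deployment):

from clickhouse_driver import Client

client = Client(host="127.0.0.1", port=9000)
rows = client.execute(
    """SELECT coordinate.x AS x, coordinate.y AS y
       FROM experimental.events
       WHERE session_id = %(session_id)s
         AND event_type = 'CLICK'  -- assumed event-type value
         AND coordinate.x IS NOT NULL""",
    {"session_id": 42},
)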


@@ -112,6 +112,10 @@ ALTER TABLE IF EXISTS public.users
ADD COLUMN IF NOT EXISTS jwt_refresh_jti integer NULL DEFAULT NULL,
ADD COLUMN IF NOT EXISTS jwt_refresh_iat timestamp without time zone NULL DEFAULT NULL;
ALTER TABLE IF EXISTS events.clicks
ADD COLUMN IF NOT EXISTS x integer DEFAULT NULL,
ADD COLUMN IF NOT EXISTS y integer DEFAULT NULL;
COMMIT;
\elif :is_next


@@ -1059,6 +1059,8 @@ $$
path text,
selector text DEFAULT '' NOT NULL,
hesitation integer DEFAULT NULL,
x integer DEFAULT NULL,
y integer DEFAULT NULL,
PRIMARY KEY (session_id, message_id)
);
CREATE INDEX IF NOT EXISTS clicks_session_id_idx ON events.clicks (session_id);
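On the PostgreSQL side the same click positions land in the new events.clicks.x/y columns. A minimal psycopg2 sketch of pulling them for a session (DSN and query shape are placeholders, not the chalice data layer):

import psycopg2

with psycopg2.connect("dbname=postgres user=postgres host=127.0.0.1") as conn:
    with conn.cursor() as cur:
        cur.execute(
            """SELECT x, y FROM events.clicks
               WHERE session_id = %(session_id)s
                 AND x IS NOT NULL AND y IS NOT NULL""",
            {"session_id": 42},
        )
        points = cur.fetchall()  # [(x1, y1), ...] ready for a click heatmap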


@@ -111,6 +111,10 @@ ALTER TABLE IF EXISTS public.users
ADD COLUMN IF NOT EXISTS jwt_refresh_jti integer NULL DEFAULT NULL,
ADD COLUMN IF NOT EXISTS jwt_refresh_iat timestamp without time zone NULL DEFAULT NULL;
ALTER TABLE IF EXISTS events.clicks
ADD COLUMN IF NOT EXISTS x integer DEFAULT NULL,
ADD COLUMN IF NOT EXISTS y integer DEFAULT NULL;
COMMIT;
\elif :is_next


@@ -662,6 +662,8 @@ $$
path text,
selector text DEFAULT '' NOT NULL,
hesitation integer DEFAULT NULL,
x integer DEFAULT NULL,
y integer DEFAULT NULL,
PRIMARY KEY (session_id, message_id)
);
CREATE INDEX clicks_session_id_idx ON events.clicks (session_id);