feat(chalice): upgraded dependencies
feat(chalice): refactored code
feat(chalice): assist validate session by DB
feat(chalice): assist validate session by Live sessions
feat(chalice): assist generate agent's token
This commit is contained in:
parent 6b9b3ed552
commit 18d4f17775

20 changed files with 193 additions and 196 deletions

@@ -4,8 +4,8 @@ LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
 RUN apk add --no-cache build-base tini
 ARG envarg
 ENV APP_NAME=alerts \
-    pg_minconn=1 \
-    pg_maxconn=10 \
+    PG_MINCONN=1 \
+    PG_MAXCONN=10 \
     LISTEN_PORT=8000 \
     ENTERPRISE_BUILD=${envarg}
@@ -1,12 +1,14 @@
 from os import access, R_OK
-from os.path import exists
+from os.path import exists as path_exists

+import jwt
 import requests
 from decouple import config
 from starlette.exceptions import HTTPException

 import schemas
 from chalicelib.core import projects
+from chalicelib.utils.TimeUTC import TimeUTC

 ASSIST_KEY = config("ASSIST_KEY")
 ASSIST_URL = config("ASSIST_URL") % ASSIST_KEY
@@ -51,13 +53,13 @@ def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSche
 def __get_live_sessions_ws(project_id, data):
     project_key = projects.get_project_key(project_id)
     try:
-        connected_peers = requests.post(ASSIST_URL + config("assist") + f"/{project_key}",
-                                        json=data, timeout=config("assistTimeout", cast=int, default=5))
-        if connected_peers.status_code != 200:
-            print("!! issue with the peer-server")
-            print(connected_peers.text)
+        results = requests.post(ASSIST_URL + config("assist") + f"/{project_key}",
+                                json=data, timeout=config("assistTimeout", cast=int, default=5))
+        if results.status_code != 200:
+            print(f"!! issue with the peer-server code:{results.status_code}")
+            print(results.text)
             return {"total": 0, "sessions": []}
-        live_peers = connected_peers.json().get("data", [])
+        live_peers = results.json().get("data", [])
     except requests.exceptions.Timeout:
         print("Timeout getting Assist response")
         live_peers = {"total": 0, "sessions": []}
@@ -66,7 +68,7 @@ def __get_live_sessions_ws(project_id, data):
         print(str(e))
         print("expected JSON, received:")
         try:
-            print(connected_peers.text)
+            print(results.text)
         except:
             print("couldn't get response")
         live_peers = {"total": 0, "sessions": []}
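
For reference, a minimal sketch (not part of this commit) of the payload shape __get_live_sessions_ws expects from the peer-server. Only the top-level "data"/"total"/"sessions" keys are implied by the parsing above; the per-session fields are assumptions.

# Illustrative only: the code reads response.json().get("data", []) and falls back
# to {"total": 0, "sessions": []} on errors, so a successful reply looks roughly like:
example_assist_response = {
    "data": {
        "total": 1,
        "sessions": [{"sessionId": "7391518078440519", "live": True}],  # fields assumed
    }
}
live_peers = example_assist_response.get("data", [])
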
@@ -79,45 +81,63 @@ def __get_live_sessions_ws(project_id, data):
     return live_peers


+def __get_agent_token(project_id, project_key, session_id):
+    iat = TimeUTC.now()
+    return jwt.encode(
+        payload={
+            "projectKey": project_key,
+            "projectId": project_id,
+            "sessionId": session_id,
+            "iat": iat // 1000,
+            "exp": iat // 1000 + config("JWT_EXP_DELTA_SECONDS", cast=int) + TimeUTC.get_utc_offset() // 1000,
+            "iss": config("JWT_ISSUER"),
+            "aud": f"openreplay:agent"
+        },
+        key=config("jwt_secret"),
+        algorithm=config("jwt_algorithm")
+    )
+
+
 def get_live_session_by_id(project_id, session_id):
     project_key = projects.get_project_key(project_id)
     try:
-        connected_peers = requests.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
-                                       timeout=config("assistTimeout", cast=int, default=5))
-        if connected_peers.status_code != 200:
-            print("!! issue with the peer-server")
-            print(connected_peers.text)
-            return False
-        connected_peers = connected_peers.json().get("data")
-        if connected_peers is None:
+        results = requests.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
+                               timeout=config("assistTimeout", cast=int, default=5))
+        if results.status_code != 200:
+            print(f"!! issue with the peer-server code:{results.status_code}")
+            print(results.text)
             return None
-        connected_peers["live"] = True
+        results = results.json().get("data")
+        if results is None:
+            return None
+        results["live"] = True
+        results["agentToken"] = __get_agent_token(project_id=project_id, project_key=project_key, session_id=session_id)
     except requests.exceptions.Timeout:
-        print("Timeout getting Assist response")
+        print("!! Timeout getting Assist response")
        return None
     except Exception as e:
         print("issue getting Assist response")
         print(str(e))
         print("expected JSON, received:")
         try:
-            print(connected_peers.text)
+            print(results.text)
         except:
             print("couldn't get response")
         return None
-    return connected_peers
+    return results


 def is_live(project_id, session_id, project_key=None):
     if project_key is None:
         project_key = projects.get_project_key(project_id)
     try:
-        connected_peers = requests.get(ASSIST_URL + config("assistList") + f"/{project_key}/{session_id}",
-                                       timeout=config("assistTimeout", cast=int, default=5))
-        if connected_peers.status_code != 200:
-            print("!! issue with the peer-server")
-            print(connected_peers.text)
+        results = requests.get(ASSIST_URL + config("assistList") + f"/{project_key}/{session_id}",
+                               timeout=config("assistTimeout", cast=int, default=5))
+        if results.status_code != 200:
+            print(f"!! issue with the peer-server code:{results.status_code}")
+            print(results.text)
             return False
-        connected_peers = connected_peers.json().get("data")
+        results = results.json().get("data")
     except requests.exceptions.Timeout:
         print("Timeout getting Assist response")
         return False
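
For context, a hedged sketch of how the token produced by __get_agent_token could be verified on the receiving side with PyJWT. The function name is hypothetical; it assumes the same jwt_secret/jwt_algorithm settings and the "openreplay:agent" audience used above.

import jwt
from decouple import config

def verify_agent_token(token: str) -> dict:
    # Raises jwt.ExpiredSignatureError / jwt.InvalidAudienceError / etc. on failure.
    return jwt.decode(
        token,
        key=config("jwt_secret"),
        algorithms=[config("jwt_algorithm")],
        audience="openreplay:agent",
        issuer=config("JWT_ISSUER"),
    )
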
@@ -126,11 +146,11 @@ def is_live(project_id, session_id, project_key=None):
         print(str(e))
         print("expected JSON, received:")
         try:
-            print(connected_peers.text)
+            print(results.text)
         except:
             print("couldn't get response")
         return False
-    return str(session_id) == connected_peers
+    return str(session_id) == results


 def autocomplete(project_id, q: str, key: str = None):
@@ -143,7 +163,7 @@ def autocomplete(project_id, q: str, key: str = None):
         ASSIST_URL + config("assistList") + f"/{project_key}/autocomplete",
         params=params, timeout=config("assistTimeout", cast=int, default=5))
     if results.status_code != 200:
-        print("!! issue with the peer-server")
+        print(f"!! issue with the peer-server code:{results.status_code}")
         print(results.text)
         return {"errors": [f"Something went wrong wile calling assist:{results.text}"]}
     results = results.json().get("data", [])
@@ -169,7 +189,7 @@ def get_ice_servers():

 def __get_efs_path():
     efs_path = config("FS_DIR")
-    if not exists(efs_path):
+    if not path_exists(efs_path):
         raise HTTPException(400, f"EFS not found in path: {efs_path}")

     if not access(efs_path, R_OK):
@@ -185,7 +205,7 @@ def __get_mob_path(project_id, session_id):
 def get_raw_mob_by_id(project_id, session_id):
     efs_path = __get_efs_path()
     path_to_file = efs_path + "/" + __get_mob_path(project_id=project_id, session_id=session_id)
-    if exists(path_to_file):
+    if path_exists(path_to_file):
         if not access(path_to_file, R_OK):
             raise HTTPException(400, f"Replay file found under: {efs_path};"
                                      f" but it is not readable, please check permissions")
@@ -203,7 +223,7 @@ def __get_devtools_path(project_id, session_id):
 def get_raw_devtools_by_id(project_id, session_id):
     efs_path = __get_efs_path()
     path_to_file = efs_path + "/" + __get_devtools_path(project_id=project_id, session_id=session_id)
-    if exists(path_to_file):
+    if path_exists(path_to_file):
         if not access(path_to_file, R_OK):
             raise HTTPException(400, f"Devtools file found under: {efs_path};"
                                      f" but it is not readable, please check permissions")
@@ -211,3 +231,30 @@ def get_raw_devtools_by_id(project_id, session_id):
         return path_to_file

     return None
+
+
+def session_exists(project_id, session_id):
+    project_key = projects.get_project_key(project_id)
+    try:
+        results = requests.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
+                               timeout=config("assistTimeout", cast=int, default=5))
+        if results.status_code != 200:
+            print(f"!! issue with the peer-server code:{results.status_code}")
+            print(results.text)
+            return None
+        results = results.json().get("data")
+        if results is None:
+            return False
+        return True
+    except requests.exceptions.Timeout:
+        print("!! Timeout getting Assist response")
+        return False
+    except Exception as e:
+        print("issue getting Assist response")
+        print(str(e))
+        print("expected JSON, received:")
+        try:
+            print(results.text)
+        except:
+            print("couldn't get response")
+        return False
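
The new assist.session_exists complements the DB lookup: it returns True/False on a definitive answer from the peer-server and None when the peer-server did not answer cleanly. A hedged sketch of the combined "validate by DB, then by live sessions" check that the routers further down build on; the helper name and the callables are hypothetical.

from typing import Callable, Optional

def replay_is_available(in_db: Callable[[], bool],
                        in_assist: Callable[[], Optional[bool]]) -> bool:
    # First the database (persisted sessions), then Assist (live sessions).
    if in_db():
        return True
    return bool(in_assist())  # None (peer-server error) is treated as "not available"

# Usage sketch with the functions from this diff (names assumed importable):
# replay_is_available(lambda: sessions.session_exists(project_id=1, session_id=42),
#                     lambda: session_exists(project_id=1, session_id=42))
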
@@ -42,8 +42,8 @@ def generate_jwt(id, tenant_id, iat, aud):
         payload={
             "userId": id,
             "tenantId": tenant_id,
-            "exp": iat // 1000 + config("jwt_exp_delta_seconds", cast=int) + TimeUTC.get_utc_offset() // 1000,
-            "iss": config("jwt_issuer"),
+            "exp": iat // 1000 + config("JWT_EXP_DELTA_SECONDS", cast=int) + TimeUTC.get_utc_offset() // 1000,
+            "iss": config("JWT_ISSUER"),
             "iat": iat // 1000,
             "aud": aud
         },
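
A small worked example (values assumed) of the exp computation above: iat and the UTC offset are in milliseconds, so both are floor-divided by 1000 before the expiration delta (in seconds) is added.

iat_ms = 1_664_000_000_000            # example "issued at" timestamp in ms
jwt_exp_delta_seconds = 2_592_000     # 30 days, as in the .env further down
utc_offset_ms = 0                     # TimeUTC.get_utc_offset() on a UTC host

exp = iat_ms // 1000 + jwt_exp_delta_seconds + utc_offset_ms // 1000
print(exp)  # 1666592000 -> unix seconds, 30 days after iat
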
@@ -195,13 +195,6 @@ def create_member(tenant_id, user_id, data, background_tasks: BackgroundTasks):
                                    admin=data.get("admin", False), name=name)
     new_member["invitationLink"] = __get_invitation_link(new_member.pop("invitationToken"))

-    # helper.async_post(config('email_basic') % 'member_invitation',
-    #                   {
-    #                       "email": data["email"],
-    #                       "invitationLink": new_member["invitationLink"],
-    #                       "clientId": tenants.get_by_tenant_id(tenant_id)["name"],
-    #                       "senderName": admin["name"]
-    #                   })
     background_tasks.add_task(email_helper.send_team_invitation, **{
         "recipient": data["email"],
         "invitation_link": new_member["invitationLink"],
@@ -552,7 +545,7 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
     )


-def authenticate(email, password, for_change_password=False, for_plugin=False):
+def authenticate(email, password, for_change_password=False):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(
             f"""SELECT
@@ -587,7 +580,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False):
         return {
             "jwt": authorizers.generate_jwt(r['userId'], r['tenantId'],
                                             TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]),
-                                            aud=f"plugin:{helper.get_stage_name()}" if for_plugin else f"front:{helper.get_stage_name()}"),
+                                            aud=f"front:{helper.get_stage_name()}"),
             "email": email,
             **r
         }
@@ -18,7 +18,7 @@ def get_version_number():


 def get_stage_name():
-    stage = config("stage")
+    stage = config("STAGE")
     return stage[len(local_prefix):] if stage.startswith(local_prefix) else stage

@@ -35,7 +35,7 @@ def is_onprem():


 def is_local():
-    return config("stage").startswith(local_prefix)
+    return config("STAGE").startswith(local_prefix)


 def generate_salt():
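
A minimal sketch of how the renamed STAGE value is interpreted, assuming local_prefix == "local-" as defined elsewhere in this module (the definition is not shown in the diff):

local_prefix = "local-"  # assumption: module-level constant in helper.py

def get_stage_name_example(stage: str) -> str:
    # Mirrors get_stage_name(): strip the local prefix if present.
    return stage[len(local_prefix):] if stage.startswith(local_prefix) else stage

assert get_stage_name_example("local-default-foss") == "default-foss"
assert get_stage_name_example("default-foss") == "default-foss"
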
@@ -141,18 +141,6 @@ def allow_captcha():
            and len(config("captcha_server")) > 0 and len(config("captcha_key")) > 0


-def allow_sentry():
-    return config("sentryURL", default=None) is not None and len(config("sentryURL")) > 0
-
-
-def async_post(endpoint, data):
-    data["auth"] = config("async_Token")
-    try:
-        requests.post(endpoint, timeout=1, json=data)
-    except requests.exceptions.ReadTimeout:
-        pass
-
-
 def string_to_sql_like(value):
     value = re.sub(' +', ' ', value)
     value = value.replace("*", "%")
@@ -17,8 +17,8 @@ _PG_CONFIG = {"host": config("pg_host"),
               "port": config("pg_port", cast=int),
               "application_name": config("APP_NAME", default="PY")}
 PG_CONFIG = dict(_PG_CONFIG)
-if config("pg_timeout", cast=int, default=0) > 0:
-    PG_CONFIG["options"] = f"-c statement_timeout={config('pg_timeout', cast=int) * 1000}"
+if config("PG_TIMEOUT", cast=int, default=0) > 0:
+    PG_CONFIG["options"] = f"-c statement_timeout={config('PG_TIMEOUT', cast=int) * 1000}"

 logging.info(f">PG_POOL:{config('PG_POOL', default=None)}")

@@ -67,8 +67,8 @@ def make_pool():
     except (Exception, psycopg2.DatabaseError) as error:
         logging.error("Error while closing all connexions to PostgreSQL", error)
     try:
-        postgreSQL_pool = ORThreadedConnectionPool(config("pg_minconn", cast=int, default=20),
-                                                   config("pg_maxconn", cast=int, default=80),
+        postgreSQL_pool = ORThreadedConnectionPool(config("PG_MINCONN", cast=int, default=20),
+                                                   config("PG_MAXCONN", cast=int, default=80),
                                                    **PG_CONFIG)
         if (postgreSQL_pool):
             logging.info("Connection pool created successfully")
@@ -109,7 +109,7 @@ class PostgresClient:
         elif not config('PG_POOL', cast=bool, default=True):
             single_config = dict(_PG_CONFIG)
             single_config["application_name"] += "-NOPOOL"
-            single_config["options"] = f"-c statement_timeout={config('pg_timeout', cast=int, default=3 * 60) * 1000}"
+            single_config["options"] = f"-c statement_timeout={config('PG_TIMEOUT', cast=int, default=3 * 60) * 1000}"
             self.connection = psycopg2.connect(**single_config)
         else:
             self.connection = postgreSQL_pool.getconn()
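
A short sketch (values assumed) of what the renamed PG_TIMEOUT setting turns into: a libpq statement_timeout in milliseconds, passed through psycopg2's options string.

from decouple import config

pg_timeout_s = config("PG_TIMEOUT", cast=int, default=30)   # seconds, from the environment
options = f"-c statement_timeout={pg_timeout_s * 1000}"     # what PG_CONFIG["options"] ends up holding
print(options)  # e.g. -c statement_timeout=30000
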
@@ -12,19 +12,14 @@ S3_KEY=
S3_SECRET=
SITE_URL=
announcement_url=
async_Token=
captcha_key=
captcha_server=
change_password_link=/reset-password?invitation=%s&&pass=%s
email_basic=http://127.0.0.1:8000/async/basic/%s
email_signup=http://127.0.0.1:8000/async/email_signup/%s
invitation_link=/api/users/invitation?token=%s
isEE=false
isFOS=true
js_cache_bucket=sessions-assets
jwt_algorithm=HS512
jwt_exp_delta_seconds=2592000
jwt_issuer=openreplay-default-foss
JWT_EXP_DELTA_SECONDS=2592000
JWT_ISSUER=openreplay-oss
jwt_secret="SET A RANDOM STRING HERE"
ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s
assist=/sockets-live
@@ -34,19 +29,17 @@ pg_host=postgresql.db.svc.cluster.local
pg_password=asayerPostgres
pg_port=5432
pg_user=postgres
pg_timeout=30
pg_minconn=20
pg_maxconn=50
PG_TIMEOUT=30
PG_MINCONN=20
PG_MAXCONN=50
PG_RETRY_MAX=50
PG_RETRY_INTERVAL=2
PG_POOL=true
put_S3_TTL=20
sentryURL=
sessions_bucket=mobs
sessions_region=us-east-1
sourcemaps_bucket=sourcemaps
sourcemaps_reader=http://127.0.0.1:9000/sourcemaps/%s/sourcemaps
stage=default-foss
STAGE=default-foss
version_number=1.4.0
FS_DIR=/mnt/efs
EFS_SESSION_MOB_PATTERN=%(sessionId)s/dom.mob
@@ -54,4 +47,6 @@ EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob
SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs
SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe
DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob
PRESIGNED_URL_EXPIRATION=3600
PRESIGNED_URL_EXPIRATION=3600
ASSIST_JWT_EXPIRATION=1800
ASSIST_JWT_SECRET=
@ -1,15 +1,15 @@
|
|||
requests==2.28.1
|
||||
urllib3==1.26.10
|
||||
boto3==1.24.53
|
||||
pyjwt==2.4.0
|
||||
urllib3==1.26.12
|
||||
boto3==1.24.80
|
||||
pyjwt==2.5.0
|
||||
psycopg2-binary==2.9.3
|
||||
elasticsearch==8.3.3
|
||||
jira==3.3.1
|
||||
elasticsearch==8.4.2
|
||||
jira==3.4.1
|
||||
|
||||
|
||||
|
||||
fastapi==0.80.0
|
||||
uvicorn[standard]==0.18.2
|
||||
fastapi==0.85.0
|
||||
uvicorn[standard]==0.18.3
|
||||
python-decouple==3.6
|
||||
pydantic[email]==1.9.2
|
||||
pydantic[email]==1.10.2
|
||||
apscheduler==3.9.1
|
||||
|
|
@@ -1,15 +1,15 @@
 requests==2.28.1
-urllib3==1.26.10
-boto3==1.24.53
-pyjwt==2.4.0
+urllib3==1.26.12
+boto3==1.24.80
+pyjwt==2.5.0
 psycopg2-binary==2.9.3
-elasticsearch==8.3.3
-jira==3.3.1
+elasticsearch==8.4.2
+jira==3.4.1



-fastapi==0.80.0
-uvicorn[standard]==0.18.2
+fastapi==0.85.0
+uvicorn[standard]==0.18.3
 python-decouple==3.6
-pydantic[email]==1.9.2
+pydantic[email]==1.10.2
 apscheduler==3.9.1
@@ -31,7 +31,7 @@ def login(data: schemas.UserLoginSchema = Body(...)):
             detail="Invalid captcha."
         )

-    r = users.authenticate(data.email, data.password, for_plugin=False)
+    r = users.authenticate(data.email, data.password)
     if r is None:
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,
@@ -546,21 +546,6 @@ def search_integrations(projectId: int, context: schemas.CurrentContext = Depend
     return log_tools.search(project_id=projectId)


-@public_app.post('/async/email_assignment', tags=["async mail"])
-def async_send_signup_emails(data: schemas.EmailPayloadSchema = Body(...)):
-    if data.auth != config("async_Token"):
-        return {}
-    email_helper.send_assign_session(recipient=data.email, link=data.link, message=data.message)
-
-
-# @public_app.post('/async/basic/member_invitation', tags=["async mail"])
-# def async_basic_emails(data: schemas.MemberInvitationPayloadSchema = Body(...)):
-#     if data.auth != config("async_Token"):
-#         return {}
-#     email_helper.send_team_invitation(recipient=data.email, invitation_link=data.invitation_link,
-#                                       client_id=data.client_id, sender_name=data.sender_name)
-
-
 @app.get('/{projectId}/sample_rate', tags=["projects"])
 def get_capture_status(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
     return {"data": projects.get_capture_status(project_id=projectId)}
@@ -877,14 +862,6 @@ def batch_view_notifications(data: schemas.NotificationsViewSchema,
                                                    tenant_id=context.tenant_id)}


-@public_app.post('/notifications', tags=['notifications'])
-@public_app.put('/notifications', tags=['notifications'])
-def create_notifications(data: schemas.CreateNotificationSchema):
-    if data.token != config("async_Token"):
-        return {"errors": ["missing token"]}
-    return notifications.create(data.notifications)
-
-
 @app.get('/boarding', tags=['boarding'])
 def get_boarding_state(context: schemas.CurrentContext = Depends(OR_context)):
     return {"data": boarding.get_state(tenant_id=context.tenant_id)}
@@ -279,16 +279,19 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun
 @app.get('/{projectId}/assist/sessions/{sessionId}/replay', tags=["assist"])
 def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
                                  context: schemas.CurrentContext = Depends(OR_context)):
-    if isinstance(sessionId, str) or not sessions.session_exists(project_id=projectId, session_id=sessionId):
-        if isinstance(sessionId, str):
-            print(f"{sessionId} not a valid number.")
-        else:
-            print(f"{projectId}/{sessionId} not found in DB.")
+    not_found = {"errors": ["Replay file not found"]}
+    if isinstance(sessionId, str):
+        print(f"{sessionId} not a valid number.")
+        return not_found
+    if not sessions.session_exists(project_id=projectId, session_id=sessionId):
+        print(f"{projectId}/{sessionId} not found in DB.")
+        if not assist.session_exists(project_id=projectId, session_id=sessionId):
+            print(f"{projectId}/{sessionId} not found in Assist.")
+            return not_found

-        return {"errors": ["Replay file not found"]}
     path = assist.get_raw_mob_by_id(project_id=projectId, session_id=sessionId)
     if path is None:
-        return {"errors": ["Replay file not found"]}
+        return not_found

     return FileResponse(path=path, media_type="application/octet-stream")
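
A hedged client-side sketch (not part of the commit) of calling the replay endpoint above and telling the JSON "not found" answer apart from the binary mob file; the base URL, auth header, and helper name are assumptions.

import requests

def fetch_replay(base_url: str, jwt_token: str, project_id: int, session_id: int):
    r = requests.get(f"{base_url}/{project_id}/assist/sessions/{session_id}/replay",
                     headers={"Authorization": f"Bearer {jwt_token}"}, timeout=10)
    if r.headers.get("content-type", "").startswith("application/json"):
        return None, r.json().get("errors")   # e.g. {"errors": ["Replay file not found"]}
    return r.content, None                    # raw dom.mob bytes served via FileResponse
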
@@ -297,13 +300,16 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
 @app.get('/{projectId}/assist/sessions/{sessionId}/devtools', tags=["assist"])
 def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
                                    context: schemas.CurrentContext = Depends(OR_context)):
-    if isinstance(sessionId, str) or not sessions.session_exists(project_id=projectId, session_id=sessionId):
-        if isinstance(sessionId, str):
-            print(f"{sessionId} not a valid number.")
-        else:
-            print(f"{projectId}/{sessionId} not found in DB.")
+    not_found = {"errors": ["Devtools file not found"]}
+    if isinstance(sessionId, str):
+        print(f"{sessionId} not a valid number.")
+        return not_found
+    if not sessions.session_exists(project_id=projectId, session_id=sessionId):
+        print(f"{projectId}/{sessionId} not found in DB.")
+        if not assist.session_exists(project_id=projectId, session_id=sessionId):
+            print(f"{projectId}/{sessionId} not found in Assist.")
+            return not_found

-        return {"errors": ["Devtools file not found"]}
     path = assist.get_raw_devtools_by_id(project_id=projectId, session_id=sessionId)
     if path is None:
         return {"errors": ["Devtools file not found"]}
@@ -4,8 +4,8 @@ LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
 RUN apk add --no-cache build-base tini
 ARG envarg
 ENV APP_NAME=alerts \
-    pg_minconn=1 \
-    pg_maxconn=10 \
+    PG_MINCONN=1 \
+    PG_MAXCONN=10 \
     LISTEN_PORT=8000 \
     ENTERPRISE_BUILD=${envarg}
@@ -4,8 +4,8 @@ LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
 RUN apk add --no-cache build-base tini
 ARG envarg
 ENV APP_NAME=crons \
-    pg_minconn=2 \
-    pg_maxconn=10 \
+    PG_MINCONN=2 \
+    PG_MAXCONN=10 \
     ENTERPRISE_BUILD=${envarg} \
     ACTION="" \
     PG_POOL=false
@@ -43,9 +43,9 @@ def generate_jwt(id, tenant_id, iat, aud, exp=None):
         payload={
             "userId": id,
             "tenantId": tenant_id,
-            "exp": iat // 1000 + int(config("jwt_exp_delta_seconds")) + TimeUTC.get_utc_offset() // 1000 \
+            "exp": iat // 1000 + int(config("JWT_EXP_DELTA_SECONDS")) + TimeUTC.get_utc_offset() // 1000 \
                 if exp is None else exp+ TimeUTC.get_utc_offset() // 1000,
-            "iss": config("jwt_issuer"),
+            "iss": config("JWT_ISSUER"),
             "iat": iat // 1000,
             "aud": aud
         },
@@ -228,13 +228,6 @@ def create_member(tenant_id, user_id, data, background_tasks: BackgroundTasks):
         new_member = create_new_member(tenant_id=tenant_id, email=data["email"], invitation_token=invitation_token,
                                        admin=data.get("admin", False), name=name, role_id=role_id)
         new_member["invitationLink"] = __get_invitation_link(new_member.pop("invitationToken"))
-        # helper.async_post(config('email_basic') % 'member_invitation',
-        #                   {
-        #                       "email": data["email"],
-        #                       "invitationLink": new_member["invitationLink"],
-        #                       "clientId": tenants.get_by_tenant_id(tenant_id)["name"],
-        #                       "senderName": admin["name"]
-        #                   })
         background_tasks.add_task(email_helper.send_team_invitation, **{
             "recipient": data["email"],
             "invitation_link": new_member["invitationLink"],
@@ -628,7 +621,7 @@ def change_jwt_iat(user_id):
         return cur.fetchone().get("jwt_iat")


-def authenticate(email, password, for_change_password=False, for_plugin=False):
+def authenticate(email, password, for_change_password=False):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(
             f"""SELECT
@@ -675,7 +668,7 @@ def authenticate(email, password, for_change_password=False, for_plugin=False):
         return {
             "jwt": authorizers.generate_jwt(r['userId'], r['tenantId'],
                                             TimeUTC.datetime_to_timestamp(jwt_iat),
-                                            aud=f"plugin:{helper.get_stage_name()}" if for_plugin else f"front:{helper.get_stage_name()}"),
+                                            aud=f"front:{helper.get_stage_name()}"),
             "email": email,
             **r
         }
@@ -14,7 +14,6 @@ S3_SECRET=
SAML2_MD_URL=
SITE_URL=
announcement_url=
async_Token=
captcha_key=
captcha_server=
ch_host=
@@ -22,20 +21,15 @@ ch_port=
ch_timeout=30
ch_receive_timeout=10
change_password_link=/reset-password?invitation=%s&&pass=%s
email_basic=http://127.0.0.1:8000/async/basic/%s
email_plans=http://127.0.0.1:8000/async/plans/%s
email_signup=http://127.0.0.1:8000/async/email_signup/%s
idp_entityId=
idp_sls_url=
idp_sso_url=
idp_x509cert=
invitation_link=/api/users/invitation?token=%s
isEE=true
isFOS=false
js_cache_bucket=sessions-assets
jwt_algorithm=HS512
jwt_exp_delta_seconds=2592000
jwt_issuer=openreplay-default-ee
JWT_EXP_DELTA_SECONDS=2592000
JWT_ISSUER=openreplay-ee
jwt_secret="SET A RANDOM STRING HERE"
ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s
assist=/sockets-live
@@ -45,14 +39,12 @@ pg_host=postgresql.db.svc.cluster.local
pg_password=asayerPostgres
pg_port=5432
pg_user=postgres
pg_timeout=30
pg_minconn=20
pg_maxconn=50
PG_TIMEOUT=30
PG_MINCONN=20
PG_MAXCONN=50
PG_RETRY_MAX=50
PG_RETRY_INTERVAL=2
PG_POOL=true
put_S3_TTL=20
sentryURL=
sessions_bucket=mobs
sessions_region=us-east-1
sourcemaps_bucket=sourcemaps
@@ -1,17 +1,17 @@
 requests==2.28.1
-urllib3==1.26.10
-boto3==1.24.53
-pyjwt==2.4.0
+urllib3==1.26.12
+boto3==1.24.80
+pyjwt==2.5.0
 psycopg2-binary==2.9.3
-elasticsearch==8.3.3
-jira==3.3.1
+elasticsearch==8.4.2
+jira==3.4.1



-fastapi==0.80.0
-uvicorn[standard]==0.18.2
+fastapi==0.85.0
+uvicorn[standard]==0.18.3
 python-decouple==3.6
-pydantic[email]==1.9.2
+pydantic[email]==1.10.2
 apscheduler==3.9.1

 clickhouse-driver==0.2.4
@@ -1,17 +1,17 @@
 requests==2.28.1
-urllib3==1.26.10
-boto3==1.24.53
-pyjwt==2.4.0
+urllib3==1.26.12
+boto3==1.24.80
+pyjwt==2.5.0
 psycopg2-binary==2.9.3
-elasticsearch==8.3.3
-jira==3.3.1
+elasticsearch==8.4.2
+jira==3.4.1



-fastapi==0.80.0
-uvicorn[standard]==0.18.2
+fastapi==0.85.0
+uvicorn[standard]==0.18.3
 python-decouple==3.6
-pydantic[email]==1.9.2
+pydantic[email]==1.10.2
 apscheduler==3.9.1

 clickhouse-driver==0.2.4
@@ -1,17 +1,17 @@
 requests==2.28.1
-urllib3==1.26.10
-boto3==1.24.53
-pyjwt==2.4.0
+urllib3==1.26.12
+boto3==1.24.80
+pyjwt==2.5.0
 psycopg2-binary==2.9.3
-elasticsearch==8.3.3
-jira==3.3.1
+elasticsearch==8.4.2
+jira==3.4.1



-fastapi==0.80.0
-uvicorn[standard]==0.18.2
+fastapi==0.85.0
+uvicorn[standard]==0.18.3
 python-decouple==3.6
-pydantic[email]==1.9.2
+pydantic[email]==1.10.2
 apscheduler==3.9.1

 clickhouse-driver==0.2.4
@@ -291,16 +291,19 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun
             dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay)])
 def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
                                  context: schemas.CurrentContext = Depends(OR_context)):
-    if isinstance(sessionId, str) or not sessions.session_exists(project_id=projectId, session_id=sessionId):
-        if isinstance(sessionId, str):
-            print(f"{sessionId} not a valid number.")
-        else:
-            print(f"{projectId}/{sessionId} not found in DB.")
+    not_found = {"errors": ["Replay file not found"]}
+    if isinstance(sessionId, str):
+        print(f"{sessionId} not a valid number.")
+        return not_found
+    if not sessions.session_exists(project_id=projectId, session_id=sessionId):
+        print(f"{projectId}/{sessionId} not found in DB.")
+        if not assist.session_exists(project_id=projectId, session_id=sessionId):
+            print(f"{projectId}/{sessionId} not found in Assist.")
+            return not_found

-        return {"errors": ["Replay file not found"]}
     path = assist.get_raw_mob_by_id(project_id=projectId, session_id=sessionId)
     if path is None:
-        return {"errors": ["Replay file not found"]}
+        return not_found

     return FileResponse(path=path, media_type="application/octet-stream")
@@ -311,13 +314,16 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
             dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools)])
 def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
                                    context: schemas.CurrentContext = Depends(OR_context)):
-    if isinstance(sessionId, str) or not sessions.session_exists(project_id=projectId, session_id=sessionId):
-        if isinstance(sessionId, str):
-            print(f"{sessionId} not a valid number.")
-        else:
-            print(f"{projectId}/{sessionId} not found in DB.")
+    not_found = {"errors": ["Devtools file not found"]}
+    if isinstance(sessionId, str):
+        print(f"{sessionId} not a valid number.")
+        return not_found
+    if not sessions.session_exists(project_id=projectId, session_id=sessionId):
+        print(f"{projectId}/{sessionId} not found in DB.")
+        if not assist.session_exists(project_id=projectId, session_id=sessionId):
+            print(f"{projectId}/{sessionId} not found in Assist.")
+            return not_found

-        return {"errors": ["Devtools file not found"]}
     path = assist.get_raw_devtools_by_id(project_id=projectId, session_id=sessionId)
     if path is None:
         return {"errors": ["Devtools file not found"]}