pulled dev
commit 006f87f388
38 changed files with 955 additions and 623 deletions
@@ -28,9 +28,6 @@ def jwt_authorizer(scheme: str, token: str, leeway=0) -> dict | None:
    if scheme.lower() != "bearer":
        return None
    try:
        logger.warning("Checking JWT token: %s", token)
        logger.warning("Against: %s", config("JWT_SECRET") if not is_spot_token(token) else config("JWT_SPOT_SECRET"))
        logger.warning(get_supported_audience())
        payload = jwt.decode(jwt=token,
                             key=config("JWT_SECRET") if not is_spot_token(token) else config("JWT_SPOT_SECRET"),
                             algorithms=config("JWT_ALGORITHM"),
@@ -40,8 +37,7 @@ def jwt_authorizer(scheme: str, token: str, leeway=0) -> dict | None:
        logger.debug("! JWT Expired signature")
        return None
    except BaseException as e:
        logger.warning("! JWT Base Exception")
        logger.debug(e)
        logger.warning("! JWT Base Exception", exc_info=e)
        return None
    return payload

@@ -50,10 +46,6 @@ def jwt_refresh_authorizer(scheme: str, token: str):
    if scheme.lower() != "bearer":
        return None
    try:
        logger.warning("Checking JWT REF token: %s", token)
        logger.warning("Against REF: %s",
                       config("JWT_REFRESH_SECRET") if not is_spot_token(token) else config("JWT_SPOT_REFRESH_SECRET"))
        logger.warning(get_supported_audience())
        payload = jwt.decode(jwt=token,
                             key=config("JWT_REFRESH_SECRET") if not is_spot_token(token) \
                                 else config("JWT_SPOT_REFRESH_SECRET"),
@@ -63,8 +55,7 @@ def jwt_refresh_authorizer(scheme: str, token: str):
        logger.debug("! JWT-refresh Expired signature")
        return None
    except BaseException as e:
        logger.warning("! JWT-refresh Base Exception")
        logger.debug(e)
        logger.error("! JWT-refresh Base Exception", exc_info=e)
        return None
    return payload

@@ -83,10 +74,6 @@ def generate_jwt(user_id, tenant_id, iat, aud, for_spot=False):
                       key=config("JWT_SECRET") if not for_spot else config("JWT_SPOT_SECRET"),
                       algorithm=config("JWT_ALGORITHM")
                       )
    logger.warning("Generated JWT token: %s", token)
    logger.warning("For spot: %s", for_spot)
    logger.warning("Using: %s", config("JWT_SECRET") if not for_spot else config("JWT_SPOT_SECRET"))
    logger.warning(aud)
    return token
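Editor's sketch (not part of the commit): the decode path shown above follows the usual PyJWT pattern of returning the payload on success and None on any verification failure. The secret, algorithm and audience values below are placeholders, not the project's configuration.

import jwt  # PyJWT

JWT_SECRET = "change-me"       # placeholder for config("JWT_SECRET")
JWT_ALGORITHM = "HS256"        # placeholder for config("JWT_ALGORITHM")
AUDIENCE = "example-audience"  # placeholder for the supported audience

def decode_or_none(token: str, leeway: int = 0) -> dict | None:
    # Returns the claims when signature, audience and expiry check out,
    # and None otherwise, mirroring the jwt_authorizer control flow above.
    try:
        return jwt.decode(jwt=token, key=JWT_SECRET, algorithms=[JWT_ALGORITHM],
                          audience=AUDIENCE, leeway=leeway)
    except jwt.ExpiredSignatureError:
        return None
    except jwt.PyJWTError:
        return None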
@@ -352,6 +352,108 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
    return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id)


def search_metrics(project_id, user_id, data: schemas.MetricSearchSchema, include_series=False):
    constraints = ["metrics.project_id = %(project_id)s", "metrics.deleted_at ISNULL"]
    params = {
        "project_id": project_id,
        "user_id": user_id,
        "offset": (data.page - 1) * data.limit,
        "limit": data.limit,
    }
    if data.mine_only:
        constraints.append("user_id = %(user_id)s")
    else:
        constraints.append("(user_id = %(user_id)s OR metrics.is_public)")
    if data.shared_only:
        constraints.append("is_public")

    if data.filter is not None:
        if data.filter.type:
            constraints.append("metrics.metric_type = %(filter_type)s")
            params["filter_type"] = data.filter.type
        if data.filter.query and len(data.filter.query) > 0:
            constraints.append("(metrics.name ILIKE %(filter_query)s OR owner.owner_name ILIKE %(filter_query)s)")
            params["filter_query"] = helper.values_for_operator(
                value=data.filter.query, op=schemas.SearchEventOperator.CONTAINS
            )

    with pg_client.PostgresClient() as cur:
        count_query = cur.mogrify(
            f"""SELECT COUNT(*)
                FROM metrics
                    LEFT JOIN LATERAL (
                        SELECT email AS owner_email, name AS owner_name
                        FROM users
                        WHERE deleted_at ISNULL
                          AND users.user_id = metrics.user_id
                    ) AS owner ON (TRUE)
                WHERE {" AND ".join(constraints)};""",
            params
        )
        cur.execute(count_query)
        total = cur.fetchone()["count"]

        sub_join = ""
        if include_series:
            sub_join = """LEFT JOIN LATERAL (
                        SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
                        FROM metric_series
                        WHERE metric_series.metric_id = metrics.metric_id
                          AND metric_series.deleted_at ISNULL
                    ) AS metric_series ON (TRUE)"""

        sort_column = data.sort.field if data.sort.field is not None else "created_at"
        # change ascend to asc and descend to desc
        sort_order = data.sort.order.value if hasattr(data.sort.order, "value") else data.sort.order
        if sort_order == "ascend":
            sort_order = "asc"
        elif sort_order == "descend":
            sort_order = "desc"

        query = cur.mogrify(
            f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, edited_at,
                       metric_type, metric_of, metric_format, metric_value, view_type, is_pinned,
                       dashboards, owner_email, owner_name, default_config AS config, thumbnail
                FROM metrics
                    {sub_join}
                    LEFT JOIN LATERAL (
                        SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public, name),'[]'::jsonb) AS dashboards
                        FROM (
                            SELECT DISTINCT dashboard_id, name, is_public
                            FROM dashboards
                                INNER JOIN dashboard_widgets USING (dashboard_id)
                            WHERE deleted_at ISNULL
                              AND dashboard_widgets.metric_id = metrics.metric_id
                              AND project_id = %(project_id)s
                              AND ((dashboards.user_id = %(user_id)s OR is_public))
                        ) AS connected_dashboards
                    ) AS connected_dashboards ON (TRUE)
                    LEFT JOIN LATERAL (
                        SELECT email AS owner_email, name AS owner_name
                        FROM users
                        WHERE deleted_at ISNULL
                          AND users.user_id = metrics.user_id
                    ) AS owner ON (TRUE)
                WHERE {" AND ".join(constraints)}
                ORDER BY {sort_column} {sort_order}
                LIMIT %(limit)s OFFSET %(offset)s;""",
            params
        )
        cur.execute(query)
        rows = cur.fetchall()
        if include_series:
            for r in rows:
                for s in r.get("series", []):
                    s["filter"] = helper.old_search_payload_to_flat(s["filter"])
        else:
            for r in rows:
                r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
                r["edited_at"] = TimeUTC.datetime_to_timestamp(r["edited_at"])
        rows = helper.list_to_camel_case(rows)

    return {"total": total, "list": rows}


def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_series=False):
    constraints = ["metrics.project_id = %(project_id)s",
                   "metrics.deleted_at ISNULL"]
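Editor's sketch (not part of the commit): the pagination and sort handling in search_metrics above reduce to simple arithmetic and string normalization; values below are illustrative only.

# Offset for page 3 with a limit of 10 rows per page: (3 - 1) * 10 = 20.
page, limit = 3, 10
offset = (page - 1) * limit          # -> 20, so rows 21-30 are returned

# The UI sends "ascend"/"descend"; SQL expects asc/desc.
def normalize_order(order: str) -> str:
    # hypothetical helper, equivalent to the if/elif chain in search_metrics
    return {"ascend": "asc", "descend": "desc"}.get(order, order)

assert normalize_order("descend") == "desc"
assert normalize_order("asc") == "asc"   # already-normalized values pass through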
@@ -18,7 +18,7 @@ def refresh_spot_jwt_iat_jti(user_id):
                            {"user_id": user_id})
        cur.execute(query)
        row = cur.fetchone()
    return row.get("spot_jwt_iat"), row.get("spot_jwt_refresh_jti"), row.get("spot_jwt_refresh_iat")
    return users.RefreshSpotJWTs(**row)


def logout(user_id: int):

@@ -26,13 +26,13 @@ def logout(user_id: int):


def refresh(user_id: int, tenant_id: int = -1) -> dict:
    spot_jwt_iat, spot_jwt_r_jti, spot_jwt_r_iat = refresh_spot_jwt_iat_jti(user_id=user_id)
    j = refresh_spot_jwt_iat_jti(user_id=user_id)
    return {
        "jwt": authorizers.generate_jwt(user_id=user_id, tenant_id=tenant_id, iat=spot_jwt_iat,
        "jwt": authorizers.generate_jwt(user_id=user_id, tenant_id=tenant_id, iat=j.spot_jwt_iat,
                                        aud=AUDIENCE, for_spot=True),
        "refreshToken": authorizers.generate_jwt_refresh(user_id=user_id, tenant_id=tenant_id, iat=spot_jwt_r_iat,
                                                         aud=AUDIENCE, jwt_jti=spot_jwt_r_jti, for_spot=True),
        "refreshTokenMaxAge": config("JWT_SPOT_REFRESH_EXPIRATION", cast=int) - (spot_jwt_iat - spot_jwt_r_iat)
        "refreshToken": authorizers.generate_jwt_refresh(user_id=user_id, tenant_id=tenant_id, iat=j.spot_jwt_refresh_iat,
                                                         aud=AUDIENCE, jwt_jti=j.spot_jwt_refresh_jti, for_spot=True),
        "refreshTokenMaxAge": config("JWT_SPOT_REFRESH_EXPIRATION", cast=int) - (j.spot_jwt_iat - j.spot_jwt_refresh_iat)
    }
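Editor's sketch (not part of the commit): refreshTokenMaxAge above is the refresh-token lifetime minus the age the refresh token already had when the access token was issued. A worked example with made-up numbers:

JWT_SPOT_REFRESH_EXPIRATION = 604_800     # illustrative 7-day lifetime, epoch seconds
spot_jwt_iat = 1_714_061_000              # access token issued-at
spot_jwt_refresh_iat = 1_714_060_000      # refresh token issued-at, 1000 s earlier

refresh_token_max_age = JWT_SPOT_REFRESH_EXPIRATION - (spot_jwt_iat - spot_jwt_refresh_iat)
assert refresh_token_max_age == 603_800   # the cookie expires when the refresh token does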
@@ -1,9 +1,10 @@
import json
import secrets
from typing import Optional

from decouple import config
from fastapi import BackgroundTasks
from pydantic import BaseModel
from pydantic import BaseModel, model_validator

import schemas
from chalicelib.core import authorizers

@@ -83,7 +84,6 @@ def restore_member(user_id, email, invitation_token, admin, name, owner=False):
                             "name": name, "invitation_token": invitation_token})
        cur.execute(query)
        result = cur.fetchone()
        cur.execute(query)
        result["created_at"] = TimeUTC.datetime_to_timestamp(result["created_at"])
        return helper.dict_to_camel_case(result)

@@ -284,7 +284,7 @@ def edit_member(user_id_to_update, tenant_id, changes: schemas.EditMemberSchema,
    if editor_id != user_id_to_update:
        admin = get_user_role(tenant_id=tenant_id, user_id=editor_id)
        if not admin["superAdmin"] and not admin["admin"]:
            return {"errors": ["unauthorized"]}
            return {"errors": ["unauthorized, you must have admin privileges"]}
        if admin["admin"] and user["superAdmin"]:
            return {"errors": ["only the owner can edit his own details"]}
    else:
@@ -552,14 +552,35 @@ def refresh_auth_exists(user_id, jwt_jti=None):
    return r is not None


class ChangeJwt(BaseModel):
class FullLoginJWTs(BaseModel):
    jwt_iat: int
    jwt_refresh_jti: int
    jwt_refresh_jti: str
    jwt_refresh_iat: int
    spot_jwt_iat: int
    spot_jwt_refresh_jti: int
    spot_jwt_refresh_jti: str
    spot_jwt_refresh_iat: int

    @model_validator(mode="before")
    @classmethod
    def _transform_data(cls, values):
        if values.get("jwt_refresh_jti") is not None:
            values["jwt_refresh_jti"] = str(values["jwt_refresh_jti"])
        if values.get("spot_jwt_refresh_jti") is not None:
            values["spot_jwt_refresh_jti"] = str(values["spot_jwt_refresh_jti"])
        return values


class RefreshLoginJWTs(FullLoginJWTs):
    spot_jwt_iat: Optional[int] = None
    spot_jwt_refresh_jti: Optional[str] = None
    spot_jwt_refresh_iat: Optional[int] = None


class RefreshSpotJWTs(FullLoginJWTs):
    jwt_iat: Optional[int] = None
    jwt_refresh_jti: Optional[str] = None
    jwt_refresh_iat: Optional[int] = None


def change_jwt_iat_jti(user_id):
    with pg_client.PostgresClient() as cur:

@@ -580,7 +601,7 @@ def change_jwt_iat_jti(user_id):
                            {"user_id": user_id})
        cur.execute(query)
        row = cur.fetchone()
    return ChangeJwt(**row)
    return FullLoginJWTs(**row)


def refresh_jwt_iat_jti(user_id):

@@ -595,7 +616,7 @@ def refresh_jwt_iat_jti(user_id):
                            {"user_id": user_id})
        cur.execute(query)
        row = cur.fetchone()
    return row.get("jwt_iat"), row.get("jwt_refresh_jti"), row.get("jwt_refresh_iat")
    return RefreshLoginJWTs(**row)


def authenticate(email, password, for_change_password=False) -> dict | bool | None:

@@ -663,13 +684,13 @@ def logout(user_id: int):


def refresh(user_id: int, tenant_id: int = -1) -> dict:
    jwt_iat, jwt_r_jti, jwt_r_iat = refresh_jwt_iat_jti(user_id=user_id)
    j = refresh_jwt_iat_jti(user_id=user_id)
    return {
        "jwt": authorizers.generate_jwt(user_id=user_id, tenant_id=tenant_id, iat=jwt_iat,
        "jwt": authorizers.generate_jwt(user_id=user_id, tenant_id=tenant_id, iat=j.jwt_iat,
                                        aud=AUDIENCE),
        "refreshToken": authorizers.generate_jwt_refresh(user_id=user_id, tenant_id=tenant_id, iat=jwt_r_iat,
                                                         aud=AUDIENCE, jwt_jti=jwt_r_jti),
        "refreshTokenMaxAge": config("JWT_REFRESH_EXPIRATION", cast=int) - (jwt_iat - jwt_r_iat)
        "refreshToken": authorizers.generate_jwt_refresh(user_id=user_id, tenant_id=tenant_id, iat=j.jwt_refresh_iat,
                                                         aud=AUDIENCE, jwt_jti=j.jwt_refresh_jti),
        "refreshTokenMaxAge": config("JWT_REFRESH_EXPIRATION", cast=int) - (j.jwt_iat - j.jwt_refresh_iat),
    }
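Editor's sketch (not part of the commit): what the FullLoginJWTs model above does to a raw login row when the *_jti columns presumably come back from Postgres as integers; the values are made up.

row = {
    "jwt_iat": 1_714_060_800, "jwt_refresh_jti": 9_123_456_789, "jwt_refresh_iat": 1_714_060_800,
    "spot_jwt_iat": 1_714_060_800, "spot_jwt_refresh_jti": 9_123_456_790, "spot_jwt_refresh_iat": 1_714_060_800,
}
j = FullLoginJWTs(**row)
# The mode="before" validator stringifies the jti values before field validation,
# so the str-typed fields accept them and downstream code can use them as "jti" claims.
assert j.jwt_refresh_jti == "9123456789"
assert j.spot_jwt_refresh_jti == "9123456790"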
@@ -7,27 +7,30 @@ from fastapi import HTTPException, status
from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Response

import schemas
from chalicelib.core import scope
from chalicelib.core import assist, signup, feature_flags
from chalicelib.core.metrics import heatmaps
from chalicelib.core.errors import errors, errors_details
from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_viewed, \
    sessions_assignments, unprocessed_sessions, sessions_search
from chalicelib.core import scope
from chalicelib.core import tenants, users, projects, license
from chalicelib.core import webhook
from chalicelib.core.collaborations.collaboration_slack import Slack
from chalicelib.core.errors import errors, errors_details
from chalicelib.core.metrics import heatmaps
from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_viewed, \
    sessions_assignments, unprocessed_sessions, sessions_search
from chalicelib.utils import captcha, smtp
from chalicelib.utils import contextual_validators
from chalicelib.utils import helper
from chalicelib.utils.TimeUTC import TimeUTC
from or_dependencies import OR_context, OR_role
from routers.base import get_routers
from routers.subs import spot
from chalicelib.utils import contextual_validators

logger = logging.getLogger(__name__)
public_app, app, app_apikey = get_routers()

COOKIE_PATH = "/api/refresh"
if config("LOCAL_DEV", cast=bool, default=False):
    COOKIE_PATH = "/refresh"
else:
    COOKIE_PATH = "/api/refresh"


@public_app.get('/signup', tags=['signup'])

@@ -73,11 +76,6 @@ def __process_authentication_response(response: JSONResponse, data: dict) -> dic
@public_app.post('/login', tags=["authentication"])
def login_user(response: JSONResponse, data: schemas.UserLoginSchema = Body(...)):
    if data.email != 'tahay@asayer.io':
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Enforced testing mode is active."
        )
    if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
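Editor's sketch (not part of the commit): the COOKIE_PATH switch above relies on python-decouple's default handling; with LOCAL_DEV unset, the else branch keeps the /api prefix.

from decouple import config

# With no LOCAL_DEV variable in the environment, cast=bool and default=False yield False,
# so the refresh cookie stays scoped to "/api/refresh". Setting LOCAL_DEV=true in a local
# .env flips it to "/refresh" for a locally served frontend (assumed deployment layout).
COOKIE_PATH = "/refresh" if config("LOCAL_DEV", cast=bool, default=False) else "/api/refresh"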
@@ -9,172 +9,330 @@ from routers.base import get_routers
public_app, app, app_apikey = get_routers()


@app.post('/{projectId}/dashboards', tags=["dashboard"])
def create_dashboards(projectId: int, data: schemas.CreateDashboardSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
    return dashboards.create_dashboard(project_id=projectId, user_id=context.user_id, data=data)
@app.post("/{projectId}/dashboards", tags=["dashboard"])
def create_dashboards(
    projectId: int,
    data: schemas.CreateDashboardSchema = Body(...),
    context: schemas.CurrentContext = Depends(OR_context),
):
    return dashboards.create_dashboard(
        project_id=projectId, user_id=context.user_id, data=data
    )


@app.get('/{projectId}/dashboards', tags=["dashboard"])
def get_dashboards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": dashboards.get_dashboards(project_id=projectId, user_id=context.user_id)}
@app.get("/{projectId}/dashboards", tags=["dashboard"])
def get_dashboards(
    projectId: int, context: schemas.CurrentContext = Depends(OR_context)
):
    return {
        "data": dashboards.get_dashboards(project_id=projectId, user_id=context.user_id)
    }


@app.get('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
def get_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)):
    data = dashboards.get_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)
@app.get("/{projectId}/dashboards/{dashboardId}", tags=["dashboard"])
def get_dashboard(
    projectId: int,
    dashboardId: int,
    context: schemas.CurrentContext = Depends(OR_context),
):
    data = dashboards.get_dashboard(
        project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId
    )
    if data is None:
        return {"errors": ["dashboard not found"]}
    return {"data": data}


@app.put('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
def update_dashboard(projectId: int, dashboardId: int, data: schemas.EditDashboardSchema = Body(...),
                     context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": dashboards.update_dashboard(project_id=projectId, user_id=context.user_id,
                                                dashboard_id=dashboardId, data=data)}
@app.put("/{projectId}/dashboards/{dashboardId}", tags=["dashboard"])
def update_dashboard(
    projectId: int,
    dashboardId: int,
    data: schemas.EditDashboardSchema = Body(...),
    context: schemas.CurrentContext = Depends(OR_context),
):
    return {
        "data": dashboards.update_dashboard(
            project_id=projectId,
            user_id=context.user_id,
            dashboard_id=dashboardId,
            data=data,
        )
    }


@app.delete('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
def delete_dashboard(projectId: int, dashboardId: int, _=Body(None),
                     context: schemas.CurrentContext = Depends(OR_context)):
    return dashboards.delete_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)
@app.delete("/{projectId}/dashboards/{dashboardId}", tags=["dashboard"])
def delete_dashboard(
    projectId: int,
    dashboardId: int,
    _=Body(None),
    context: schemas.CurrentContext = Depends(OR_context),
):
    return dashboards.delete_dashboard(
        project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId
    )


@app.get('/{projectId}/dashboards/{dashboardId}/pin', tags=["dashboard"])
def pin_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": dashboards.pin_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)}
@app.get("/{projectId}/dashboards/{dashboardId}/pin", tags=["dashboard"])
def pin_dashboard(
    projectId: int,
    dashboardId: int,
    context: schemas.CurrentContext = Depends(OR_context),
):
    return {
        "data": dashboards.pin_dashboard(
            project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId
        )
    }


@app.post('/{projectId}/dashboards/{dashboardId}/cards', tags=["cards"])
def add_card_to_dashboard(projectId: int, dashboardId: int,
                          data: schemas.AddWidgetToDashboardPayloadSchema = Body(...),
                          context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": dashboards.add_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
                                          data=data)}
@app.post("/{projectId}/dashboards/{dashboardId}/cards", tags=["cards"])
def add_card_to_dashboard(
    projectId: int,
    dashboardId: int,
    data: schemas.AddWidgetToDashboardPayloadSchema = Body(...),
    context: schemas.CurrentContext = Depends(OR_context),
):
    return {
        "data": dashboards.add_widget(
            project_id=projectId,
            user_id=context.user_id,
            dashboard_id=dashboardId,
            data=data,
        )
    }


@app.post('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
@app.post("/{projectId}/dashboards/{dashboardId}/metrics", tags=["dashboard"])
# @app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"])
def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int,
                                       data: schemas.CardSchema = Body(...),
                                       context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": dashboards.create_metric_add_widget(project=context.project, user_id=context.user_id,
                                                        dashboard_id=dashboardId, data=data)}
def create_metric_and_add_to_dashboard(
    projectId: int,
    dashboardId: int,
    data: schemas.CardSchema = Body(...),
    context: schemas.CurrentContext = Depends(OR_context),
):
    return {
        "data": dashboards.create_metric_add_widget(
            project=context.project,
            user_id=context.user_id,
            dashboard_id=dashboardId,
            data=data,
        )
    }


@app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int,
                               data: schemas.UpdateWidgetPayloadSchema = Body(...),
                               context: schemas.CurrentContext = Depends(OR_context)):
    return dashboards.update_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
                                    widget_id=widgetId, data=data)
@app.put("/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}", tags=["dashboard"])
def update_widget_in_dashboard(
    projectId: int,
    dashboardId: int,
    widgetId: int,
    data: schemas.UpdateWidgetPayloadSchema = Body(...),
    context: schemas.CurrentContext = Depends(OR_context),
):
    return dashboards.update_widget(
        project_id=projectId,
        user_id=context.user_id,
        dashboard_id=dashboardId,
        widget_id=widgetId,
        data=data,
    )


@app.delete('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int, _=Body(None),
                                 context: schemas.CurrentContext = Depends(OR_context)):
    return dashboards.remove_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
                                    widget_id=widgetId)
@app.delete(
    "/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}", tags=["dashboard"]
)
def remove_widget_from_dashboard(
    projectId: int,
    dashboardId: int,
    widgetId: int,
    _=Body(None),
    context: schemas.CurrentContext = Depends(OR_context),
):
    return dashboards.remove_widget(
        project_id=projectId,
        user_id=context.user_id,
        dashboard_id=dashboardId,
        widget_id=widgetId,
    )


@app.post('/{projectId}/cards/try', tags=["cards"])
def try_card(projectId: int, data: schemas.CardSchema = Body(...),
             context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": custom_metrics.get_chart(project=context.project, data=data, user_id=context.user_id)}
@app.post("/{projectId}/cards/try", tags=["cards"])
def try_card(
    projectId: int,
    data: schemas.CardSchema = Body(...),
    context: schemas.CurrentContext = Depends(OR_context),
):
    return {
        "data": custom_metrics.get_chart(
            project=context.project, data=data, user_id=context.user_id
        )
    }


@app.post('/{projectId}/cards/try/sessions', tags=["cards"])
def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
    data = custom_metrics.get_sessions(project=context.project, user_id=context.user_id, data=data)
@app.post("/{projectId}/cards/try/sessions", tags=["cards"])
def try_card_sessions(
    projectId: int,
    data: schemas.CardSessionsSchema = Body(...),
    context: schemas.CurrentContext = Depends(OR_context),
):
    data = custom_metrics.get_sessions(
        project=context.project, user_id=context.user_id, data=data
    )
    return {"data": data}


@app.post('/{projectId}/cards/try/issues', tags=["cards"])
def try_card_issues(projectId: int, data: schemas.CardSchema = Body(...),
                    context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": custom_metrics.get_issues(project=context.project, user_id=context.user_id, data=data)}
@app.post("/{projectId}/cards/try/issues", tags=["cards"])
def try_card_issues(
    projectId: int,
    data: schemas.CardSchema = Body(...),
    context: schemas.CurrentContext = Depends(OR_context),
):
    return {
        "data": custom_metrics.get_issues(
            project=context.project, user_id=context.user_id, data=data
        )
    }


@app.get('/{projectId}/cards', tags=["cards"])
@app.get("/{projectId}/cards", tags=["cards"])
def get_cards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)}
    return {
        "data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)
    }


@app.post('/{projectId}/cards', tags=["cards"])
def create_card(projectId: int, data: schemas.CardSchema = Body(...),
                context: schemas.CurrentContext = Depends(OR_context)):
    return custom_metrics.create_card(project=context.project, user_id=context.user_id, data=data)
@app.post("/{projectId}/cards", tags=["cards"])
def create_card(
    projectId: int,
    data: schemas.CardSchema = Body(...),
    context: schemas.CurrentContext = Depends(OR_context),
):
    return custom_metrics.create_card(
        project=context.project, user_id=context.user_id, data=data
    )


@app.post('/{projectId}/cards/search', tags=["cards"])
def search_cards(projectId: int, data: schemas.SearchCardsSchema = Body(...),
                 context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": custom_metrics.search_all(project_id=projectId, user_id=context.user_id, data=data)}
@app.post("/{projectId}/cards/search", tags=["cards"])
def search_cards(
    projectId: int,
    data: schemas.MetricSearchSchema = Body(...),
    context: schemas.CurrentContext = Depends(OR_context),
):
    return {
        "data": custom_metrics.search_metrics(
            project_id=projectId, user_id=context.user_id, data=data
        )
    }


@app.get('/{projectId}/cards/{metric_id}', tags=["cards"])
def get_card(projectId: int, metric_id: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)):
@app.get("/{projectId}/cards/{metric_id}", tags=["cards"])
def get_card(
    projectId: int,
    metric_id: Union[int, str],
    context: schemas.CurrentContext = Depends(OR_context),
):
    if metric_id.isnumeric():
        metric_id = int(metric_id)
    else:
        return {"errors": ["invalid card_id"]}
    data = custom_metrics.get_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id)
    data = custom_metrics.get_card(
        project_id=projectId, user_id=context.user_id, metric_id=metric_id
    )
    if data is None:
        return {"errors": ["card not found"]}
    return {"data": data}


@app.post('/{projectId}/cards/{metric_id}/sessions', tags=["cards"])
def get_card_sessions(projectId: int, metric_id: int,
                      data: schemas.CardSessionsSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
    data = custom_metrics.get_sessions_by_card_id(project=context.project, user_id=context.user_id, metric_id=metric_id,
                                                  data=data)
@app.post("/{projectId}/cards/{metric_id}/sessions", tags=["cards"])
def get_card_sessions(
    projectId: int,
    metric_id: int,
    data: schemas.CardSessionsSchema = Body(...),
    context: schemas.CurrentContext = Depends(OR_context),
):
    data = custom_metrics.get_sessions_by_card_id(
        project=context.project, user_id=context.user_id, metric_id=metric_id, data=data
    )
    if data is None:
        return {"errors": ["custom metric not found"]}
    return {"data": data}


@app.post('/{projectId}/cards/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"])
def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str,
                                     data: schemas.CardSessionsSchema = Body(...),
                                     context: schemas.CurrentContext = Depends(OR_context)):
    data = custom_metrics.get_funnel_sessions_by_issue(project_id=projectId, user_id=context.user_id,
                                                       metric_id=metric_id, issue_id=issueId, data=data)
@app.post(
    "/{projectId}/cards/{metric_id}/issues/{issueId}/sessions", tags=["dashboard"]
)
def get_metric_funnel_issue_sessions(
    projectId: int,
    metric_id: int,
    issueId: str,
    data: schemas.CardSessionsSchema = Body(...),
    context: schemas.CurrentContext = Depends(OR_context),
):
    data = custom_metrics.get_funnel_sessions_by_issue(
        project_id=projectId,
        user_id=context.user_id,
        metric_id=metric_id,
        issue_id=issueId,
        data=data,
    )
    if data is None:
        return {"errors": ["custom metric not found"]}
    return {"data": data}


@app.post('/{projectId}/cards/{metric_id}/chart', tags=["card"])
def get_card_chart(projectId: int, metric_id: int, data: schemas.CardSessionsSchema = Body(...),
                   context: schemas.CurrentContext = Depends(OR_context)):
    data = custom_metrics.make_chart_from_card(project=context.project, user_id=context.user_id, metric_id=metric_id,
                                               data=data)
@app.post("/{projectId}/cards/{metric_id}/chart", tags=["card"])
def get_card_chart(
    projectId: int,
    metric_id: int,
    data: schemas.CardSessionsSchema = Body(...),
    context: schemas.CurrentContext = Depends(OR_context),
):
    data = custom_metrics.make_chart_from_card(
        project=context.project, user_id=context.user_id, metric_id=metric_id, data=data
    )
    return {"data": data}


@app.post('/{projectId}/cards/{metric_id}', tags=["dashboard"])
def update_card(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...),
                context: schemas.CurrentContext = Depends(OR_context)):
    data = custom_metrics.update_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
@app.post("/{projectId}/cards/{metric_id}", tags=["dashboard"])
def update_card(
    projectId: int,
    metric_id: int,
    data: schemas.CardSchema = Body(...),
    context: schemas.CurrentContext = Depends(OR_context),
):
    data = custom_metrics.update_card(
        project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data
    )
    if data is None:
        return {"errors": ["custom metric not found"]}
    return {"data": data}


@app.post('/{projectId}/cards/{metric_id}/status', tags=["dashboard"])
def update_card_state(projectId: int, metric_id: int,
                      data: schemas.UpdateCardStatusSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
@app.post("/{projectId}/cards/{metric_id}/status", tags=["dashboard"])
def update_card_state(
    projectId: int,
    metric_id: int,
    data: schemas.UpdateCardStatusSchema = Body(...),
    context: schemas.CurrentContext = Depends(OR_context),
):
    return {
        "data": custom_metrics.change_state(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
                                            status=data.active)}
        "data": custom_metrics.change_state(
            project_id=projectId,
            user_id=context.user_id,
            metric_id=metric_id,
            status=data.active,
        )
    }


@app.delete('/{projectId}/cards/{metric_id}', tags=["dashboard"])
def delete_card(projectId: int, metric_id: int, _=Body(None),
                context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": custom_metrics.delete_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}
@app.delete("/{projectId}/cards/{metric_id}", tags=["dashboard"])
def delete_card(
    projectId: int,
    metric_id: int,
    _=Body(None),
    context: schemas.CurrentContext = Depends(OR_context),
):
    return {
        "data": custom_metrics.delete_card(
            project_id=projectId, user_id=context.user_id, metric_id=metric_id
        )
    }
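Editor's sketch (not part of the commit): the reworked search endpoint above now takes the MetricSearchSchema body defined further down in schemas.py. A hedged request example; the base URL, project id and token are made up.

import requests

payload = {
    "page": 1,
    "limit": 10,                                          # assumes _PaginatedSchema exposes page/limit
    "sort": {"field": "created_at", "order": "descend"},  # normalized to DESC server-side
    "filter": {"type": "funnel", "query": "checkout"},    # hypothetical type/query values
    "mine_only": False,
    "shared_only": False,
}
resp = requests.post("https://openreplay.example.com/api/1/cards/search",
                     json=payload, headers={"Authorization": "Bearer <jwt>"})
# Expected response shape, per search_metrics: {"data": {"total": <int>, "list": [...]}}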
@@ -1,3 +1,4 @@
from decouple import config
from fastapi import Depends
from starlette.responses import JSONResponse, Response

@@ -8,7 +9,10 @@ from routers.base import get_routers
public_app, app, app_apikey = get_routers(prefix="/spot", tags=["spot"])

COOKIE_PATH = "/api/spot/refresh"
if config("LOCAL_DEV", cast=bool, default=False):
    COOKIE_PATH = "/spot/refresh"
else:
    COOKIE_PATH = "/api/spot/refresh"


@app.get('/logout')
@@ -1368,6 +1368,42 @@ class SearchCardsSchema(_PaginatedSchema):
    query: Optional[str] = Field(default=None)


class MetricSortColumnType(str, Enum):
    NAME = "name"
    METRIC_TYPE = "metric_type"
    METRIC_OF = "metric_of"
    IS_PUBLIC = "is_public"
    CREATED_AT = "created_at"
    EDITED_AT = "edited_at"


class MetricFilterColumnType(str, Enum):
    NAME = "name"
    METRIC_TYPE = "metric_type"
    METRIC_OF = "metric_of"
    IS_PUBLIC = "is_public"
    USER_ID = "user_id"
    CREATED_AT = "created_at"
    EDITED_AT = "edited_at"


class MetricListSort(BaseModel):
    field: Optional[str] = Field(default=None)
    order: Optional[str] = Field(default=SortOrderType.DESC)


class MetricFilter(BaseModel):
    type: Optional[str] = Field(default=None)
    query: Optional[str] = Field(default=None)


class MetricSearchSchema(_PaginatedSchema):
    filter: Optional[MetricFilter] = Field(default=None)
    sort: Optional[MetricListSort] = Field(default=MetricListSort())
    shared_only: bool = Field(default=False)
    mine_only: bool = Field(default=False)


class _HeatMapSearchEventRaw(SessionSearchEventSchema2):
    type: Literal[EventType.LOCATION] = Field(...)
@@ -14,7 +14,7 @@ import (
    "openreplay/backend/pkg/logger"
    "openreplay/backend/pkg/messages"
    "openreplay/backend/pkg/metrics"
    storageMetrics "openreplay/backend/pkg/metrics/images"
    imagesMetrics "openreplay/backend/pkg/metrics/images"
    "openreplay/backend/pkg/objectstorage/store"
    "openreplay/backend/pkg/queue"
)

@@ -23,14 +23,15 @@ func main() {
    ctx := context.Background()
    log := logger.New()
    cfg := config.New(log)
    metrics.New(log, storageMetrics.List())
    imageMetrics := imagesMetrics.New("images")
    metrics.New(log, imageMetrics.List())

    objStore, err := store.NewStore(&cfg.ObjectsConfig)
    if err != nil {
        log.Fatal(ctx, "can't init object storage: %s", err)
    }

    srv, err := images.New(cfg, log, objStore)
    srv, err := images.New(cfg, log, objStore, imageMetrics)
    if err != nil {
        log.Fatal(ctx, "can't init images service: %s", err)
    }
@@ -28,7 +28,8 @@ func main() {
    }
    defer pgConn.Close()

    builder, err := spot.NewServiceBuilder(log, cfg, webMetrics, pgConn)
    prefix := api.NoPrefix
    builder, err := spot.NewServiceBuilder(log, cfg, webMetrics, pgConn, prefix)
    if err != nil {
        log.Fatal(ctx, "can't init services: %s", err)
    }

@@ -37,7 +38,7 @@ func main() {
    if err != nil {
        log.Fatal(ctx, "failed while creating router: %s", err)
    }
    router.AddHandlers(api.NoPrefix, builder.SpotsAPI)
    router.AddHandlers(prefix, builder.SpotsAPI)
    router.AddMiddlewares(builder.Auth.Middleware, builder.RateLimiter.Middleware, builder.AuditTrail.Middleware)

    server.Run(ctx, log, &cfg.HTTP, router)
@@ -15,6 +15,7 @@ import (

    config "openreplay/backend/internal/config/images"
    "openreplay/backend/pkg/logger"
    "openreplay/backend/pkg/metrics/images"
    "openreplay/backend/pkg/objectstorage"
    "openreplay/backend/pkg/pool"
)

@@ -38,9 +39,10 @@ type ImageStorage struct {
    objStorage   objectstorage.ObjectStorage
    saverPool    pool.WorkerPool
    uploaderPool pool.WorkerPool
    metrics      images.Images
}

func New(cfg *config.Config, log logger.Logger, objStorage objectstorage.ObjectStorage) (*ImageStorage, error) {
func New(cfg *config.Config, log logger.Logger, objStorage objectstorage.ObjectStorage, metrics images.Images) (*ImageStorage, error) {
    switch {
    case cfg == nil:
        return nil, fmt.Errorf("config is empty")

@@ -48,11 +50,14 @@ func New(cfg *config.Config, log logger.Logger, objStorage objectstorage.ObjectS
        return nil, fmt.Errorf("logger is empty")
    case objStorage == nil:
        return nil, fmt.Errorf("objStorage is empty")
    case metrics == nil:
        return nil, fmt.Errorf("metrics is empty")
    }
    s := &ImageStorage{
        cfg:        cfg,
        log:        log,
        objStorage: objStorage,
        metrics:    metrics,
    }
    s.saverPool = pool.NewPool(4, 8, s.writeToDisk)
    s.uploaderPool = pool.NewPool(8, 8, s.sendToS3)

@@ -92,8 +97,11 @@ func (v *ImageStorage) Process(ctx context.Context, sessID uint64, data []byte)
            v.log.Error(ctx, "ExtractTarGz: unknown type: %d in %s", header.Typeflag, header.Name)
        }
    }
    v.metrics.RecordOriginalArchiveExtractionDuration(time.Since(start).Seconds())
    v.metrics.RecordOriginalArchiveSize(float64(len(images)))
    v.metrics.IncreaseTotalSavedArchives()

    v.log.Info(ctx, "arch size: %d, extracted archive in: %s", len(data), time.Since(start))
    v.log.Debug(ctx, "arch size: %d, extracted archive in: %s", len(data), time.Since(start))
    v.saverPool.Submit(&saveTask{ctx: ctx, sessionID: sessID, images: images})
    return nil
}

@@ -115,6 +123,7 @@ func (v *ImageStorage) writeToDisk(payload interface{}) {
    // Write images to disk
    saved := 0
    for name, img := range task.images {
        start := time.Now()
        outFile, err := os.Create(path + name) // or open file in rewrite mode
        if err != nil {
            v.log.Error(task.ctx, "can't create file: %s", err.Error())

@@ -128,9 +137,11 @@ func (v *ImageStorage) writeToDisk(payload interface{}) {
        if err := outFile.Close(); err != nil {
            v.log.Warn(task.ctx, "can't close file: %s", err.Error())
        }
        v.metrics.RecordSavingImageDuration(time.Since(start).Seconds())
        v.metrics.IncreaseTotalSavedImages()
        saved++
    }
    v.log.Info(task.ctx, "saved %d images to disk", saved)
    v.log.Debug(task.ctx, "saved %d images to disk", saved)
    return
}

@@ -151,8 +162,10 @@ func (v *ImageStorage) PackScreenshots(ctx context.Context, sessID uint64, files
    if err != nil {
        return fmt.Errorf("failed to execute command: %v, stderr: %v", err, stderr.String())
    }
    v.log.Info(ctx, "packed replay in %v", time.Since(start))
    v.metrics.RecordArchivingDuration(time.Since(start).Seconds())
    v.metrics.IncreaseTotalCreatedArchives()

    v.log.Debug(ctx, "packed replay in %v", time.Since(start))
    v.uploaderPool.Submit(&uploadTask{ctx: ctx, sessionID: sessionID, path: archPath, name: sessionID + "/replay.tar.zst"})
    return nil
}

@@ -167,6 +180,9 @@ func (v *ImageStorage) sendToS3(payload interface{}) {
    if err := v.objStorage.Upload(bytes.NewReader(video), task.name, "application/octet-stream", objectstorage.NoContentEncoding, objectstorage.Zstd); err != nil {
        v.log.Fatal(task.ctx, "failed to upload replay file: %s", err)
    }
    v.log.Info(task.ctx, "replay file (size: %d) uploaded successfully in %v", len(video), time.Since(start))
    v.metrics.RecordUploadingDuration(time.Since(start).Seconds())
    v.metrics.RecordArchiveSize(float64(len(video)))

    v.log.Debug(task.ctx, "replay file (size: %d) uploaded successfully in %v", len(video), time.Since(start))
    return
}
@@ -58,7 +58,7 @@ func NewServiceBuilder(log logger.Logger, cfg *analytics.Config, webMetrics web.
        return nil, err
    }
    return &ServicesBuilder{
        Auth:        auth.NewAuth(log, cfg.JWTSecret, cfg.JWTSpotSecret, pgconn, nil),
        Auth:        auth.NewAuth(log, cfg.JWTSecret, cfg.JWTSpotSecret, pgconn, nil, api.NoPrefix),
        RateLimiter: limiter.NewUserRateLimiter(10, 30, 1*time.Minute, 5*time.Minute),
        AuditTrail:  audiTrail,
        CardsAPI:    cardsHandlers,
@@ -42,7 +42,7 @@ func NewServiceBuilder(log logger.Logger, cfg *integrations.Config, webMetrics w
        return nil, err
    }
    builder := &ServiceBuilder{
        Auth:            auth.NewAuth(log, cfg.JWTSecret, "", pgconn, nil),
        Auth:            auth.NewAuth(log, cfg.JWTSecret, "", pgconn, nil, api.NoPrefix),
        RateLimiter:     limiter.NewUserRateLimiter(10, 30, 1*time.Minute, 5*time.Minute),
        AuditTrail:      auditrail,
        IntegrationsAPI: handlers,
@@ -5,151 +5,187 @@ import (
    "openreplay/backend/pkg/metrics/common"
)

var storageSessionSize = prometheus.NewHistogramVec(
    prometheus.HistogramOpts{
        Namespace: "storage",
        Name:      "session_size_bytes",
        Help:      "A histogram displaying the size of each session file in bytes prior to any manipulation.",
        Buckets:   common.DefaultSizeBuckets,
    },
    []string{"file_type"},
)

func RecordSessionSize(fileSize float64, fileType string) {
    storageSessionSize.WithLabelValues(fileType).Observe(fileSize)
type Images interface {
    RecordOriginalArchiveSize(size float64)
    RecordOriginalArchiveExtractionDuration(duration float64)
    IncreaseTotalSavedArchives()
    RecordSavingImageDuration(duration float64)
    IncreaseTotalSavedImages()
    IncreaseTotalCreatedArchives()
    RecordArchivingDuration(duration float64)
    RecordArchiveSize(size float64)
    RecordUploadingDuration(duration float64)
    List() []prometheus.Collector
}

var storageTotalSessions = prometheus.NewCounter(
    prometheus.CounterOpts{
        Namespace: "storage",
        Name:      "sessions_total",
        Help:      "A counter displaying the total number of all processed sessions.",
    },
)

func IncreaseStorageTotalSessions() {
    storageTotalSessions.Inc()
type imagesImpl struct {
    originalArchiveSize               prometheus.Histogram
    originalArchiveExtractionDuration prometheus.Histogram
    totalSavedArchives                prometheus.Counter
    savingImageDuration               prometheus.Histogram
    totalSavedImages                  prometheus.Counter
    totalCreatedArchives              prometheus.Counter
    archivingDuration                 prometheus.Histogram
    archiveSize                       prometheus.Histogram
    uploadingDuration                 prometheus.Histogram
}

var storageSkippedSessionSize = prometheus.NewHistogramVec(
    prometheus.HistogramOpts{
        Namespace: "storage",
        Name:      "session_size_bytes",
        Help:      "A histogram displaying the size of each skipped session file in bytes.",
        Buckets:   common.DefaultSizeBuckets,
    },
    []string{"file_type"},
)

func RecordSkippedSessionSize(fileSize float64, fileType string) {
    storageSkippedSessionSize.WithLabelValues(fileType).Observe(fileSize)
}

var storageTotalSkippedSessions = prometheus.NewCounter(
    prometheus.CounterOpts{
        Namespace: "storage",
        Name:      "sessions_skipped_total",
        Help:      "A counter displaying the total number of all skipped sessions because of the size limits.",
    },
)

func IncreaseStorageTotalSkippedSessions() {
    storageTotalSkippedSessions.Inc()
}

var storageSessionReadDuration = prometheus.NewHistogramVec(
    prometheus.HistogramOpts{
        Namespace: "storage",
        Name:      "read_duration_seconds",
        Help:      "A histogram displaying the duration of reading for each session in seconds.",
        Buckets:   common.DefaultDurationBuckets,
    },
    []string{"file_type"},
)

func RecordSessionReadDuration(durMillis float64, fileType string) {
    storageSessionReadDuration.WithLabelValues(fileType).Observe(durMillis / 1000.0)
}

var storageSessionSortDuration = prometheus.NewHistogramVec(
    prometheus.HistogramOpts{
        Namespace: "storage",
        Name:      "sort_duration_seconds",
        Help:      "A histogram displaying the duration of sorting for each session in seconds.",
        Buckets:   common.DefaultDurationBuckets,
    },
    []string{"file_type"},
)

func RecordSessionSortDuration(durMillis float64, fileType string) {
    storageSessionSortDuration.WithLabelValues(fileType).Observe(durMillis / 1000.0)
}

var storageSessionEncryptionDuration = prometheus.NewHistogramVec(
    prometheus.HistogramOpts{
        Namespace: "storage",
        Name:      "encryption_duration_seconds",
        Help:      "A histogram displaying the duration of encoding for each session in seconds.",
        Buckets:   common.DefaultDurationBuckets,
    },
    []string{"file_type"},
)

func RecordSessionEncryptionDuration(durMillis float64, fileType string) {
    storageSessionEncryptionDuration.WithLabelValues(fileType).Observe(durMillis / 1000.0)
}

var storageSessionCompressDuration = prometheus.NewHistogramVec(
    prometheus.HistogramOpts{
        Namespace: "storage",
        Name:      "compress_duration_seconds",
        Help:      "A histogram displaying the duration of compressing for each session in seconds.",
        Buckets:   common.DefaultDurationBuckets,
    },
    []string{"file_type"},
)

func RecordSessionCompressDuration(durMillis float64, fileType string) {
    storageSessionCompressDuration.WithLabelValues(fileType).Observe(durMillis / 1000.0)
}

var storageSessionUploadDuration = prometheus.NewHistogramVec(
    prometheus.HistogramOpts{
        Namespace: "storage",
        Name:      "upload_duration_seconds",
        Help:      "A histogram displaying the duration of uploading to s3 for each session in seconds.",
        Buckets:   common.DefaultDurationBuckets,
    },
    []string{"file_type"},
)

func RecordSessionUploadDuration(durMillis float64, fileType string) {
    storageSessionUploadDuration.WithLabelValues(fileType).Observe(durMillis / 1000.0)
}

var storageSessionCompressionRatio = prometheus.NewHistogramVec(
    prometheus.HistogramOpts{
        Namespace: "storage",
        Name:      "compression_ratio",
        Help:      "A histogram displaying the compression ratio of mob files for each session.",
        Buckets:   common.DefaultDurationBuckets,
    },
    []string{"file_type"},
)

func RecordSessionCompressionRatio(ratio float64, fileType string) {
    storageSessionCompressionRatio.WithLabelValues(fileType).Observe(ratio)
}

func List() []prometheus.Collector {
    return []prometheus.Collector{
        storageSessionSize,
        storageTotalSessions,
        storageSessionReadDuration,
        storageSessionSortDuration,
        storageSessionEncryptionDuration,
        storageSessionCompressDuration,
        storageSessionUploadDuration,
        storageSessionCompressionRatio,
func New(serviceName string) Images {
    return &imagesImpl{
        originalArchiveSize:               newOriginalArchiveSize(serviceName),
        originalArchiveExtractionDuration: newOriginalArchiveExtractionDuration(serviceName),
        totalSavedArchives:                newTotalSavedArchives(serviceName),
        savingImageDuration:               newSavingImageDuration(serviceName),
        totalSavedImages:                  newTotalSavedImages(serviceName),
        totalCreatedArchives:              newTotalCreatedArchives(serviceName),
        archivingDuration:                 newArchivingDuration(serviceName),
        archiveSize:                       newArchiveSize(serviceName),
        uploadingDuration:                 newUploadingDuration(serviceName),
    }
}

func (i *imagesImpl) List() []prometheus.Collector {
    return []prometheus.Collector{
        i.originalArchiveSize,
        i.originalArchiveExtractionDuration,
        i.totalSavedArchives,
        i.savingImageDuration,
        i.totalSavedImages,
        i.totalCreatedArchives,
        i.archivingDuration,
        i.archiveSize,
        i.uploadingDuration,
    }
}

func newOriginalArchiveSize(serviceName string) prometheus.Histogram {
    return prometheus.NewHistogram(
        prometheus.HistogramOpts{
            Namespace: serviceName,
            Name:      "original_archive_size_bytes",
            Help:      "A histogram displaying the original archive size in bytes.",
            Buckets:   common.DefaultSizeBuckets,
        },
    )
}

func (i *imagesImpl) RecordOriginalArchiveSize(size float64) {
    i.originalArchiveSize.Observe(size)
}

func newOriginalArchiveExtractionDuration(serviceName string) prometheus.Histogram {
    return prometheus.NewHistogram(
        prometheus.HistogramOpts{
            Namespace: serviceName,
            Name:      "original_archive_extraction_duration_seconds",
            Help:      "A histogram displaying the duration of extracting the original archive.",
            Buckets:   common.DefaultDurationBuckets,
        },
    )
}

func (i *imagesImpl) RecordOriginalArchiveExtractionDuration(duration float64) {
    i.originalArchiveExtractionDuration.Observe(duration)
}

func newTotalSavedArchives(serviceName string) prometheus.Counter {
    return prometheus.NewCounter(
        prometheus.CounterOpts{
            Namespace: serviceName,
            Name:      "total_saved_archives",
            Help:      "A counter displaying the total number of saved original archives.",
        },
    )
}

func (i *imagesImpl) IncreaseTotalSavedArchives() {
    i.totalSavedArchives.Inc()
}

func newSavingImageDuration(serviceName string) prometheus.Histogram {
    return prometheus.NewHistogram(
        prometheus.HistogramOpts{
            Namespace: serviceName,
            Name:      "saving_image_duration_seconds",
            Help:      "A histogram displaying the duration of saving each image in seconds.",
            Buckets:   common.DefaultDurationBuckets,
        },
    )
}

func (i *imagesImpl) RecordSavingImageDuration(duration float64) {
    i.savingImageDuration.Observe(duration)
}

func newTotalSavedImages(serviceName string) prometheus.Counter {
    return prometheus.NewCounter(
        prometheus.CounterOpts{
            Namespace: serviceName,
            Name:      "total_saved_images",
            Help:      "A counter displaying the total number of saved images.",
        },
    )
}

func (i *imagesImpl) IncreaseTotalSavedImages() {
    i.totalSavedImages.Inc()
}

func newTotalCreatedArchives(serviceName string) prometheus.Counter {
    return prometheus.NewCounter(
        prometheus.CounterOpts{
            Namespace: serviceName,
            Name:      "total_created_archives",
            Help:      "A counter displaying the total number of created archives.",
        },
    )
}

func (i *imagesImpl) IncreaseTotalCreatedArchives() {
    i.totalCreatedArchives.Inc()
}

func newArchivingDuration(serviceName string) prometheus.Histogram {
    return prometheus.NewHistogram(
        prometheus.HistogramOpts{
            Namespace: serviceName,
            Name:      "archiving_duration_seconds",
            Help:      "A histogram displaying the duration of archiving each session in seconds.",
            Buckets:   common.DefaultDurationBuckets,
        },
    )
}

func (i *imagesImpl) RecordArchivingDuration(duration float64) {
    i.archivingDuration.Observe(duration)
}

func newArchiveSize(serviceName string) prometheus.Histogram {
    return prometheus.NewHistogram(
        prometheus.HistogramOpts{
            Namespace: serviceName,
            Name:      "archive_size_bytes",
            Help:      "A histogram displaying the session's archive size in bytes.",
            Buckets:   common.DefaultSizeBuckets,
        },
    )
}

func (i *imagesImpl) RecordArchiveSize(size float64) {
    i.archiveSize.Observe(size)
}

func newUploadingDuration(serviceName string) prometheus.Histogram {
    return prometheus.NewHistogram(
        prometheus.HistogramOpts{
            Namespace: serviceName,
            Name:      "uploading_duration_seconds",
            Help:      "A histogram displaying the duration of uploading each session's archive to S3 in seconds.",
            Buckets:   common.DefaultDurationBuckets,
        },
    )
}

func (i *imagesImpl) RecordUploadingDuration(duration float64) {
    i.uploadingDuration.Observe(duration)
}
@@ -24,15 +24,17 @@ type authImpl struct {
    spotSecret string
    pgconn     pool.Pool
    keys       keys.Keys
    prefix     string
}

func NewAuth(log logger.Logger, jwtSecret, jwtSpotSecret string, conn pool.Pool, keys keys.Keys) Auth {
func NewAuth(log logger.Logger, jwtSecret, jwtSpotSecret string, conn pool.Pool, keys keys.Keys, prefix string) Auth {
    return &authImpl{
        log:        log,
        secret:     jwtSecret,
        spotSecret: jwtSpotSecret,
        pgconn:     conn,
        keys:       keys,
        prefix:     prefix,
    }
}
@@ -36,9 +36,9 @@ func (e *authImpl) isExtensionRequest(r *http.Request) bool {
    if err != nil {
        e.log.Error(r.Context(), "failed to get path template: %s", err)
    } else {
        if pathTemplate == "/v1/ping" ||
            (pathTemplate == "/v1/spots" && r.Method == "POST") ||
            (pathTemplate == "/v1/spots/{id}/uploaded" && r.Method == "POST") {
        if pathTemplate == e.prefix+"/v1/ping" ||
            (pathTemplate == e.prefix+"/v1/spots" && r.Method == "POST") ||
            (pathTemplate == e.prefix+"/v1/spots/{id}/uploaded" && r.Method == "POST") {
            return true
        }
    }

@@ -53,9 +53,9 @@ func (e *authImpl) isSpotWithKeyRequest(r *http.Request) bool {
    if err != nil {
        return false
    }
    getSpotPrefix := "/v1/spots/{id}"             // GET
    addCommentPrefix := "/v1/spots/{id}/comment"  // POST
    getStatusPrefix := "/v1/spots/{id}/status"    // GET
    getSpotPrefix := e.prefix + "/v1/spots/{id}"            // GET
    addCommentPrefix := e.prefix + "/v1/spots/{id}/comment" // POST
    getStatusPrefix := e.prefix + "/v1/spots/{id}/status"   // GET
    if (pathTemplate == getSpotPrefix && r.Method == "GET") ||
        (pathTemplate == addCommentPrefix && r.Method == "POST") ||
        (pathTemplate == getStatusPrefix && r.Method == "GET") {
|
|||
|
|
@@ -26,7 +26,7 @@ type ServicesBuilder struct {
 	SpotsAPI    api.Handlers
 }

-func NewServiceBuilder(log logger.Logger, cfg *spot.Config, webMetrics web.Web, pgconn pool.Pool) (*ServicesBuilder, error) {
+func NewServiceBuilder(log logger.Logger, cfg *spot.Config, webMetrics web.Web, pgconn pool.Pool, prefix string) (*ServicesBuilder, error) {
 	objStore, err := store.NewStore(&cfg.ObjectsConfig)
 	if err != nil {
 		return nil, err
@@ -45,7 +45,7 @@ func NewServiceBuilder(log logger.Logger, cfg *spot.Config, webMetrics web.Web,
 		return nil, err
 	}
 	return &ServicesBuilder{
-		Auth:        auth.NewAuth(log, cfg.JWTSecret, cfg.JWTSpotSecret, pgconn, keys),
+		Auth:        auth.NewAuth(log, cfg.JWTSecret, cfg.JWTSpotSecret, pgconn, keys, prefix),
 		RateLimiter: limiter.NewUserRateLimiter(10, 30, 1*time.Minute, 5*time.Minute),
 		AuditTrail:  auditrail,
 		SpotsAPI:    handlers,
@@ -1,10 +1,11 @@
 import json
 import logging
 import secrets
+from typing import Optional

 from decouple import config
 from fastapi import BackgroundTasks, HTTPException
-from pydantic import BaseModel
+from pydantic import BaseModel, model_validator
 from starlette import status

 import schemas
@@ -657,14 +658,35 @@ def refresh_auth_exists(user_id, tenant_id, jwt_jti=None):
     return r is not None


-class ChangeJwt(BaseModel):
+class FullLoginJWTs(BaseModel):
     jwt_iat: int
-    jwt_refresh_jti: int
+    jwt_refresh_jti: str
     jwt_refresh_iat: int
     spot_jwt_iat: int
-    spot_jwt_refresh_jti: int
+    spot_jwt_refresh_jti: str
     spot_jwt_refresh_iat: int

+    @model_validator(mode="before")
+    @classmethod
+    def _transform_data(cls, values):
+        if values.get("jwt_refresh_jti") is not None:
+            values["jwt_refresh_jti"] = str(values["jwt_refresh_jti"])
+        if values.get("spot_jwt_refresh_jti") is not None:
+            values["spot_jwt_refresh_jti"] = str(values["spot_jwt_refresh_jti"])
+        return values
+
+
+class RefreshLoginJWTs(FullLoginJWTs):
+    spot_jwt_iat: Optional[int] = None
+    spot_jwt_refresh_jti: Optional[str] = None
+    spot_jwt_refresh_iat: Optional[int] = None
+
+
+class RefreshSpotJWTs(FullLoginJWTs):
+    jwt_iat: Optional[int] = None
+    jwt_refresh_jti: Optional[str] = None
+    jwt_refresh_iat: Optional[int] = None
+
+
 def change_jwt_iat_jti(user_id):
     with pg_client.PostgresClient() as cur:
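Side note on the models above: the refresh-token jti columns come back from Postgres as integers while the token code treats them as strings, hence the before-mode validator rather than plain typed fields. A minimal standalone sketch of that coercion, assuming pydantic v2 and an invented sample value:

    from pydantic import BaseModel, model_validator

    class JWTs(BaseModel):  # trimmed stand-in for FullLoginJWTs
        jwt_refresh_jti: str

        @model_validator(mode="before")
        @classmethod
        def _coerce(cls, values):
            # the DB returns the jti as an int; the JWT layer expects str
            if values.get("jwt_refresh_jti") is not None:
                values["jwt_refresh_jti"] = str(values["jwt_refresh_jti"])
            return values

    print(JWTs(**{"jwt_refresh_jti": 42}).jwt_refresh_jti)  # -> '42'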
@@ -685,7 +707,7 @@ def change_jwt_iat_jti(user_id):
                             {"user_id": user_id})
         cur.execute(query)
         row = cur.fetchone()
-    return ChangeJwt(**row)
+    return FullLoginJWTs(**row)


 def refresh_jwt_iat_jti(user_id):
@@ -700,7 +722,7 @@ def refresh_jwt_iat_jti(user_id):
                             {"user_id": user_id})
         cur.execute(query)
         row = cur.fetchone()
-    return row.get("jwt_iat"), row.get("jwt_refresh_jti"), row.get("jwt_refresh_iat")
+    return RefreshLoginJWTs(**row)


 def authenticate(email, password, for_change_password=False) -> dict | bool | None:
@@ -759,9 +781,12 @@ def authenticate(email, password, for_change_password=False) -> dict | bool | None:
         response = {
             "jwt": authorizers.generate_jwt(user_id=r['userId'], tenant_id=r['tenantId'], iat=j_r.jwt_iat,
                                             aud=AUDIENCE),
-            "refreshToken": authorizers.generate_jwt_refresh(user_id=r['userId'], tenant_id=r['tenantId'],
-                                                             iat=j_r.jwt_refresh_iat, aud=AUDIENCE,
-                                                             jwt_jti=j_r.jwt_refresh_jti),
+            "refreshToken": authorizers.generate_jwt_refresh(user_id=r['userId'],
+                                                             tenant_id=r['tenantId'],
+                                                             iat=j_r.jwt_refresh_iat,
+                                                             aud=AUDIENCE,
+                                                             jwt_jti=j_r.jwt_refresh_jti,
+                                                             for_spot=False),
             "refreshTokenMaxAge": config("JWT_REFRESH_EXPIRATION", cast=int),
             "email": email,
             "spotJwt": authorizers.generate_jwt(user_id=r['userId'], tenant_id=r['tenantId'],
@@ -856,14 +881,14 @@ def logout(user_id: int):
         cur.execute(query)


-def refresh(user_id: int, tenant_id: int) -> dict:
-    jwt_iat, jwt_r_jti, jwt_r_iat = refresh_jwt_iat_jti(user_id=user_id)
+def refresh(user_id: int, tenant_id: int = -1) -> dict:
+    j = refresh_jwt_iat_jti(user_id=user_id)
     return {
-        "jwt": authorizers.generate_jwt(user_id=user_id, tenant_id=tenant_id, iat=jwt_iat,
+        "jwt": authorizers.generate_jwt(user_id=user_id, tenant_id=tenant_id, iat=j.jwt_iat,
                                         aud=AUDIENCE),
-        "refreshToken": authorizers.generate_jwt_refresh(user_id=user_id, tenant_id=tenant_id, iat=jwt_r_iat,
-                                                         aud=AUDIENCE, jwt_jti=jwt_r_jti),
-        "refreshTokenMaxAge": config("JWT_REFRESH_EXPIRATION", cast=int) - (jwt_iat - jwt_r_iat)
+        "refreshToken": authorizers.generate_jwt_refresh(user_id=user_id, tenant_id=tenant_id, iat=j.jwt_refresh_iat,
+                                                         aud=AUDIENCE, jwt_jti=j.jwt_refresh_jti),
+        "refreshTokenMaxAge": config("JWT_REFRESH_EXPIRATION", cast=int) - (j.jwt_iat - j.jwt_refresh_iat),
     }
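For the refreshTokenMaxAge expression above: the cookie lifetime is the configured refresh expiration minus how long the current refresh token has already been alive, so the cookie and the refresh token expire together. A small worked example with invented timestamps (the real values come from config() and the jwt_iat / jwt_refresh_iat columns):

    JWT_REFRESH_EXPIRATION = 604800      # e.g. 7 days, normally read via config(..., cast=int)
    jwt_iat = 1_700_000_100              # iat of the newly issued access token ("now")
    jwt_refresh_iat = 1_700_000_000      # iat of the refresh token being reused

    refresh_token_max_age = JWT_REFRESH_EXPIRATION - (jwt_iat - jwt_refresh_iat)
    print(refresh_token_max_age)         # 604700 seconds left on the cookie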
@@ -14,14 +14,15 @@ from chalicelib.core import webhook
 from chalicelib.core.collaborations.collaboration_slack import Slack
 from chalicelib.core.errors import errors, errors_details
 from chalicelib.core.metrics import heatmaps
-from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_assignments, \
-    sessions_viewed, unprocessed_sessions, sessions_search
-from chalicelib.utils import SAML2_helper, smtp
-from chalicelib.utils import captcha
+from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_viewed, \
+    sessions_assignments, unprocessed_sessions, sessions_search
+from chalicelib.utils import SAML2_helper
+from chalicelib.utils import captcha, smtp
 from chalicelib.utils import contextual_validators
 from chalicelib.utils import helper
 from chalicelib.utils.TimeUTC import TimeUTC
-from or_dependencies import OR_context, OR_scope, OR_role
+from or_dependencies import OR_context, OR_role
+from or_dependencies import OR_scope
 from routers.base import get_routers
 from routers.subs import spot
 from schemas import Permissions, ServicePermissions
@@ -31,7 +32,10 @@ if config("ENABLE_SSO", cast=bool, default=True):
 logger = logging.getLogger(__name__)
 public_app, app, app_apikey = get_routers()

-COOKIE_PATH = "/api/refresh"
+if config("LOCAL_DEV", cast=bool, default=False):
+    COOKIE_PATH = "/refresh"
+else:
+    COOKIE_PATH = "/api/refresh"


 @public_app.get('/signup', tags=['signup'])
@@ -1,3 +1,4 @@
+from decouple import config
 from fastapi import Depends
 from starlette.responses import JSONResponse, Response

@@ -8,7 +9,10 @@ from routers.base import get_routers

 public_app, app, app_apikey = get_routers(prefix="/spot", tags=["spot"])

-COOKIE_PATH = "/api/spot/refresh"
+if config("LOCAL_DEV", cast=bool, default=False):
+    COOKIE_PATH = "/spot/refresh"
+else:
+    COOKIE_PATH = "/api/spot/refresh"


 @app.get('/logout')
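Why COOKIE_PATH is branched on LOCAL_DEV in both routers: in production the API sits behind a proxy that prepends /api, locally it does not, and the refresh cookie must be scoped to exactly the refresh endpoint so the long-lived token is not sent with every request. An illustrative sketch only, not the actual router code; the endpoint and cookie names here are assumptions:

    from decouple import config
    from fastapi import FastAPI, Response

    app = FastAPI()

    if config("LOCAL_DEV", cast=bool, default=False):
        COOKIE_PATH = "/spot/refresh"        # API reached directly, no /api prefix
    else:
        COOKIE_PATH = "/api/spot/refresh"    # behind the ingress that prepends /api

    @app.get("/spot/refresh")
    def refresh(response: Response):
        response.set_cookie(
            key="spotRefreshToken",          # hypothetical cookie name
            value="<refresh-jwt>",
            path=COOKIE_PATH,                # browser sends the cookie only to this path
            httponly=True,
            secure=True,
        )
        return {"ok": True}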
BIN  frontend/app/assets/img/img-tagging.jpg (new file; binary file not shown; after: 88 KiB)
BIN  (second image, filename not shown; binary file not shown; before: 67 KiB, after: 55 KiB)
@@ -28,32 +28,24 @@ function ProjectTags() {
   return (
     <div className="flex flex-col gap-6">
-      <Space direction="vertical">
-        <Typography.Text>
-          {t(
-            'Manage Tag Elements here. Rename tags for easy identification or delete those you no longer need.',
-          )}
-        </Typography.Text>
-        <ul className="!list-disc list-inside">
-          <li>
-            <Typography.Text>
-              {t('To create new tags, navigate to the Tags tab while playing a session')}
-            </Typography.Text>
-          </li>
-          <li>
-            <Typography.Text>
-              {t('Use tags in OmniSearch to quickly find relevant sessions.')}
-            </Typography.Text>
-          </li>
-        </ul>
-      </Space>
       <List
         locale={{
-          emptyText: (
-            <Empty
-              description={t('No tags found')}
-              image={<AnimatedSVG name={ICONS.NO_METADATA} size={60} />}
-            />
+          emptyText: (
+            <div>
+              <div className="w-fit border border-gray-100 rounded-lg overflow-hidden bg-white shadow-sm mx-auto">
+                <div className="w-full h-48 md:h-64 lg:h-96 flex items-center justify-center border border-gray-100 bg-white rounded-md">
+                  <img src="/assets/img/img-tagging.jpg" alt="Tag Elements" className="max-w-full max-h-full object-contain" />
+                </div>
+              </div>
+              <div className="text-center mt-4">
+                <Typography.Text className="my-2 text-lg font-medium">
+                  {t('Organize and Manage Your Element Tags')}
+                </Typography.Text>
+                <div className="mb-2 text-lg text-gray-500 leading-normal">
+                  {t('Tag elements during session playback and use them in OmniSearch to find relevant sessions.')}
+                </div>
+              </div>
+            </div>
           ),
         }}
         loading={tagWatchStore.isLoading}
@@ -41,9 +41,9 @@ function MetricViewHeader() {
   // Show header if there are cards or if a filter is active
   const showHeader = cardsLength > 0 || isFilterActive;

-  useEffect(() => {
-    metricStore.updateKey('sort', { by: 'desc' });
-  }, [metricStore]);
+  // useEffect(() => {
+  //   metricStore.updateKey('sort', { by: 'desc' });
+  // }, [metricStore]);

   const handleMenuClick = ({ key }: { key: string }) => {
     metricStore.updateKey('filter', { ...filter, type: key });
@@ -8,13 +8,13 @@ import {
   Button,
   Dropdown,
   Modal as AntdModal,
-  Avatar,
+  Avatar, TableColumnType
 } from 'antd';
 import {
   TeamOutlined,
   LockOutlined,
   EditOutlined,
-  DeleteOutlined,
+  DeleteOutlined
 } from '@ant-design/icons';
 import { EllipsisVertical } from 'lucide-react';
 import { TablePaginationConfig, SorterResult } from 'antd/lib/table/interface';
@@ -37,90 +37,41 @@ interface Props {
   toggleSelection?: (metricId: number | number[]) => void;
   disableSelection?: boolean;
   inLibrary?: boolean;
+  loading?: boolean;
 }

 const ListView: React.FC<Props> = ({
-  list,
-  siteId,
-  selectedList,
-  toggleSelection,
-  disableSelection = false,
-  inLibrary = false
-}) => {
+  list,
+  siteId,
+  selectedList,
+  toggleSelection,
+  disableSelection = false,
+  inLibrary = false,
+  loading = false
+}) => {
   const { t } = useTranslation();
-  const [sorter, setSorter] = useState<{ field: string; order: 'ascend' | 'descend' }>({
-    field: 'lastModified',
-    order: 'descend',
-  });
-  const [pagination, setPagination] = useState<TablePaginationConfig>({
-    current: 1,
-    pageSize: 10,
-  });
   const [editingMetricId, setEditingMetricId] = useState<number | null>(null);
   const [newName, setNewName] = useState('');
   const { metricStore } = useStore();
   const history = useHistory();

-  const sortedData = useMemo(
-    () =>
-      [...list].sort((a, b) => {
-        if (sorter.field === 'lastModified') {
-          return sorter.order === 'ascend'
-            ? new Date(a.lastModified).getTime() -
-              new Date(b.lastModified).getTime()
-            : new Date(b.lastModified).getTime() -
-              new Date(a.lastModified).getTime();
-        }
-        if (sorter.field === 'name') {
-          return sorter.order === 'ascend'
-            ? a.name?.localeCompare(b.name) || 0
-            : b.name?.localeCompare(a.name) || 0;
-        }
-        if (sorter.field === 'owner') {
-          return sorter.order === 'ascend'
-            ? a.owner?.localeCompare(b.owner) || 0
-            : b.owner?.localeCompare(a.owner) || 0;
-        }
-        return 0;
-      }),
-    [list, sorter],
-  );
-
-  const paginatedData = useMemo(() => {
-    const start = ((pagination.current || 1) - 1) * (pagination.pageSize || 10);
-    return sortedData.slice(start, start + (pagination.pageSize || 10));
-  }, [sortedData, pagination]);

   const totalMessage = (
     <>
       {t('Showing')}{' '}
       <Text strong>
-        {(pagination.pageSize || 10) * ((pagination.current || 1) - 1) + 1}
+        {(metricStore.pageSize || 10) * ((metricStore.page || 1) - 1) + 1}
       </Text>{' '}
       {t('to')}{' '}
       <Text strong>
         {Math.min(
-          (pagination.pageSize || 10) * (pagination.current || 1),
-          list.length,
+          (metricStore.pageSize || 10) * (metricStore.page || 1),
+          list.length
         )}
       </Text>{' '}
       {t('of')} <Text strong>{list.length}</Text> {t('cards')}
     </>
   );

-  const handleTableChange = (
-    pag: TablePaginationConfig,
-    _filters: Record<string, (string | number | boolean)[] | null>,
-    sorterParam: SorterResult<Widget> | SorterResult<Widget>[],
-  ) => {
-    const sortRes = sorterParam as SorterResult<Widget>;
-    setSorter({
-      field: sortRes.field as string,
-      order: sortRes.order as 'ascend' | 'descend',
-    });
-    setPagination(pag);
-  };

   const parseDate = (dateString: string) => {
     let date = new Date(dateString);
     if (isNaN(date.getTime())) {
@@ -182,7 +133,7 @@ const ListView: React.FC<Props> = ({
       cancelText: t('No'),
       onOk: async () => {
         await metricStore.delete(metric);
-      },
+      }
     });
   }
   if (key === 'rename') {
@@ -206,7 +157,7 @@ const ListView: React.FC<Props> = ({

   const menuItems = [
     { key: 'rename', icon: <EditOutlined />, label: t('Rename') },
-    { key: 'delete', icon: <DeleteOutlined />, label: t('Delete') },
+    { key: 'delete', icon: <DeleteOutlined />, label: t('Delete') }
   ];

   const renderTitle = (_text: string, metric: Widget) => (
@@ -245,80 +196,109 @@ const ListView: React.FC<Props> = ({
     </div>
   );

-  const columns = [
+  const columns: TableColumnType<any>[] = [
     {
       title: t('Title'),
       dataIndex: 'name',
       key: 'title',
       className: 'cap-first pl-4',
       sorter: true,
+      sortOrder: metricStore.sort.field === 'name' ? metricStore.sort.order : undefined,
       width: inLibrary ? '31%' : '25%',
-      render: renderTitle,
+      render: renderTitle
     },
     {
       title: t('Owner'),
-      dataIndex: 'owner',
+      dataIndex: 'owner_email',
       key: 'owner',
       className: 'capitalize',
       sorter: true,
+      sortOrder: metricStore.sort.field === 'owner_email' ? metricStore.sort.order : undefined,
       width: inLibrary ? '31%' : '25%',
-      render: renderOwner,
+      render: renderOwner
     },
     {
       title: t('Last Modified'),
-      dataIndex: 'lastModified',
+      dataIndex: 'edited_at',
       key: 'lastModified',
       sorter: true,
+      sortOrder: metricStore.sort.field === 'edited_at' ? metricStore.sort.order : undefined,
       width: inLibrary ? '31%' : '25%',
-      render: renderLastModified,
-    },
+      render: renderLastModified
+    }
   ];

   if (!inLibrary) {
     columns.push({
       title: '',
       key: 'options',
       className: 'text-right',
       width: '5%',
-      render: renderOptions,
+      render: renderOptions
     });
   }

+  // if (metricStore.sort.field) {
+  //   columns.forEach((col) => {
+  //     col.sortOrder = col.key === metricStore.sort.field ? metricStore.sort.order : false;
+  //   });
+  // }
+
+  console.log('store', metricStore.sort);
+
+  const handleTableChange = (
+    pag: TablePaginationConfig,
+    _filters: Record<string, (string | number | boolean)[] | null>,
+    sorterParam: SorterResult<Widget> | SorterResult<Widget>[]
+  ) => {
+    const sorter = Array.isArray(sorterParam) ? sorterParam[0] : sorterParam;
+    let order = sorter.order;
+    if (metricStore.sort.field === sorter.field) {
+      order = metricStore.sort.order === 'ascend' ? 'descend' : 'ascend';
+    }
+    console.log('sorter', { field: sorter.field, order });
+    metricStore.updateKey('sort', { field: sorter.field, order });
+    metricStore.updateKey('page', pag.current || 1);
+  };

   return (
     <>
       <Table
+        loading={loading}
         columns={columns}
-        dataSource={paginatedData}
+        dataSource={list}
         rowKey="metricId"
         showSorterTooltip={false}
         onChange={handleTableChange}
         sortDirections={['ascend', 'descend']}
         onRow={
           inLibrary
             ? (record) => ({
-                onClick: () => {
-                  if (!disableSelection) toggleSelection?.(record?.metricId);
-                },
-              })
+              onClick: () => {
+                if (!disableSelection) toggleSelection?.(record?.metricId);
+              }
+            })
             : undefined
         }
         rowSelection={
           !disableSelection
             ? {
-                selectedRowKeys: selectedList,
-                onChange: (keys) => toggleSelection && toggleSelection(keys),
-                columnWidth: 16,
-              }
+              selectedRowKeys: selectedList,
+              onChange: (keys) => toggleSelection && toggleSelection(keys),
+              columnWidth: 16
+            }
             : undefined
         }
         pagination={{
-          current: pagination.current,
-          pageSize: pagination.pageSize,
-          total: sortedData.length,
+          current: metricStore.page,
+          pageSize: metricStore.pageSize,
+          total: metricStore.total,
           showSizeChanger: false,
           className: 'px-4',
           showLessItems: true,
           showTotal: () => totalMessage,
           size: 'small',
-          simple: true,
+          simple: true
         }}
       />
       <AntdModal
@@ -6,16 +6,15 @@ import { sliceListPerPage } from 'App/utils';
 import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG';
 import { Popover, Button } from 'antd';
 import { PlusOutlined } from '@ant-design/icons';
-import GridView from './GridView';
 import ListView from './ListView';
 import AddCardSection from '../AddCardSection/AddCardSection';
 import { useTranslation } from 'react-i18next';

 function MetricsList({
-  siteId,
-  onSelectionChange,
-  inLibrary,
-}: {
+  siteId,
+  onSelectionChange,
+  inLibrary
+}: {
   siteId: string;
   onSelectionChange?: (selected: any[]) => void;
   inLibrary?: boolean;
@@ -23,28 +22,27 @@ function MetricsList({
   const { t } = useTranslation();
   const { metricStore, dashboardStore } = useStore();
-  const metricsSearch = metricStore.filter.query;
   const listView = inLibrary ? true : metricStore.listView;
   const [selectedMetrics, setSelectedMetrics] = useState<any>([]);

   const dashboard = dashboardStore.selectedDashboard;
   const existingCardIds = useMemo(
     () => dashboard?.widgets?.map((i) => parseInt(i.metricId)),
-    [dashboard],
+    [dashboard]
   );
   const cards = useMemo(
     () =>
       onSelectionChange
         ? metricStore.filteredCards.filter(
-            (i) => !existingCardIds?.includes(parseInt(i.metricId)),
-          )
+          (i) => !existingCardIds?.includes(parseInt(i.metricId))
+        )
         : metricStore.filteredCards,
-    [metricStore.filteredCards, existingCardIds, onSelectionChange],
+    [metricStore.filteredCards, existingCardIds, onSelectionChange]
   );
   const loading = metricStore.isLoading;

   useEffect(() => {
     void metricStore.fetchList();
-  }, [metricStore]);
+  }, [metricStore.page, metricStore.filter, metricStore.sort]);

   useEffect(() => {
     if (!onSelectionChange) return;
@@ -69,14 +67,8 @@ function MetricsList({
     metricStore.updateKey('sessionsPage', 1);
   }, [metricStore]);

-  const showOwn = metricStore.filter.showMine;
-  const toggleOwn = () => {
-    metricStore.updateKey('showMine', !showOwn);
-  };
-
-  const isFiltered =
-    metricsSearch !== '' ||
-    (metricStore.filter.type && metricStore.filter.type !== 'all');
+  const isFiltered = metricStore.filter.query !== '' || metricStore.filter.type !== 'all';

   const searchImageDimensions = { width: 60, height: 'auto' };
   const defaultImageDimensions = { width: 600, height: 'auto' };
@@ -86,101 +78,65 @@ function MetricsList({
      : defaultImageDimensions;

  return (
    <Loader loading={loading}>
      <NoContent
        show={length === 0}
        title={
          <div className="flex flex-col items-center justify-center">
            <AnimatedSVG name={emptyImage} size={imageDimensions.width} />
            <div className="text-center mt-3 text-lg font-medium">
              {isFiltered
                ? t('No matching results')
                : t('Unlock insights with data cards')}
            </div>
    <NoContent
      show={!loading && length === 0}
      title={
        <div className="flex flex-col items-center justify-center">
          <AnimatedSVG name={emptyImage} size={imageDimensions.width} />
          <div className="text-center mt-3 text-lg font-medium">
            {isFiltered
              ? t('No matching results')
              : t('Unlock insights with data cards')}
          </div>
          }
          subtext={
            isFiltered ? (
              ''
            ) : (
              <div className="flex flex-col items-center">
                <div>
                  {t('Create and customize cards to analyze trends and user behavior effectively.')}
                </div>
                <Popover
                  arrow={false}
                  overlayInnerStyle={{ padding: 0, borderRadius: '0.75rem' }}
                  content={<AddCardSection fit inCards />}
                  trigger="click"
                >
                  <Button
                    type="primary"
                    icon={<PlusOutlined />}
                    className="btn-create-card mt-3"
                  >
                    {t('Create Card')}
                  </Button>
                </Popover>
              </div>
            )
          }
        >
          {listView ? (
            <ListView
              disableSelection={!onSelectionChange}
              siteId={siteId}
              list={cards}
              inLibrary={inLibrary}
              selectedList={selectedMetrics}
              existingCardIds={existingCardIds}
              toggleSelection={toggleMetricSelection}
              allSelected={cards.length === selectedMetrics.length}
              showOwn={showOwn}
              toggleOwn={toggleOwn}
              toggleAll={({ target: { checked } }) =>
                setSelectedMetrics(
                  checked
                    ? cards
                        .map((i: any) => i.metricId)
                        .slice(0, 30 - (existingCardIds?.length || 0))
                    : [],
                )
              }
            />
        </div>
      }
      subtext={
        isFiltered ? (
          ''
        ) : (
          <>
            <GridView
              siteId={siteId}
              list={sliceListPerPage(
                cards,
                metricStore.page - 1,
                metricStore.pageSize,
              )}
              selectedList={selectedMetrics}
              toggleSelection={toggleMetricSelection}
            />
            <div className="w-full flex items-center justify-between py-4 px-6 border-t">
              <div>
                {t('Showing')}{' '}
                <span className="font-medium">
                  {Math.min(cards.length, metricStore.pageSize)}
                </span>{' '}
                {t('out of')}
                <span className="font-medium">{cards.length}</span>
                {t('cards')}
              </div>
              <Pagination
                page={metricStore.page}
                total={length}
                onPageChange={(page) => metricStore.updateKey('page', page)}
                limit={metricStore.pageSize}
                debounceRequest={100}
              />
          <div className="flex flex-col items-center">
            <div>
              {t('Create and customize cards to analyze trends and user behavior effectively.')}
            </div>
          </>
        )}
      </NoContent>
    </Loader>
            <Popover
              arrow={false}
              overlayInnerStyle={{ padding: 0, borderRadius: '0.75rem' }}
              content={<AddCardSection fit inCards />}
              trigger="click"
            >
              <Button
                type="primary"
                icon={<PlusOutlined />}
                className="btn-create-card mt-3"
              >
                {t('Create Card')}
              </Button>
            </Popover>
          </div>
        )
      }
    >
      <ListView
        loading={loading}
        disableSelection={!onSelectionChange}
        siteId={siteId}
        list={cards}
        inLibrary={inLibrary}
        selectedList={selectedMetrics}
        // existingCardIds={existingCardIds}
        toggleSelection={toggleMetricSelection}
        // allSelected={cards.length === selectedMetrics.length}
        // toggleAll={({ target: { checked } }) =>
        //   setSelectedMetrics(
        //     checked
        //       ? cards
        //         .map((i: any) => i.metricId)
        //         .slice(0, 30 - (existingCardIds?.length || 0))
        //       : []
        //   )
        // }
      />
    </NoContent>
  );
}
@@ -37,7 +37,7 @@ function Controls(props: any) {
   const session = sessionStore.current;
   const fetchAssistSessions = sessionStore.fetchLiveSessions;
   const totalAssistSessions = sessionStore.totalLiveSessions;
-  const closedLive = !!sessionStore.errorStack || !!sessionStore.current;
+  const closedLive = !!sessionStore.errorStack?.length || !sessionStore.current;

   const onKeyDown = (e: any) => {
     if (
@@ -15,7 +15,7 @@ export default function MetaMoreButton(props: Props) {
     <Popover
       render={() => (
         <div
-          className="text-sm grid grid-col p-4 gap-3 bg-white"
+          className="text-sm grid grid-col gap-3 bg-white"
           style={{ maxHeight: '200px', overflowY: 'auto' }}
         >
           {list.slice(maxLength).map(({ label, value }, index) => (
@@ -26,7 +26,7 @@ export default function MetaMoreButton(props: Props) {
       placement="bottom"
     >
       <div className="flex items-center">
-        <Button size={'small'} variant="text">
+        <Button type="link">
           +{list.length - maxLength}
           {' '}
           {t('More')}
@@ -288,7 +288,7 @@ function SessionItem(props: RouteComponentProps & Props) {
         </div>
       </div>
       {_metaList.length > 0 && (
-        <SessionMetaList metaList={_metaList} />
+        <SessionMetaList maxLength={1} metaList={_metaList} />
       )}
     </div>
   )}
@@ -20,7 +20,7 @@ function SessionDateRange() {
     searchStore.applyFilter(dateValues);
   };
   return (
-    <div className="flex items-center">
+    <div className="flex items-center text-start">
       <span className="mr-1">
         {t('No sessions')}
         {isCustom ? t('between') : t('in the')}
@@ -779,7 +779,7 @@
   "min ago.": "минут назад",
   "Error getting service health status": "Ошибка при получении статуса работоспособности сервиса",
   "Captured": "Зафиксировано",
-  "Events": "События",
+  "Events": "Событий",
   "Observed installation Issue with the following": "Выявлены проблемы при установке с",
   "Version": "Версия",
   "Error log:": "Журнал ошибок:",
@@ -10,7 +10,7 @@ import {
   HEATMAP,
   USER_PATH,
   RETENTION,
-  CATEGORIES,
+  CATEGORIES
 } from 'App/constants/card';
 import { clickmapFilter } from 'App/types/filter/newFilter';
 import { getRE } from 'App/utils';
@@ -31,7 +31,7 @@ const handleFilter = (card: Widget, filterType?: string) => {
       FilterKey.ERRORS,
       FilterKey.FETCH,
       `${TIMESERIES}_4xx_requests`,
-      `${TIMESERIES}_slow_network_requests`,
+      `${TIMESERIES}_slow_network_requests`
     ].includes(metricOf);
   }
   if (filterType === CATEGORIES.web_analytics) {
@@ -41,7 +41,7 @@ const handleFilter = (card: Widget, filterType?: string) => {
       FilterKey.REFERRER,
       FilterKey.USERID,
       FilterKey.LOCATION,
-      FilterKey.USER_DEVICE,
+      FilterKey.USER_DEVICE
     ].includes(metricOf);
   }
 } else {
@@ -75,58 +75,42 @@ interface MetricFilter {
   query?: string;
   showMine?: boolean;
   type?: string;
-  dashboard?: [];
+  // dashboard?: [];
 }

 export default class MetricStore {
   isLoading: boolean = false;
   isSaving: boolean = false;
   metrics: Widget[] = [];
   instance = new Widget();
   page: number = 1;
   total: number = 0;
   pageSize: number = 10;
   metricsSearch: string = '';
-  sort: any = { by: 'desc' };
-  filter: MetricFilter = { type: 'all', dashboard: [], query: '' };
+  sort: any = { columnKey: '', field: '', order: false };
+  filter: any = { type: '', query: '' };
   sessionsPage: number = 1;
   sessionsPageSize: number = 10;
   listView?: boolean = true;
   clickMapFilter: boolean = false;
   clickMapSearch = '';
   clickMapLabel = '';
   cardCategory: string | null = CATEGORIES.product_analytics;
   focusedSeriesName: string | null = null;
   disabledSeries: string[] = [];
   drillDown = false;

   constructor() {
     makeAutoObservable(this);
   }

-  get sortedWidgets() {
-    return [...this.metrics].sort((a, b) =>
-      this.sort.by === 'desc'
-        ? b.lastModified - a.lastModified
-        : a.lastModified - b.lastModified,
-    );
-  }
+  // get sortedWidgets() {
+  //   return [...this.metrics].sort((a, b) =>
+  //     this.sort.by === 'desc'
+  //       ? b.lastModified - a.lastModified
+  //       : a.lastModified - b.lastModified
+  //   );
+  // }

   get filteredCards() {
     const filterRE = this.filter.query ? getRE(this.filter.query, 'i') : null;
@@ -138,7 +122,7 @@ export default class MetricStore {
         (card) =>
           (this.filter.showMine
             ? card.owner ===
-              JSON.parse(localStorage.getItem('user')!).account.email
+            JSON.parse(localStorage.getItem('user')!).account.email
             : true) &&
           handleFilter(card, this.filter.type) &&
           (!dbIds.length ||
@@ -147,13 +131,13 @@ export default class MetricStore {
             .some((id) => dbIds.includes(id))) &&
           // @ts-ignore
           (!filterRE ||
-            ['name', 'owner'].some((key) => filterRE.test(card[key]))),
-      )
-      .sort((a, b) =>
-        this.sort.by === 'desc'
-          ? b.lastModified - a.lastModified
-          : a.lastModified - b.lastModified,
+            ['name', 'owner'].some((key) => filterRE.test(card[key])))
       );
+    // .sort((a, b) =>
+    //   this.sort.by === 'desc'
+    //     ? b.lastModified - a.lastModified
+    //     : a.lastModified - b.lastModified
+    //   );
   }

   // State Actions
@@ -182,6 +166,7 @@ export default class MetricStore {
   }

   updateKey(key: string, value: any) {
+    console.log('key', key, value);
     // @ts-ignore
     this[key] = value;

@@ -207,7 +192,7 @@ export default class MetricStore {
       this.instance.series[i].filter.eventsOrderSupport = [
         'then',
         'or',
-        'and',
+        'and'
       ];
     });
     if (type === HEATMAP && 'series' in obj) {
@@ -254,7 +239,7 @@ export default class MetricStore {
       namesMap: {},
       avg: 0,
       percentiles: [],
-      values: [],
+      values: []
     };
     const obj: any = { metricType: value, data: defaultData };
     obj.series = this.instance.series;
@@ -311,7 +296,7 @@ export default class MetricStore {
     if (obj.series[0] && obj.series[0].filter.filters.length < 1) {
       obj.series[0].filter.addFilter({
         ...clickmapFilter,
-        value: [''],
+        value: ['']
       });
     }
   }
@@ -341,7 +326,7 @@ export default class MetricStore {
   updateInList(metric: Widget) {
     // @ts-ignore
     const index = this.metrics.findIndex(
-      (m: Widget) => m[Widget.ID_KEY] === metric[Widget.ID_KEY],
+      (m: Widget) => m[Widget.ID_KEY] === metric[Widget.ID_KEY]
     );
     if (index >= 0) {
       this.metrics[index] = metric;
@@ -358,12 +343,6 @@ export default class MetricStore {
     this.metrics = this.metrics.filter((m) => m[Widget.ID_KEY] !== id);
   }

-  get paginatedList(): Widget[] {
-    const start = (this.page - 1) * this.pageSize;
-    const end = start + this.pageSize;
-    return this.metrics.slice(start, end);
-  }
-
   // API Communication
   async save(metric: Widget): Promise<Widget> {
     this.isSaving = true;
@@ -396,16 +375,27 @@ export default class MetricStore {
     this.metrics = metrics;
   }

-  fetchList() {
+  async fetchList() {
     this.setLoading(true);
-    return metricService
-      .getMetrics()
-      .then((metrics: any[]) => {
-        this.setMetrics(metrics.map((m) => new Widget().fromJson(m)));
-      })
-      .finally(() => {
-        this.setLoading(false);
-      });
+    try {
+      const resp = await metricService
+        .getMetricsPaginated({
+          page: this.page,
+          limit: this.pageSize,
+          sort: {
+            field: this.sort.field,
+            order: this.sort.order === 'ascend' ? 'asc' : 'desc'
+          },
+          filter: {
+            query: this.filter.query,
+            type: this.filter.type === 'all' ? '' : this.filter.type,
+          }
+        });
+      this.total = resp.total;
+      this.setMetrics(resp.list.map((m) => new Widget().fromJson(m)));
+    } finally {
+      this.setLoading(false);
+    }
   }

   fetch(id: string, period?: any) {
@@ -24,6 +24,17 @@ export default class MetricService {
       .then((response: { data: any }) => response.data || []);
   }

+  /**
+   * Get all metrics paginated.
+   * @returns {Promise<any>}
+   */
+  getMetricsPaginated(params: any): Promise<any> {
+    return this.client
+      .post('/cards/search', params)
+      .then((response: { json: () => any }) => response.json())
+      .then((response: { data: any }) => response.data || []);
+  }
+
   /**
    * Get a metric by metricId.
    * @param metricId
@@ -8,8 +8,8 @@ import MiniCssExtractPlugin from 'mini-css-extract-plugin';
 import CompressionPlugin from "compression-webpack-plugin";
 import { EsbuildPlugin } from 'esbuild-loader';

-const dotenv = require('dotenv').config({ path: __dirname + '/.env' })
+const isDevelopment = process.env.NODE_ENV !== 'production'
+const dotenv = require('dotenv').config({ path: __dirname + (isDevelopment ? '/.env' : '/.env.production') });
 const stylesHandler = MiniCssExtractPlugin.loader;
 const ENV_VARIABLES = JSON.stringify(dotenv.parsed);
 import pathAlias from './path-alias';
@@ -89,7 +89,7 @@ spec:
           # 4. Using AWS itself.
           # AWS uses bucketname.endpoint/object while others use endpoint/bucketname/object
           - name: ASSETS_ORIGIN
-            value: "{{ include "openreplay.s3Endpoint" . }}/{{.Values.global.s3.assetsBucket}}"
+            value: "{{ include "openreplay.assets_origin" . }}"
          {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }}
          ports:
          {{- range $key, $val := .Values.service.ports }}
@@ -65,7 +65,7 @@ spec:
           # 4. Using AWS itself.
           # AWS uses bucketname.endpoint/object while others use endpoint/bucketname/object
           - name: ASSETS_ORIGIN
-            value: "{{ include "openreplay.s3Endpoint" . }}/{{.Values.global.s3.assetsBucket}}"
+            value: {{ include "openreplay.assets_origin" . }}
          {{- include "openreplay.env.redis_string" .Values.global.redis | nindent 12 }}
          ports:
          {{- range $key, $val := .Values.service.ports }}
@@ -76,7 +76,7 @@ spec:
           # 4. Using AWS itself.
           # AWS uses bucketname.endpoint/object while others use endpoint/bucketname/object
           - name: ASSETS_ORIGIN
-            value: "{{ include "openreplay.s3Endpoint" . }}/{{.Values.global.s3.assetsBucket}}"
+            value: {{ include "openreplay.assets_origin" . }}
          ports:
          {{- range $key, $val := .Values.service.ports }}
          - name: {{ $key }}
@@ -142,3 +142,11 @@ Create the volume mount config for redis TLS certificates
     subPath: {{ .tls.certCAFilename }}
 {{- end }}
 {{- end }}
+
+{{- define "openreplay.assets_origin"}}
+{{- if .Values.global.assetsOrigin }}
+{{- .Values.global.assetsOrigin }}
+{{- else }}
+{{- include "openreplay.s3Endpoint" . }}/{{.Values.global.s3.assetsBucket}}
+{{- end }}
+{{- end }}