Dev (#2485)
* fix(chalice): fixed Math-operators validation
* refactor(chalice): search for sessions that have events for heatmaps
* refactor(chalice): search for sessions that have at least 1 location event for heatmaps
* feat(chalice): get top 10 values for autocomplete CH
* feat(chalice): autocomplete return top 10 with stats
* fix(chalice): fixed autocomplete top 10 meta-filters
* fix(chalice): fixed predefined metrics
* refactor(chalice): refactored schemas
* refactor(chalice): refactored routers
* refactor(chalice): refactored unprocessed sessions
parent: 6cfecb53a3
commit: b9fc397e72
18 changed files with 161 additions and 146 deletions
@@ -9,6 +9,7 @@ from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.gzip import GZipMiddleware
from psycopg import AsyncConnection
from psycopg.rows import dict_row
from starlette.responses import StreamingResponse

from chalicelib.utils import helper
@@ -20,7 +21,7 @@ from routers.subs import insights, metrics, v1_api, health, usability_tests, spot
loglevel = config("LOGLEVEL", default=logging.WARNING)
print(f">Loglevel set to: {loglevel}")
logging.basicConfig(level=loglevel)
from psycopg.rows import dict_row


class ORPYAsyncConnection(AsyncConnection):
@@ -45,9 +45,8 @@ class JWTAuth(HTTPBearer):
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
                                detail="Invalid authentication scheme.")
        jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials)
        auth_exists = jwt_payload is not None \
                      and users.auth_exists(user_id=jwt_payload.get("userId", -1),
                                            jwt_iat=jwt_payload.get("iat", 100))
        auth_exists = jwt_payload is not None and users.auth_exists(user_id=jwt_payload.get("userId", -1),
                                                                    jwt_iat=jwt_payload.get("iat", 100))
        if jwt_payload is None \
                or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
                or not auth_exists:
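For orientation (the class body is only partially visible in this hunk): subclassing HTTPBearer and overriding __call__ is FastAPI's documented way to plug in custom credential checks. A minimal, self-contained sketch of that shape, not the project's actual class:

from fastapi import Request
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials

class BearerSketch(HTTPBearer):
    async def __call__(self, request: Request) -> HTTPAuthorizationCredentials:
        # parses the Authorization header and enforces the Bearer scheme
        credentials = await super().__call__(request)
        # custom validation (JWT decoding, DB lookup, ...) would go here,
        # raising HTTPException on failure as the hunk above does
        return credentials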
@@ -1,9 +1,6 @@
import logging
from typing import Union

import logging
from typing import Union

import schemas
from chalicelib.core import metrics
@@ -30,7 +27,7 @@ def get_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
        schemas.MetricOfWebVitals.COUNT_REQUESTS: metrics.get_top_metrics_count_requests,
        schemas.MetricOfWebVitals.AVG_TIME_TO_RENDER: metrics.get_time_to_render,
        schemas.MetricOfWebVitals.AVG_USED_JS_HEAP_SIZE: metrics.get_memory_consumption,
        schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu,
        schemas.MetricOfWebVitals.AVG_CPU: metrics.get_avg_cpu,
        schemas.MetricOfWebVitals.AVG_FPS: metrics.get_avg_fps,
        schemas.MetricOfErrors.IMPACTED_SESSIONS_BY_JS_ERRORS: metrics.get_impacted_sessions_by_js_errors,
        schemas.MetricOfErrors.DOMAINS_ERRORS_4XX: metrics.get_domains_errors_4xx,
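The mapping above is dictionary dispatch: get_metric resolves the handler for a metric key and calls it. A minimal sketch of the lookup side (the name `supported` is illustrative, not from the diff):

def get_metric_sketch(key, project_id, **args):
    supported = {
        # enum member -> handler, as in the mapping above
        schemas.MetricOfWebVitals.AVG_CPU: metrics.get_avg_cpu,
    }
    handler = supported.get(key)
    if handler is None:
        raise ValueError(f"unsupported metric: {key}")
    return handler(project_id=project_id, **args)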
api/chalicelib/core/unprocessed_sessions.py (new file, +18)
@@ -0,0 +1,18 @@
import logging

from chalicelib.core import sessions, assist

logger = logging.getLogger(__name__)


def check_exists(project_id, session_id, not_found_response) -> (int | None, dict | None):
    if session_id is None or not session_id.isnumeric():
        return session_id, not_found_response
    else:
        session_id = int(session_id)
        if not sessions.session_exists(project_id=project_id, session_id=session_id):
            logger.warning(f"{project_id}/{session_id} not found in DB.")
            if not assist.session_exists(project_id=project_id, session_id=session_id):
                logger.warning(f"{project_id}/{session_id} not found in Assist.")
                return session_id, not_found_response
    return session_id, None
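The router hunks further down replace their inlined existence checks with this helper; the calling pattern, as it appears there:

    not_found = {"errors": ["Replay file not found"]}
    sessionId, err = unprocessed_sessions.check_exists(project_id=projectId, session_id=sessionId,
                                                       not_found_response=not_found)
    if err is not None:
        return err
    # sessionId is now an int that exists in DB or Assist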
@@ -28,7 +28,7 @@ class TimeUTC:
            .astimezone(UTC_ZI)

    @staticmethod
    def now(delta_days=0, delta_minutes=0, delta_seconds=0):
    def now(delta_days: int = 0, delta_minutes: int = 0, delta_seconds: int = 0) -> int:
        return int(TimeUTC.__now(delta_days=delta_days, delta_minutes=delta_minutes,
                                 delta_seconds=delta_seconds).timestamp() * 1000)
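For reference, datetime.timestamp() returns seconds as a float, so the int(... * 1000) above is what makes the new -> int annotation hold, returning epoch milliseconds:

from datetime import datetime, timezone

seconds = datetime.now(timezone.utc).timestamp()  # float seconds, e.g. 1718000000.123456
millis = int(seconds * 1000)                      # integer epoch milliseconds, the shape TimeUTC.now() returns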
@@ -4,13 +4,11 @@ from decouple import config
from fastapi import Depends, Body, BackgroundTasks

import schemas
from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assignments, projects, \
    alerts, issues, integrations_manager, metadata, \
    log_tool_elasticsearch, log_tool_datadog, \
    log_tool_stackdriver, reset_password, log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, sessions, \
    log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, \
    assist, mobile, tenants, boarding, notifications, webhook, users, \
    custom_metrics, saved_search, integrations_global, tags, autocomplete
from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assignments, projects, alerts, issues, \
    integrations_manager, metadata, log_tool_elasticsearch, log_tool_datadog, log_tool_stackdriver, reset_password, \
    log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, sessions, log_tool_newrelic, announcements, \
    log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, assist, mobile, tenants, boarding, \
    notifications, webhook, users, custom_metrics, saved_search, integrations_global, tags, autocomplete
from chalicelib.core.collaboration_msteams import MSTeams
from chalicelib.core.collaboration_slack import Slack
from or_dependencies import OR_context, OR_role
@@ -556,7 +554,7 @@ def get_all_alerts(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):

@app.get('/{projectId}/alerts/triggers', tags=["alerts", "customMetrics"])
def get_alerts_triggers(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": alerts.get_predefined_values() \
    return {"data": alerts.get_predefined_values()
                    + custom_metrics.get_series_for_alert(project_id=projectId, user_id=context.user_id)}
@@ -839,8 +837,8 @@ def edit_msteams_integration(webhookId: int, data: schemas.EditCollaborationSche
    if old["endpoint"] != data.url.unicode_string():
        if not MSTeams.say_hello(data.url.unicode_string()):
            return {
                "errors": [
                    "We couldn't send you a test message on your Microsoft Teams channel. Please verify your webhook url."]
                "errors": ["We couldn't send you a test message on your Microsoft Teams channel. "
                           "Please verify your webhook url."]
            }
    return {"data": webhook.update(tenant_id=context.tenant_id, webhook_id=webhookId,
                                   changes={"name": data.name, "endpoint": data.url.unicode_string()})}
@@ -1,3 +1,4 @@
import logging
from typing import Optional, Union

from decouple import config
@@ -6,12 +7,13 @@ from fastapi import HTTPException, status
from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Response

import schemas
from chalicelib.core import scope
from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, sessions_assignments, heatmaps, \
    sessions_favorite, assist, sessions_notes, sessions_replay, signup, feature_flags
from chalicelib.core import sessions_viewed
from chalicelib.core import tenants, users, projects, license
from chalicelib.core import unprocessed_sessions
from chalicelib.core import webhook
from chalicelib.core import scope
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import captcha, smtp
from chalicelib.utils import helper
@@ -19,6 +21,7 @@ from chalicelib.utils.TimeUTC import TimeUTC
from or_dependencies import OR_context, OR_role
from routers.base import get_routers

logger = logging.getLogger(__name__)
public_app, app, app_apikey = get_routers()

COOKIE_PATH = "/api/refresh"
@@ -249,7 +252,7 @@ def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema


@app.get('/{projectId}/sessions/{sessionId}/first-mob', tags=["sessions", "replay"])
def get_first_mob_file(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
def get_first_mob_file(projectId: int, sessionId: Union[int, str],
                       context: schemas.CurrentContext = Depends(OR_context)):
    if not sessionId.isnumeric():
        return {"errors": ["session not found"]}
@@ -368,16 +371,10 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks,
def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
                                 context: schemas.CurrentContext = Depends(OR_context)):
    not_found = {"errors": ["Replay file not found"]}
    if not sessionId.isnumeric():
        return not_found
    else:
        sessionId = int(sessionId)
        if not sessions.session_exists(project_id=projectId, session_id=sessionId):
            print(f"{projectId}/{sessionId} not found in DB.")
            if not assist.session_exists(project_id=projectId, session_id=sessionId):
                print(f"{projectId}/{sessionId} not found in Assist.")
                return not_found

    sessionId, err = unprocessed_sessions.check_exists(project_id=projectId, session_id=sessionId,
                                                       not_found_response=not_found)
    if err is not None:
        return err
    path = assist.get_raw_mob_by_id(project_id=projectId, session_id=sessionId)
    if path is None:
        return not_found
@@ -389,19 +386,13 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
                                   context: schemas.CurrentContext = Depends(OR_context)):
    not_found = {"errors": ["Devtools file not found"]}
    if not sessionId.isnumeric():
        return not_found
    else:
        sessionId = int(sessionId)
        if not sessions.session_exists(project_id=projectId, session_id=sessionId):
            print(f"{projectId}/{sessionId} not found in DB.")
            if not assist.session_exists(project_id=projectId, session_id=sessionId):
                print(f"{projectId}/{sessionId} not found in Assist.")
                return not_found

    sessionId, err = unprocessed_sessions.check_exists(project_id=projectId, session_id=sessionId,
                                                       not_found_response=not_found)
    if err is not None:
        return err
    path = assist.get_raw_devtools_by_id(project_id=projectId, session_id=sessionId)
    if path is None:
        return {"errors": ["Devtools file not found"]}
        return not_found

    return FileResponse(path=path, media_type="application/octet-stream")
@@ -1,2 +1,2 @@
from .schemas import *
from . import overrides as _overrides
from . import overrides as _overrides
@@ -34,6 +34,6 @@ T = TypeVar('T')


class ORUnion:
    def __new__(self, union_types: Union[AnyType], discriminator: str) -> T:
    def __new__(cls, union_types: Union[AnyType], discriminator: str) -> T:
        return lambda **args: TypeAdapter(Annotated[union_types, Field(discriminator=discriminator)]) \
            .validate_python(args)
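What ORUnion returns is a callable that validates kwargs against a discriminated union. A self-contained sketch of the same Pydantic v2 mechanism (Cat/Dog are invented for the example):

from typing import Annotated, Literal, Union
from pydantic import BaseModel, Field, TypeAdapter

class Cat(BaseModel):
    kind: Literal["cat"]
    lives: int = 9

class Dog(BaseModel):
    kind: Literal["dog"]
    good_boy: bool = True

# the discriminator tells Pydantic which branch to validate against
adapter = TypeAdapter(Annotated[Union[Cat, Dog], Field(discriminator="kind")])
print(adapter.validate_python({"kind": "cat"}))  # Cat(kind='cat', lives=9)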
@@ -1,14 +1,14 @@
from typing import Annotated, Any
from typing import Optional, List, Union, Literal

from pydantic import Field, EmailStr, HttpUrl, SecretStr, AnyHttpUrl, validator
from pydantic import Field, EmailStr, HttpUrl, SecretStr, AnyHttpUrl
from pydantic import field_validator, model_validator, computed_field
from pydantic.functional_validators import BeforeValidator

from chalicelib.utils.TimeUTC import TimeUTC
from .overrides import BaseModel, Enum, ORUnion
from .transformers_validators import transform_email, remove_whitespace, remove_duplicate_values, single_to_list, \
    force_is_event, NAME_PATTERN, int_to_string
from pydantic.functional_validators import BeforeValidator


def transform_old_filter_type(cls, values):
@@ -162,7 +162,7 @@ class _TimedSchema(BaseModel):
    endTimestamp: int = Field(default=None)

    @model_validator(mode='before')
    def transform_time(cls, values):
    def transform_time(self, values):
        if values.get("startTimestamp") is None and values.get("startDate") is not None:
            values["startTimestamp"] = values["startDate"]
        if values.get("endTimestamp") is None and values.get("endDate") is not None:
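This commit renames the first argument of many validators from cls to self. For reference, a self-contained sketch (hypothetical Window model) of the two documented model_validator forms in Pydantic v2: 'before' works on the raw input dict, 'after' on the validated instance:

from pydantic import BaseModel, model_validator

class Window(BaseModel):
    start: int
    end: int

    @model_validator(mode="before")
    @classmethod
    def fill_end(cls, values: dict) -> dict:
        # raw payload: keys may still be missing at this point
        if isinstance(values, dict) and values.get("end") is None:
            values["end"] = values.get("start", 0) + 1
        return values

    @model_validator(mode="after")
    def check_order(self) -> "Window":
        # validated model: fields are attributes now
        assert self.start <= self.end, "start must not exceed end"
        return self

print(Window(start=5))  # start=5 end=6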
@@ -170,7 +170,7 @@ class _TimedSchema(BaseModel):
        return values

    @model_validator(mode='after')
    def __time_validator(cls, values):
    def __time_validator(self, values):
        if values.startTimestamp is not None:
            assert 0 <= values.startTimestamp, "startTimestamp must be greater or equal to 0"
        if values.endTimestamp is not None:
@@ -435,7 +435,7 @@ class AlertSchema(BaseModel):
    series_id: Optional[int] = Field(default=None, doc_hidden=True)

    @model_validator(mode="after")
    def transform_alert(cls, values):
    def transform_alert(self, values):
        values.series_id = None
        if isinstance(values.query.left, int):
            values.series_id = values.query.left
@@ -626,7 +626,7 @@ class SessionSearchEventSchema2(BaseModel):
    _transform = model_validator(mode='before')(transform_old_filter_type)

    @model_validator(mode='after')
    def event_validator(cls, values):
    def event_validator(self, values):
        if isinstance(values.type, PerformanceEventType):
            if values.type == PerformanceEventType.FETCH_FAILED:
                return values
@@ -666,7 +666,7 @@ class SessionSearchFilterSchema(BaseModel):
    _single_to_list_values = field_validator('value', mode='before')(single_to_list)

    @model_validator(mode='before')
    def _transform_data(cls, values):
    def _transform_data(self, values):
        if values.get("source") is not None:
            if isinstance(values["source"], list):
                if len(values["source"]) == 0:
@@ -678,20 +678,20 @@ class SessionSearchFilterSchema(BaseModel):
        return values

    @model_validator(mode='after')
    def filter_validator(cls, values):
    def filter_validator(self, values):
        if values.type == FilterType.METADATA:
            assert values.source is not None and len(values.source) > 0, \
                "must specify a valid 'source' for metadata filter"
        elif values.type == FilterType.ISSUE:
            for v in values.value:
            for i, v in enumerate(values.value):
                if IssueType.has_value(v):
                    v = IssueType(v)
                    values.value[i] = IssueType(v)
                else:
                    raise ValueError(f"value should be of type IssueType for {values.type} filter")
        elif values.type == FilterType.PLATFORM:
            for v in values.value:
            for i, v in enumerate(values.value):
                if PlatformType.has_value(v):
                    v = PlatformType(v)
                    values.value[i] = PlatformType(v)
                else:
                    raise ValueError(f"value should be of type PlatformType for {values.type} filter")
        elif values.type == FilterType.EVENTS_COUNT:
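The enumerate change above fixes a real bug: rebinding the loop variable never touches the list, so the coerced enum values were silently discarded. A quick demonstration:

values = ["click", "dead_click"]

for v in values:
    v = v.upper()          # rebinds the local name only
print(values)              # ['click', 'dead_click'] -- unchanged

for i, v in enumerate(values):
    values[i] = v.upper()  # writes back through the index
print(values)              # ['CLICK', 'DEAD_CLICK']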
@@ -730,8 +730,8 @@ def add_missing_is_event(values: dict):


# this type is created to allow mixing events&filters and specifying a discriminator
GroupedFilterType = Annotated[Union[SessionSearchFilterSchema, SessionSearchEventSchema2], \
                              Field(discriminator='is_event'), BeforeValidator(add_missing_is_event)]
GroupedFilterType = Annotated[Union[SessionSearchFilterSchema, SessionSearchEventSchema2],
                              Field(discriminator='is_event'), BeforeValidator(add_missing_is_event)]


class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
@@ -744,7 +744,7 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
    bookmarked: bool = Field(default=False)

    @model_validator(mode="before")
    def transform_order(cls, values):
    def transform_order(self, values):
        if values.get("sort") is None:
            values["sort"] = "startTs"
@@ -755,7 +755,7 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
        return values

    @model_validator(mode="before")
    def add_missing_attributes(cls, values):
    def add_missing_attributes(self, values):
        # in case isEvent is wrong:
        for f in values.get("filters") or []:
            if EventType.has_value(f["type"]) and not f.get("isEvent"):
@@ -770,7 +770,7 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
        return values

    @model_validator(mode="before")
    def remove_wrong_filter_values(cls, values):
    def remove_wrong_filter_values(self, values):
        for f in values.get("filters", []):
            vals = []
            for v in f.get("value", []):
@@ -780,7 +780,7 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
        return values

    @model_validator(mode="after")
    def split_filters_events(cls, values):
    def split_filters_events(self, values):
        n_filters = []
        n_events = []
        for v in values.filters:
@@ -793,7 +793,7 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
        return values

    @field_validator("filters", mode="after")
    def merge_identical_filters(cls, values):
    def merge_identical_filters(self, values):
        # ignore 'issue' type as it could be used for step-filters and tab-filters at the same time
        i = 0
        while i < len(values):
@@ -853,7 +853,7 @@ class PathAnalysisSubFilterSchema(BaseModel):
    _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)

    @model_validator(mode="before")
    def __force_is_event(cls, values):
    def __force_is_event(self, values):
        values["isEvent"] = True
        return values
@@ -879,8 +879,8 @@ class _ProductAnalyticsEventFilter(BaseModel):


# this type is created to allow mixing events&filters and specifying a discriminator for PathAnalysis series filter
ProductAnalyticsFilter = Annotated[Union[_ProductAnalyticsFilter, _ProductAnalyticsEventFilter], \
                                   Field(discriminator='is_event')]
ProductAnalyticsFilter = Annotated[Union[_ProductAnalyticsFilter, _ProductAnalyticsEventFilter],
                                   Field(discriminator='is_event')]


class PathAnalysisSchema(_TimedSchema, _PaginatedSchema):
@@ -1033,7 +1033,7 @@ class MetricOfPathAnalysis(str, Enum):
# class CardSessionsSchema(SessionsSearchPayloadSchema):
class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
    startTimestamp: int = Field(default=TimeUTC.now(-7))
    endTimestamp: int = Field(defautl=TimeUTC.now())
    endTimestamp: int = Field(default=TimeUTC.now())
    density: int = Field(default=7, ge=1, le=200)
    series: List[CardSeriesSchema] = Field(default=[])
@@ -1047,7 +1047,7 @@ class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
    (force_is_event(events_enum=[EventType, PerformanceEventType]))

    @model_validator(mode="before")
    def remove_wrong_filter_values(cls, values):
    def remove_wrong_filter_values(self, values):
        for f in values.get("filters", []):
            vals = []
            for v in f.get("value", []):
@@ -1057,7 +1057,7 @@ class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
        return values

    @model_validator(mode="before")
    def __enforce_default(cls, values):
    def __enforce_default(self, values):
        if values.get("startTimestamp") is None:
            values["startTimestamp"] = TimeUTC.now(-7)
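The before-validator above exists partly because Field(default=TimeUTC.now(-7)) is evaluated once, at import time, so a long-running worker would otherwise keep serving a stale timestamp. A standalone sketch of the pitfall and the default_factory alternative:

import time
from pydantic import BaseModel, Field

class Query(BaseModel):
    # evaluated once, when the class is defined -- goes stale in a long-lived process
    stale_ts: int = Field(default=int(time.time() * 1000))
    # evaluated on every instantiation
    fresh_ts: int = Field(default_factory=lambda: int(time.time() * 1000))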
@@ -1067,7 +1067,7 @@ class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
        return values

    @model_validator(mode="after")
    def __enforce_default_after(cls, values):
    def __enforce_default_after(self, values):
        for s in values.series:
            if s.filter is not None:
                s.filter.limit = values.limit
@@ -1078,7 +1078,7 @@ class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
        return values

    @model_validator(mode="after")
    def __merge_out_filters_with_series(cls, values):
    def __merge_out_filters_with_series(self, values):
        if len(values.filters) > 0:
            for f in values.filters:
                for s in values.series:
@@ -1140,12 +1140,12 @@ class CardTimeSeries(__CardSchema):
    view_type: MetricTimeseriesViewType

    @model_validator(mode="before")
    def __enforce_default(cls, values):
    def __enforce_default(self, values):
        values["metricValue"] = []
        return values

    @model_validator(mode="after")
    def __transform(cls, values):
    def __transform(self, values):
        values.metric_of = MetricOfTimeseries(values.metric_of)
        return values
@@ -1157,18 +1157,18 @@ class CardTable(__CardSchema):
    metric_format: MetricExtendedFormatType = Field(default=MetricExtendedFormatType.SESSION_COUNT)

    @model_validator(mode="before")
    def __enforce_default(cls, values):
    def __enforce_default(self, values):
        if values.get("metricOf") is not None and values.get("metricOf") != MetricOfTable.ISSUES:
            values["metricValue"] = []
        return values

    @model_validator(mode="after")
    def __transform(cls, values):
    def __transform(self, values):
        values.metric_of = MetricOfTable(values.metric_of)
        return values

    @model_validator(mode="after")
    def __validator(cls, values):
    def __validator(self, values):
        if values.metric_of not in (MetricOfTable.ISSUES, MetricOfTable.USER_BROWSER,
                                    MetricOfTable.USER_DEVICE, MetricOfTable.USER_COUNTRY,
                                    MetricOfTable.VISITED_URL):
@@ -1183,7 +1183,7 @@ class CardFunnel(__CardSchema):
    view_type: MetricOtherViewType = Field(...)

    @model_validator(mode="before")
    def __enforce_default(cls, values):
    def __enforce_default(self, values):
        if values.get("metricOf") and not MetricOfFunnels.has_value(values["metricOf"]):
            values["metricOf"] = MetricOfFunnels.SESSION_COUNT
            values["viewType"] = MetricOtherViewType.OTHER_CHART
@@ -1192,7 +1192,7 @@
        return values

    @model_validator(mode="after")
    def __transform(cls, values):
    def __transform(self, values):
        values.metric_of = MetricOfTimeseries(values.metric_of)
        return values
@@ -1203,12 +1203,12 @@ class CardErrors(__CardSchema):
    view_type: MetricOtherViewType = Field(...)

    @model_validator(mode="before")
    def __enforce_default(cls, values):
    def __enforce_default(self, values):
        values["series"] = []
        return values

    @model_validator(mode="after")
    def __transform(cls, values):
    def __transform(self, values):
        values.metric_of = MetricOfErrors(values.metric_of)
        return values
@@ -1219,12 +1219,12 @@ class CardPerformance(__CardSchema):
    view_type: MetricOtherViewType = Field(...)

    @model_validator(mode="before")
    def __enforce_default(cls, values):
    def __enforce_default(self, values):
        values["series"] = []
        return values

    @model_validator(mode="after")
    def __transform(cls, values):
    def __transform(self, values):
        values.metric_of = MetricOfPerformance(values.metric_of)
        return values
@@ -1235,12 +1235,12 @@ class CardResources(__CardSchema):
    view_type: MetricOtherViewType = Field(...)

    @model_validator(mode="before")
    def __enforce_default(cls, values):
    def __enforce_default(self, values):
        values["series"] = []
        return values

    @model_validator(mode="after")
    def __transform(cls, values):
    def __transform(self, values):
        values.metric_of = MetricOfResources(values.metric_of)
        return values
@@ -1251,12 +1251,12 @@ class CardWebVital(__CardSchema):
    view_type: MetricOtherViewType = Field(...)

    @model_validator(mode="before")
    def __enforce_default(cls, values):
    def __enforce_default(self, values):
        values["series"] = []
        return values

    @model_validator(mode="after")
    def __transform(cls, values):
    def __transform(self, values):
        values.metric_of = MetricOfWebVitals(values.metric_of)
        return values
@@ -1267,11 +1267,11 @@ class CardHeatMap(__CardSchema):
    view_type: MetricOtherViewType = Field(...)

    @model_validator(mode="before")
    def __enforce_default(cls, values):
    def __enforce_default(self, values):
        return values

    @model_validator(mode="after")
    def __transform(cls, values):
    def __transform(self, values):
        values.metric_of = MetricOfHeatMap(values.metric_of)
        return values
@@ -1286,17 +1286,17 @@ class CardInsights(__CardSchema):
    view_type: MetricOtherViewType = Field(...)

    @model_validator(mode="before")
    def __enforce_default(cls, values):
    def __enforce_default(self, values):
        values["view_type"] = MetricOtherViewType.LIST_CHART
        return values

    @model_validator(mode="after")
    def __transform(cls, values):
    def __transform(self, values):
        values.metric_of = MetricOfInsights(values.metric_of)
        return values

    @model_validator(mode='after')
    def restrictions(cls, values):
    def restrictions(self, values):
        raise ValueError(f"metricType:{MetricType.INSIGHTS} not supported yet.")
@@ -1306,7 +1306,7 @@ class CardPathAnalysisSeriesSchema(CardSeriesSchema):
    density: int = Field(default=4, ge=2, le=10)

    @model_validator(mode="before")
    def __enforce_default(cls, values):
    def __enforce_default(self, values):
        if values.get("filter") is None and values.get("startTimestamp") and values.get("endTimestamp"):
            values["filter"] = PathAnalysisSchema(startTimestamp=values["startTimestamp"],
                                                  endTimestamp=values["endTimestamp"],
@@ -1328,14 +1328,14 @@ class CardPathAnalysis(__CardSchema):
    series: List[CardPathAnalysisSeriesSchema] = Field(default=[])

    @model_validator(mode="before")
    def __enforce_default(cls, values):
    def __enforce_default(self, values):
        values["viewType"] = MetricOtherViewType.OTHER_CHART.value
        if values.get("series") is not None and len(values["series"]) > 0:
            values["series"] = [values["series"][0]]
        return values

    @model_validator(mode="after")
    def __clean_start_point_and_enforce_metric_value(cls, values):
    def __clean_start_point_and_enforce_metric_value(self, values):
        start_point = []
        for s in values.start_point:
            if len(s.value) == 0:
@@ -1349,7 +1349,7 @@ class CardPathAnalysis(__CardSchema):
        return values

    @model_validator(mode='after')
    def __validator(cls, values):
    def __validator(self, values):
        s_e_values = {}
        exclude_values = {}
        for f in values.start_point:
@@ -1359,7 +1359,8 @@
            exclude_values[f.type] = exclude_values.get(f.type, []) + f.value

        assert len(
            values.start_point) <= 1, f"Only 1 startPoint with multiple values OR 1 endPoint with multiple values is allowed"
            values.start_point) <= 1, \
            f"Only 1 startPoint with multiple values OR 1 endPoint with multiple values is allowed"
        for t in exclude_values:
            for v in t:
                assert v not in s_e_values.get(t, []), f"startPoint and endPoint cannot be excluded, value: {v}"
@@ -1452,13 +1453,13 @@ class LiveSessionSearchFilterSchema(BaseModel):
    value: Union[List[str], str] = Field(...)
    type: LiveFilterType = Field(...)
    source: Optional[str] = Field(default=None)
    operator: Literal[SearchEventOperator.IS, \
                      SearchEventOperator.CONTAINS] = Field(default=SearchEventOperator.CONTAINS)
    operator: Literal[SearchEventOperator.IS, SearchEventOperator.CONTAINS] \
        = Field(default=SearchEventOperator.CONTAINS)

    _transform = model_validator(mode='before')(transform_old_filter_type)

    @model_validator(mode='after')
    def __validator(cls, values):
    def __validator(self, values):
        if values.type is not None and values.type == LiveFilterType.METADATA:
            assert values.source is not None, "source should not be null for METADATA type"
            assert len(values.source) > 0, "source should not be empty for METADATA type"
@@ -1471,7 +1472,7 @@ class LiveSessionsSearchPayloadSchema(_PaginatedSchema):
    order: SortOrderType = Field(default=SortOrderType.DESC)

    @model_validator(mode="before")
    def __transform(cls, values):
    def __transform(self, values):
        if values.get("order") is not None:
            values["order"] = values["order"].upper()
        if values.get("filters") is not None:
@@ -1527,8 +1528,9 @@ class SessionUpdateNoteSchema(SessionNoteSchema):
    is_public: Optional[bool] = Field(default=None)

    @model_validator(mode='after')
    def __validator(cls, values):
        assert values.message is not None or values.timestamp is not None or values.is_public is not None, "at least 1 attribute should be provided for update"
    def __validator(self, values):
        assert values.message is not None or values.timestamp is not None or values.is_public is not None, \
            "at least 1 attribute should be provided for update"
        return values
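The asserts used in these validators are not bare crashes: Pydantic converts an AssertionError raised inside a validator into a ValidationError for the caller. A self-contained sketch with a hypothetical Note model:

from typing import Optional
from pydantic import BaseModel, ValidationError, model_validator

class Note(BaseModel):
    message: Optional[str] = None
    timestamp: Optional[int] = None

    @model_validator(mode="after")
    def _check(self) -> "Note":
        assert self.message is not None or self.timestamp is not None, \
            "at least 1 attribute should be provided for update"
        return self

try:
    Note()
except ValidationError as e:
    print(e.error_count(), "validation error")  # carries the assertion message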
@@ -1555,7 +1557,7 @@ class HeatMapSessionsSearch(SessionsSearchPayloadSchema):
    filters: List[Union[SessionSearchFilterSchema, _HeatMapSearchEventRaw]] = Field(default=[])

    @model_validator(mode="before")
    def __transform(cls, values):
    def __transform(self, values):
        for f in values.get("filters", []):
            if f.get("type") == FilterType.DURATION:
        return values
@@ -1598,7 +1600,7 @@ class FeatureFlagConditionFilterSchema(BaseModel):
    sourceOperator: Optional[Union[SearchEventOperator, MathOperator]] = Field(default=None)

    @model_validator(mode="before")
    def __force_is_event(cls, values):
    def __force_is_event(self, values):
        values["isEvent"] = False
        return values
@@ -1638,10 +1640,19 @@ class FeatureFlagSchema(BaseModel):
    variants: List[FeatureFlagVariant] = Field(default=[])


class ModuleType(str, Enum):
    ASSIST = "assist"
    NOTES = "notes"
    BUG_REPORTS = "bug-reports"
    OFFLINE_RECORDINGS = "offline-recordings"
    ALERTS = "alerts"
    ASSIST_STATTS = "assist-statts"
    RECOMMENDATIONS = "recommendations"
    FEATURE_FLAGS = "feature-flags"


class ModuleStatus(BaseModel):
    module: Literal["assist", "notes", "bug-reports",
                    "offline-recordings", "alerts", "assist-statts", "recommendations", "feature-flags"] = Field(...,
        description="Possible values: assist, notes, bug-reports, offline-recordings, alerts, assist-statts, recommendations, feature-flags")
    module: ModuleType = Field(...)
    status: bool = Field(...)
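Replacing the duplicated Literal[...] and its description with ModuleType keeps a single source of truth, and Pydantic coerces and rejects values automatically. A trimmed, self-contained check (only two members shown):

from enum import Enum
from pydantic import BaseModel, Field, ValidationError

class ModuleTypeSketch(str, Enum):
    ASSIST = "assist"
    NOTES = "notes"

class ModuleStatusSketch(BaseModel):
    module: ModuleTypeSketch = Field(...)
    status: bool = Field(...)

print(ModuleStatusSketch(module="notes", status=True).module)  # ModuleTypeSketch.NOTES
try:
    ModuleStatusSketch(module="nope", status=True)
except ValidationError:
    print("rejected: 'nope' is not a ModuleTypeSketch value")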
ee/api/.gitignore (vendored, +2)
@@ -276,3 +276,5 @@ Pipfile.lock
/routers/subs/spot.py
/chalicelib/utils/or_cache/
/routers/subs/health.py
/chalicelib/core/spot.py
/chalicelib/core/unprocessed_sessions.py
@@ -10,6 +10,7 @@ from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.gzip import GZipMiddleware
from psycopg import AsyncConnection
from psycopg.rows import dict_row
from starlette import status
from starlette.responses import StreamingResponse, JSONResponse
@@ -17,21 +18,19 @@ from chalicelib.core import traces
from chalicelib.utils import events_queue
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from crons import core_crons, ee_crons, core_dynamic_crons
from routers import core, core_dynamic
from routers import ee
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot
from routers.subs import v1_api_ee

if config("ENABLE_SSO", cast=bool, default=True):
    from routers import saml
from crons import core_crons, ee_crons, core_dynamic_crons
from routers.subs import insights, metrics, v1_api, health, usability_tests, spot
from routers.subs import v1_api_ee

loglevel = config("LOGLEVEL", default=logging.WARNING)
print(f">Loglevel set to: {loglevel}")
logging.basicConfig(level=loglevel)

from psycopg.rows import dict_row


class ORPYAsyncConnection(AsyncConnection):
@@ -53,10 +53,9 @@ class JWTAuth(HTTPBearer):
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
                                detail="Invalid authentication scheme.")
        jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials)
        auth_exists = jwt_payload is not None \
                      and users.auth_exists(user_id=jwt_payload.get("userId", -1),
                                            tenant_id=jwt_payload.get("tenantId", -1),
                                            jwt_iat=jwt_payload.get("iat", 100))
        auth_exists = jwt_payload is not None and users.auth_exists(user_id=jwt_payload.get("userId", -1),
                                                                    tenant_id=jwt_payload.get("tenantId", -1),
                                                                    jwt_iat=jwt_payload.get("iat", 100))
        if jwt_payload is None \
                or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
                or not auth_exists:
@@ -263,6 +263,12 @@ def __search_metadata(project_id, value, key=None, source=None):
    return helper.list_to_camel_case(results)


class TableColumn:
    def __init__(self, table, column):
        self.table = table
        self.column = column


TYPE_TO_COLUMN = {
    schemas.EventType.CLICK: "label",
    schemas.EventType.INPUT: "label",
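TableColumn is added here but not yet used in the visible lines; a purely hypothetical illustration of where it could head, with the table name invented for the example:

# Hypothetical -- the visible diff still maps event types to plain column names:
TYPE_TO_COLUMN = {
    schemas.EventType.CLICK: TableColumn(table="events", column="label"),
    schemas.EventType.INPUT: TableColumn(table="events", column="label"),
}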
@@ -96,4 +96,6 @@ rm -rf ./chalicelib/core/db_request_handler.py
rm -rf ./chalicelib/core/db_request_handler.py
rm -rf ./routers/subs/spot.py
rm -rf ./chalicelib/utils/or_cache
rm -rf ./routers/subs/health.py
rm -rf ./routers/subs/health.py
rm -rf ./chalicelib/core/spot.py
rm -rf ./chalicelib/core/unprocessed_sessions.py
@@ -1,3 +1,4 @@
import logging
from typing import Optional, Union

from decouple import config
@@ -6,12 +7,13 @@ from fastapi import HTTPException, status
from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Response

import schemas
from chalicelib.core import scope
from chalicelib.core import sessions, assist, heatmaps, sessions_favorite, sessions_assignments, errors, errors_viewed, \
    errors_favorite, sessions_notes, sessions_replay, signup, feature_flags
from chalicelib.core import sessions_viewed
from chalicelib.core import tenants, users, projects, license
from chalicelib.core import unprocessed_sessions
from chalicelib.core import webhook
from chalicelib.core import scope
from chalicelib.core.collaboration_slack import Slack
from chalicelib.core.users import get_user_settings
from chalicelib.utils import SAML2_helper, smtp
@@ -24,7 +26,7 @@ from schemas import Permissions, ServicePermissions

if config("ENABLE_SSO", cast=bool, default=True):
    from routers import saml

logger = logging.getLogger(__name__)
public_app, app, app_apikey = get_routers()

COOKIE_PATH = "/api/refresh"
@@ -79,7 +81,7 @@ def login_user(response: JSONResponse, spot: Optional[bool] = False, data: schem
    content = {
        'jwt': r.pop('jwt'),
        'data': {
            "scope":scope.get_scope(r["tenantId"]),
            "scope": scope.get_scope(r["tenantId"]),
            "user": r
        }
    }
@@ -140,6 +142,8 @@ def get_account(context: schemas.CurrentContext = Depends(OR_context)):
def edit_account(data: schemas.EditAccountSchema = Body(...),
                 context: schemas.CurrentContext = Depends(OR_context)):
    return users.edit_account(tenant_id=context.tenant_id, user_id=context.user_id, changes=data)


@app.post('/account/scope', tags=["account"])
def change_scope(data: schemas.ScopeSchema = Body(),
                 context: schemas.CurrentContext = Depends(OR_context)):
@@ -392,16 +396,10 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks,
def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
                                 context: schemas.CurrentContext = Depends(OR_context)):
    not_found = {"errors": ["Replay file not found"]}
    if not sessionId.isnumeric():
        return not_found
    else:
        sessionId = int(sessionId)
        if not sessions.session_exists(project_id=projectId, session_id=sessionId):
            print(f"{projectId}/{sessionId} not found in DB.")
            if not assist.session_exists(project_id=projectId, session_id=sessionId):
                print(f"{projectId}/{sessionId} not found in Assist.")
                return not_found

    sessionId, err = unprocessed_sessions.check_exists(project_id=projectId, session_id=sessionId,
                                                       not_found_response=not_found)
    if err is not None:
        return err
    path = assist.get_raw_mob_by_id(project_id=projectId, session_id=sessionId)
    if path is None:
        return not_found
@@ -416,19 +414,13 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
                                   context: schemas.CurrentContext = Depends(OR_context)):
    not_found = {"errors": ["Devtools file not found"]}
    if not sessionId.isnumeric():
        return not_found
    else:
        sessionId = int(sessionId)
        if not sessions.session_exists(project_id=projectId, session_id=sessionId):
            print(f"{projectId}/{sessionId} not found in DB.")
            if not assist.session_exists(project_id=projectId, session_id=sessionId):
                print(f"{projectId}/{sessionId} not found in Assist.")
                return not_found

    sessionId, err = unprocessed_sessions.check_exists(project_id=projectId, session_id=sessionId,
                                                       not_found_response=not_found)
    if err is not None:
        return err
    path = assist.get_raw_devtools_by_id(project_id=projectId, session_id=sessionId)
    if path is None:
        return {"errors": ["Devtools file not found"]}
        return not_found

    return FileResponse(path=path, media_type="application/octet-stream")
@@ -60,13 +60,13 @@ class AssistStatsSessionsRequest(BaseModel):
    userId: Optional[int] = Field(default=None)

    @field_validator("sort")
    def validate_sort(cls, v):
    def validate_sort(self, v):
        if v not in assist_sort_options:
            raise ValueError(f"Invalid sort option. Allowed options: {', '.join(assist_sort_options)}")
        return v

    @field_validator("order")
    def validate_order(cls, v):
    def validate_order(self, v):
        if v not in ["desc", "asc"]:
            raise ValueError("Invalid order option. Must be 'desc' or 'asc'.")
        return v
@@ -32,7 +32,7 @@ class CurrentContext(schemas.CurrentContext):
    service_account: bool = Field(default=False)

    @model_validator(mode="before")
    def remove_unsupported_perms(cls, values):
    def remove_unsupported_perms(self, values):
        if values.get("permissions") is not None:
            perms = []
            for p in values["permissions"]:
@@ -94,7 +94,7 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema):
    order: schemas.SortOrderType = Field(default=schemas.SortOrderType.DESC)

    @model_validator(mode="before")
    def transform_order(cls, values):
    def transform_order(self, values):
        if values.get("order") is None:
            values["order"] = schemas.SortOrderType.DESC
        else:
@@ -154,7 +154,7 @@ class CardInsights(schemas.CardInsights):
    metric_value: List[InsightCategories] = Field(default=[])

    @model_validator(mode='after')
    def restrictions(cls, values):
    def restrictions(self, values):
        return values