Api v1.15.0 (#1547)
* feat(chalice): upgraded dependencies
* feat(chalice): changed path analysis schema
* feat(DB): click coordinate support
* feat(chalice): changed path analysis issues schema
  feat(chalice): upgraded dependencies
* fix(chalice): fixed pydantic issue
* refactor(chalice): refresh token validator
* feat(chalice): role restrictions
* feat(chalice): EE path analysis changes
* refactor(DB): changed creation queries
  refactor(DB): changed delete queries
  feat(DB): support new path analysis payload
* feat(chalice): save path analysis card
* feat(chalice): restrict access
* feat(chalice): restrict access
* feat(chalice): EE save new path analysis card
* refactor(chalice): path analysis
* feat(chalice): path analysis new query
* fix(chalice): configurable CH config
* fix(chalice): assist autocomplete
* refactor(chalice): refactored permissions
* refactor(chalice): changed log level
* refactor(chalice): upgraded dependencies
* refactor(chalice): changed path analysis query
* refactor(chalice): changed path analysis query
* refactor(chalice): upgraded dependencies
  refactor(alerts): upgraded dependencies
  refactor(crons): upgraded dependencies
* feat(chalice): path analysis ignore start point
* feat(chalice): path analysis in progress
* refactor(chalice): path analysis changed link sort
* refactor(chalice): path analysis changed link sort
* refactor(chalice): path analysis changed link sort
* refactor(chalice): path analysis new query
  refactor(chalice): authorizers
* refactor(chalice): refactored authorizer
* fix(chalice): fixed create card of PathAnalysis
* refactor(chalice): compute link-percentage for Path Analysis
* refactor(chalice): remove null starting point from Path Analysis
* feat(chalice): path analysis CH query
* refactor(chalice): changed Path Analysis links-value
  fix(chalice): fixed search notes for EE
* feat(chalice): path analysis enhanced query results
* feat(chalice): include timezone in search sessions response
* refactor(chalice): refactored logs
* refactor(chalice): refactored logs
  feat(chalice): get path analysis issues
* fix(chalice): fixed path analysis issues pagination
* fix(chalice): sessions-search handle null values
* feat(chalice): PathAnalysis start event support middle-event matching
* feat(chalice): PathAnalysis start event support middle-event matching
* feat(chalice): PathAnalysis support mixed events with start-point
* fix(chalice): PathAnalysis fixed eventType value when metricValue is missing
* fix(chalice): PathAnalysis fixed wrong super-class model for update card
* fix(chalice): PathAnalysis fixed search issues
  refactor(chalice): upgraded dependencies
* fix(chalice): enforce isEvent if missing
* fix(chalice): enforce isEvent if missing
* refactor(chalice): refactored custom-metrics
* refactor(chalice): small changes
* feat(chalice): path analysis EE new query
* fix(chalice): fixed hide-excess state for Path Analysis
* fix(chalice): fixed update start point and excludes for Path Analysis
* fix(chalice): fix payload validation
  fix(chalice): fix update widget endpoint
* fix(chalice): fix payload validation
  fix(chalice): fix update widget endpoint
* fix(chalice): fix add member
* refactor(chalice): upgraded dependencies
  refactor!(chalice): upgraded SAML dependencies
* feat(chalice): ios-project support 1/5
* refactor(chalice): changed logs handling
* fix(chalice): fix path analysis issues list
* Api v1.15.0 (#1542)
* refactor(chalice): changed default dev env vars
* refactor(chalice): changes
* refactor(chalice): changed payload fixer
* refactor(chalice): changed payload fixer
  refactor(chalice): support duplicate filters
* Api v1.15.0 no merge (#1546)
* refactor(chalice): changed default dev env vars
* refactor(chalice): changes
* refactor(chalice): changed payload fixer
* refactor(chalice): changed payload fixer
  refactor(chalice): support duplicate filters
* refactor(chalice): changes
* feature(chalice): mobile sessions search
parent 16efb1316c
commit 7f37bc4336

11 changed files with 222 additions and 136 deletions
@@ -214,10 +214,10 @@ def make_chart(project_id, user_id, data: schemas.CardSessionsSchema, metric: sc
 def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
     # raw_metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False, include_data=True)
-    raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
-    if raw_metric is None:
+    card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
+    if card is None:
         return None
-    metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)
+    metric: schemas.CardSchema = schemas.CardSchema(**card)
     metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
     if metric is None:
         return None
@@ -660,10 +660,10 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
                                  data: schemas.CardSessionsSchema
                                  # , range_value=None, start_date=None, end_date=None
                                  ):
-    metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
-    if metric is None:
+    card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
+    if card is None:
         return None
-    metric: schemas.CardSchema = schemas.CardSchema(**metric)
+    metric: schemas.CardSchema = schemas.CardSchema(**card)
     metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
     if metric is None:
         return None
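Both hunks above make the same fix in get_sessions_by_card_id and get_funnel_sessions_by_issue: the old code bound one name (raw_metric or metric) first as a dict and then as a schemas.CardSchema, so a single name carried two conflicting type annotations. Fetching into card and validating into metric gives each name one type. A minimal sketch of the resulting pattern, with CardSchema stubbed for illustration:

    from typing import Optional
    from pydantic import BaseModel

    class CardSchema(BaseModel):      # stand-in for schemas.CardSchema
        name: str = "untitled"

    def load_metric(card: Optional[dict]) -> Optional[CardSchema]:
        # the raw dict and the validated model now live under separate names
        if card is None:
            return None
        metric: CardSchema = CardSchema(**card)
        return metric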
@@ -38,20 +38,21 @@ COALESCE((SELECT TRUE
 # This function executes the query and return result
 def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
-                    error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False):
+                    error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False,
+                    platform="web"):
     if data.bookmarked:
         data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project_id, user_id)

     full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only,
                                                favorite_only=data.bookmarked, issue=issue, project_id=project_id,
-                                               user_id=user_id)
+                                               user_id=user_id, platform=platform)
     if data.limit is not None and data.page is not None:
         full_args["sessions_limit"] = data.limit
         full_args["sessions_limit_s"] = (data.page - 1) * data.limit
         full_args["sessions_limit_e"] = data.page * data.limit
     else:
         full_args["sessions_limit"] = 200
-        full_args["sessions_limit_s"] = 1
+        full_args["sessions_limit_s"] = 0
         full_args["sessions_limit_e"] = 200

     meta_keys = []
@@ -113,6 +114,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
     if data.sort is not None and data.sort != "session_id":
+        # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
         sort = helper.key_to_snake_case(data.sort)

     meta_keys = metadata.get(project_id=project_id)
     main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count,
                                         COALESCE(JSONB_AGG(full_sessions)
@@ -129,6 +131,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
     logging.debug("--------------------")
     try:
         cur.execute(main_query)
+        sessions = cur.fetchone()
     except Exception as err:
         logging.warning("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
         logging.warning(main_query.decode('UTF-8'))
@@ -139,7 +142,6 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
     if errors_only or ids_only:
         return helper.list_to_camel_case(cur.fetchall())

-    sessions = cur.fetchone()
     if count_only:
         return helper.dict_to_camel_case(sessions)

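With the fetch moved inside the try block (previous two hunks), a failure in fetchone() is now reported through the same diagnostic path as a failure in execute(), and the later duplicate fetch is dropped. A condensed sketch of the pattern, with the cursor and query as stand-ins:

    import logging

    def run_search(cur, main_query: bytes):
        # execute() and fetchone() share one try, so either failure is logged
        # together with the offending query
        try:
            cur.execute(main_query)
            return cur.fetchone()
        except Exception:
            logging.warning("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
            logging.warning(main_query.decode('UTF-8'))
            raise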
@@ -396,7 +398,7 @@ def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2):

 # this function generates the query and return the generated-query with the dict of query arguments
 def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue,
-                       project_id, user_id, extra_event=None):
+                       project_id, user_id, platform="web", extra_event=None):
     ss_constraints = []
     full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp,
                  "projectId": project_id, "userId": user_id}
@@ -687,32 +689,61 @@
                                  **sh.multi_values(event.source, value_key=s_k)}

             if event_type == events.EventType.CLICK.ui_type:
-                event_from = event_from % f"{events.EventType.CLICK.table} AS main "
-                if not is_any:
-                    if event.operator == schemas.ClickEventExtraOperator._on_selector:
-                        event_where.append(
-                            sh.multi_conditions(f"main.selector = %({e_k})s", event.value, value_key=e_k))
-                    else:
-                        event_where.append(
-                            sh.multi_conditions(f"main.{events.EventType.CLICK.column} {op} %({e_k})s", event.value,
-                                                value_key=e_k))
+                if platform == "web":
+                    event_from = event_from % f"{events.EventType.CLICK.table} AS main "
+                    if not is_any:
+                        if event.operator == schemas.ClickEventExtraOperator._on_selector:
+                            event_where.append(
+                                sh.multi_conditions(f"main.selector = %({e_k})s", event.value, value_key=e_k))
+                        else:
+                            event_where.append(
+                                sh.multi_conditions(f"main.{events.EventType.CLICK.column} {op} %({e_k})s", event.value,
+                                                    value_key=e_k))
+                else:
+                    event_from = event_from % f"{events.EventType.CLICK_IOS.table} AS main "
+                    if not is_any:
+                        event_where.append(
+                            sh.multi_conditions(f"main.{events.EventType.CLICK_IOS.column} {op} %({e_k})s", event.value,
+                                                value_key=e_k))

             elif event_type == events.EventType.INPUT.ui_type:
-                event_from = event_from % f"{events.EventType.INPUT.table} AS main "
-                if not is_any:
-                    event_where.append(
-                        sh.multi_conditions(f"main.{events.EventType.INPUT.column} {op} %({e_k})s", event.value,
-                                            value_key=e_k))
-                if event.source is not None and len(event.source) > 0:
-                    event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source,
-                                                           value_key=f"custom{i}"))
-                    full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")}
+                if platform == "web":
+                    event_from = event_from % f"{events.EventType.INPUT.table} AS main "
+                    if not is_any:
+                        event_where.append(
+                            sh.multi_conditions(f"main.{events.EventType.INPUT.column} {op} %({e_k})s", event.value,
+                                                value_key=e_k))
+                    if event.source is not None and len(event.source) > 0:
+                        event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source,
+                                                               value_key=f"custom{i}"))
+                        full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")}
+
+                else:
+                    event_from = event_from % f"{events.EventType.INPUT_IOS.table} AS main "
+                    if not is_any:
+                        event_where.append(
+                            sh.multi_conditions(f"main.{events.EventType.INPUT_IOS.column} {op} %({e_k})s", event.value,
+                                                value_key=e_k))

             elif event_type == events.EventType.LOCATION.ui_type:
-                event_from = event_from % f"{events.EventType.LOCATION.table} AS main "
-                if not is_any:
-                    event_where.append(
-                        sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s",
-                                            event.value, value_key=e_k))
+                if platform == "web":
+                    event_from = event_from % f"{events.EventType.LOCATION.table} AS main "
+                    if not is_any:
+                        event_where.append(
+                            sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s",
+                                                event.value, value_key=e_k))
+                else:
+                    event_from = event_from % f"{events.EventType.VIEW_IOS.table} AS main "
+                    if not is_any:
+                        event_where.append(
+                            sh.multi_conditions(f"main.{events.EventType.VIEW_IOS.column} {op} %({e_k})s",
+                                                event.value, value_key=e_k))
+            elif event_type == events.EventType.SWIPE_IOS.ui_type and platform == "ios":
+                event_from = event_from % f"{events.EventType.SWIPE_IOS.table} AS main "
+                if not is_any:
+                    event_where.append(
+                        sh.multi_conditions(f"main.{events.EventType.SWIPE_IOS.column} {op} %({e_k})s",
+                                            event.value, value_key=e_k))
             elif event_type == events.EventType.CUSTOM.ui_type:
                 event_from = event_from % f"{events.EventType.CUSTOM.table} AS main "
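The hunk above repeats one pattern for every event type: on "web" the query joins the web event table, otherwise its iOS counterpart (CLICK_IOS, INPUT_IOS, VIEW_IOS), and SWIPE_IOS is only matched when platform == "ios". A compact sketch of that dispatch, with illustrative table names rather than the real schema:

    # hypothetical lookup equivalent to the if/else chain in the diff
    EVENT_TABLES = {
        ("click", "web"): "events.clicks",
        ("click", "ios"): "events_ios.clicks",
        ("input", "web"): "events.inputs",
        ("input", "ios"): "events_ios.inputs",
        ("location", "web"): "events.pages",
        ("location", "ios"): "events_ios.views",
        ("swipe", "ios"): "events_ios.swipes",
    }

    def event_table(event_type: str, platform: str = "web") -> str:
        try:
            return EVENT_TABLES[(event_type, platform)]
        except KeyError:
            raise ValueError(f"unsupported event {event_type!r} on platform {platform!r}")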

api/env.dev (33 changes)
@@ -1,28 +1,28 @@
-EMAIL_FROM=Openreplay-taha<do-not-reply@openreplay.com>
-EMAIL_HOST=email-smtp.eu-west-1.amazonaws.com
-EMAIL_PASSWORD=password
+EMAIL_FROM=Openreplay-dev<do-not-reply@openreplay.com>
+EMAIL_HOST=
+EMAIL_PASSWORD=
 EMAIL_PORT=587
-EMAIL_SSL_CERT=''
-EMAIL_SSL_KEY=''
-EMAIL_USER=user
+EMAIL_SSL_CERT=
+EMAIL_SSL_KEY=
+EMAIL_USER=
 EMAIL_USE_SSL=false
 EMAIL_USE_TLS=true
-S3_HOST=https://foss.openreplay.com:443
-S3_KEY=key
-S3_SECRET=secret
+S3_HOST=
+S3_KEY=
+S3_SECRET=
 SITE_URL=http://127.0.0.1:3333
-announcement_url=https://asayer-announcements.s3.eu-central-1.amazonaws.com/
+announcement_url=
 captcha_key=
 captcha_server=
 change_password_link=/changepassword?invitation=%s&&pass=%s
 invitation_link=/users/invitation?token=%s
-js_cache_bucket=asayer-sessions-assets-staging
+js_cache_bucket=
 jwt_algorithm=HS512
 JWT_EXPIRATION=6000
 JWT_REFRESH_EXPIRATION=60
-JWT_ISSUER=openreplay-local-staging
-jwt_secret=secret
-JWT_REFRESH_SECRET=another_secret
+JWT_ISSUER=openReplay-dev
+jwt_secret=SECRET
+JWT_REFRESH_SECRET=SECRET2
 ASSIST_URL=http://127.0.0.1:9001/assist/%s
 assist=/sockets-live
 assistList=/sockets-list
@@ -42,10 +42,11 @@ PG_RETRY_INTERVAL=2
 PG_POOL=true
 sessions_bucket=mobs
 sessions_region=us-east-1
-sourcemaps_bucket=asayer-sourcemaps-staging
+sourcemaps_bucket=
 sourcemaps_reader=http://127.0.0.1:3000/sourcemaps
 LOGLEVEL=INFO
-FS_DIR=/Users/tahayk/asayer/openreplay/api/.local
+FS_DIR=

+ASSIST_KEY=abc
 EFS_SESSION_MOB_PATTERN=%(sessionId)s/dom.mob
 EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob
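env.dev only carries development defaults; the API loads them at runtime with python-decouple (pinned further down in the Pipfile). A minimal sketch of how such values are typically read — the keys match the file, the casts are illustrative:

    from decouple import config

    EMAIL_PORT = config("EMAIL_PORT", default=587, cast=int)
    EMAIL_USE_TLS = config("EMAIL_USE_TLS", default=True, cast=bool)
    FS_DIR = config("FS_DIR", default="")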
@@ -234,7 +234,8 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba
 @app.post('/{projectId}/sessions/search', tags=["sessions"])
 def sessions_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...),
                     context: schemas.CurrentContext = Depends(OR_context)):
-    data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id)
+    data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id,
+                                    platform=context.project.platform)
     return {'data': data}

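Because platform defaults to "web" in both search_sessions and search_query_parts, every existing caller keeps its behavior; only this route forwards the project's platform. A toy check of that backward compatibility, with the signature reduced to the relevant part:

    def search_sessions(data, project_id, user_id, platform="web"):
        return {"platform": platform}   # simplified stand-in

    assert search_sessions({}, 1, 7)["platform"] == "web"                  # legacy call site
    assert search_sessions({}, 1, 7, platform="ios")["platform"] == "ios"  # new route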
@@ -1,4 +1,4 @@
-from typing import TypeVar, Annotated, Union, Any
+from typing import TypeVar, Annotated, Union
 from enum import Enum as _Enum
 from pydantic import BaseModel as _BaseModel
 from pydantic import ConfigDict, TypeAdapter, Field
@@ -10,29 +10,6 @@ def attribute_to_camel_case(snake_str: str) -> str:
     return components[0] + ''.join(x.title() for x in components[1:])


-def transform_email(email: str) -> str:
-    return email.lower().strip() if isinstance(email, str) else email
-
-
-def remove_whitespace(value: str) -> str:
-    return " ".join(value.split()) if isinstance(value, str) else value
-
-
-def remove_duplicate_values(value: list) -> list:
-    if value is not None and isinstance(value, list):
-        if len(value) > 0 \
-                and (isinstance(value[0], int) or isinstance(value[0], dict)):
-            return value
-        value = list(set(value))
-    return value
-
-
-def single_to_list(value: Union[list, Any]) -> list:
-    if value is not None and not isinstance(value, list):
-        value = [value]
-    return value
-
-
 def schema_extra(schema: dict, _):
     props = {}
     for k, v in schema.get('properties', {}).items():
@@ -5,9 +5,9 @@ from pydantic import Field, EmailStr, HttpUrl, SecretStr, AnyHttpUrl, validator
 from pydantic import field_validator, model_validator, computed_field

 from chalicelib.utils.TimeUTC import TimeUTC
-from .overrides import BaseModel, Enum
-from .overrides import transform_email, remove_whitespace, remove_duplicate_values, \
-    single_to_list, ORUnion
+from .overrides import BaseModel, Enum, ORUnion
+from .transformers_validators import transform_email, remove_whitespace, remove_duplicate_values, single_to_list, \
+    force_is_event


 def transform_old_filter_type(cls, values):
@@ -110,9 +110,9 @@ class CreateProjectSchema(BaseModel):


 class CurrentProjectContext(BaseModel):
-    project_id: int = Field(...)
+    project_id: int = Field(..., gt=0)
     project_key: str = Field(...)
-    platform: str = Field(...)
+    platform: Literal["web", "ios"] = Field(...)


 class CurrentAPIContext(BaseModel):
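With Literal["web", "ios"] and gt=0, bad contexts are rejected at parse time instead of surfacing later in queries. A quick check under pydantic 2 (the version pinned in the Pipfile below):

    from typing import Literal
    from pydantic import BaseModel, Field, ValidationError

    class CurrentProjectContext(BaseModel):
        project_id: int = Field(..., gt=0)
        project_key: str = Field(...)
        platform: Literal["web", "ios"] = Field(...)

    CurrentProjectContext(project_id=1, project_key="k", platform="ios")   # ok
    try:
        CurrentProjectContext(project_id=0, project_key="k", platform="android")
    except ValidationError as e:
        print(e.error_count())   # 2: project_id not > 0, platform not web/ios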
@@ -757,6 +757,22 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema):
         values.filters = n_filters
         return values

+    @field_validator("filters", mode="after")
+    def merge_identical_filters(cls, values):
+        i = 0
+        while i < len(values):
+            j = i + 1
+            while j < len(values):
+                if values[i].type == values[j].type:
+                    values[i].value += values[j].value
+                    del values[j]
+                else:
+                    j += 1
+            values[i] = remove_duplicate_values(values[i])
+            i += 1
+
+        return values
+

 class ErrorStatus(str, Enum):
     all = 'all'
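merge_identical_filters implements the "support duplicate filters" item from the commit message: same-type filters collapse into one by concatenating their value lists (the remove_duplicate_values pass is left out here; its definition appears in transformers_validators.py below). A behavior sketch with a stubbed filter type:

    from dataclasses import dataclass

    @dataclass
    class F:                                # minimal stand-in for a filter schema
        type: str
        value: list

    def merge_identical_filters(values):    # same algorithm as the validator
        i = 0
        while i < len(values):
            j = i + 1
            while j < len(values):
                if values[i].type == values[j].type:
                    values[i].value += values[j].value
                    del values[j]
                else:
                    j += 1
            i += 1
        return values

    fs = merge_identical_filters([F("userCountry", ["FR"]),
                                  F("userBrowser", ["Chrome"]),
                                  F("userCountry", ["US"])])
    # -> [F('userCountry', ['FR', 'US']), F('userBrowser', ['Chrome'])]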
@@ -795,9 +811,7 @@ class PathAnalysisSubFilterSchema(BaseModel):

     @model_validator(mode="before")
     def __force_is_event(cls, values):
-        for v in values.get("filters", []):
-            if v.get("isEvent") is None:
-                v["isEvent"] = True
+        values["isEvent"] = True
         return values


@@ -831,12 +845,15 @@ class PathAnalysisSchema(_TimedSchema, _PaginatedSchema):
     filters: List[ProductAnalyticsFilter] = Field(default=[])
     type: Optional[str] = Field(default=None)

-    @model_validator(mode="before")
-    def __force_is_event(cls, values):
-        for v in values.get("filters"):
-            if v.get("isEvent") is None:
-                v["isEvent"] = ProductAnalyticsSelectedEventType.has_value(v["type"])
-        return values
+    _transform_filters = field_validator('filters', mode='before') \
+        (force_is_event(events_enum=[ProductAnalyticsSelectedEventType]))
+
+    # @model_validator(mode="before")
+    # def __force_is_event_for_filters(cls, values):
+    #     for v in values.get("filters"):
+    #         if v.get("isEvent") is None:
+    #             v["isEvent"] = ProductAnalyticsSelectedEventType.has_value(v["type"])
+    #     return values


 class MobileSignPayloadSchema(BaseModel):
@@ -987,6 +1004,26 @@ class CardSessionsSchema(_TimedSchema, _PaginatedSchema):
     # Used mainly for PathAnalysis, and could be used by other cards
     hide_excess: Optional[bool] = Field(default=False, description="Hide extra values")

+    _transform_filters = field_validator('filters', mode='before') \
+        (force_is_event(events_enum=[EventType, PerformanceEventType]))
+
+    # @model_validator(mode="before")
+    # def __force_is_event(cls, values):
+    #     for v in values.get("filters"):
+    #         if v.get("isEvent") is None:
+    #             v["isEvent"] = ProductAnalyticsSelectedEventType.has_value(v["type"])
+    #     return values
+
+    @model_validator(mode="before")
+    def remove_wrong_filter_values(cls, values):
+        for f in values.get("filters", []):
+            vals = []
+            for v in f.get("value", []):
+                if v is not None:
+                    vals.append(v)
+            f["value"] = vals
+        return values
+
     @model_validator(mode="before")
     def __force_is_event(cls, values):
         for v in values.get("filters"):
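remove_wrong_filter_values runs before validation and drops null entries from every filter's value list — the "sessions-search handle null values" fix from the commit message. Its effect on a raw payload:

    payload = {"filters": [{"type": "userCountry", "value": ["FR", None, "DE"]}]}
    for f in payload.get("filters", []):
        f["value"] = [v for v in f.get("value", []) if v is not None]
    assert payload["filters"][0]["value"] == ["FR", "DE"]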

api/schemas/transformers_validators.py (new file, 41 lines)
@@ -0,0 +1,41 @@
+from .overrides import Enum
+
+from typing import Union, Any, Type
+
+
+def transform_email(email: str) -> str:
+    return email.lower().strip() if isinstance(email, str) else email
+
+
+def remove_whitespace(value: str) -> str:
+    return " ".join(value.split()) if isinstance(value, str) else value
+
+
+def remove_duplicate_values(value: list) -> list:
+    if value is not None and isinstance(value, list):
+        if len(value) > 0 \
+                and (isinstance(value[0], int) or isinstance(value[0], dict)):
+            return value
+        value = list(set(value))
+    return value
+
+
+def single_to_list(value: Union[list, Any]) -> list:
+    if value is not None and not isinstance(value, list):
+        value = [value]
+    return value
+
+
+def force_is_event(events_enum: list[Type[Enum]]):
+    def fn(value: list):
+        if value is not None and isinstance(value, list):
+            for v in value:
+                r = False
+                for en in events_enum:
+                    if en.has_value(v["type"]):
+                        r = True
+                        break
+                v["isEvent"] = r
+        return value
+
+    return fn
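force_is_event is a validator factory: it returns a callable that tags each raw filter dict with isEvent according to the given event enums, and the schemas above register it through pydantic's field_validator(..., mode='before'). A self-contained usage sketch, with a stub enum standing in for the project's overrides.Enum (which supplies has_value) and force_is_event from the new module in scope:

    from enum import Enum
    from pydantic import BaseModel, field_validator

    class EventType(str, Enum):               # stub with the expected helper
        click = "click"
        input = "input"

        @classmethod
        def has_value(cls, value):
            return value in cls._value2member_map_

    class DemoSchema(BaseModel):
        filters: list = []
        _transform_filters = field_validator("filters", mode="before")(
            force_is_event(events_enum=[EventType]))

    d = DemoSchema(filters=[{"type": "click"}, {"type": "userCountry"}])
    # d.filters -> [{'type': 'click', 'isEvent': True},
    #               {'type': 'userCountry', 'isEvent': False}]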

ee/api/.gitignore (vendored, 3 changes)
@@ -274,4 +274,5 @@ Pipfile.lock
 #exp /chalicelib/core/dashboards.py
 /schemas/overrides.py
 /schemas/schemas.py
-/chalicelib/core/authorizers.py
+/chalicelib/core/authorizers.py
+/schemas/transformers_validators.py
@@ -15,13 +15,13 @@ fastapi = "==0.104.0"
 gunicorn = "==21.2.0"
 python-decouple = "==3.8"
 apscheduler = "==3.10.4"
-python3-saml = "==1.16.0"
 python-multipart = "==0.0.6"
 redis = "==5.0.1"
-azure-storage-blob = "==12.18.3"
 uvicorn = {extras = ["standard"], version = "==0.23.2"}
 pydantic = {extras = ["email"], version = "==2.3.0"}
 clickhouse-driver = {extras = ["lz4"], version = "==0.2.6"}
+python3-saml = "==1.16.0"
+azure-storage-blob = "==12.18.3"

 [dev-packages]
@@ -233,10 +233,10 @@ def make_chart(project_id, user_id, data: schemas.CardSessionsSchema, metric: sc
 def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
     # raw_metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False, include_data=True)
-    raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
-    if raw_metric is None:
+    card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
+    if card is None:
         return None
-    metric: schemas.CardSchema = schemas.CardSchema(**raw_metric)
+    metric: schemas.CardSchema = schemas.CardSchema(**card)
     metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
     if metric is None:
         return None
@@ -705,10 +705,10 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
                                  data: schemas.CardSessionsSchema
                                  # , range_value=None, start_date=None, end_date=None
                                  ):
-    metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
-    if metric is None:
+    card: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
+    if card is None:
         return None
-    metric: schemas.CardSchema = schemas.CardSchema(**metric)
+    metric: schemas.CardSchema = schemas.CardSchema(**card)
     metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data)
     if metric is None:
         return None
@@ -1,63 +1,57 @@
-announcement_url=https://asayer-announcements.s3.eu-central-1.amazonaws.com/
-captcha_key=
-captcha_server=
-ch_host=127.0.0.1
-## ee.openreplay
-ch_port=8141
-ch_user=""
-ch_password=password
-
-ch_timeout=30
-ch_receive_timeout=10
-change_password_link=/changepassword?invitation=%s&&pass=%s
-EMAIL_FROM=Asayer-local<do-not-reply@asayer.io>
-EMAIL_HOST=email-smtp.eu-west-1.amazonaws.com
-EMAIL_PASSWORD=password
+EMAIL_FROM=Openreplay-dev<do-not-reply@openreplay.com>
+EMAIL_HOST=
+EMAIL_PASSWORD=
 EMAIL_PORT=587
+EMAIL_SSL_CERT=
+EMAIL_SSL_KEY=
+EMAIL_USER=
 EMAIL_USE_SSL=false
 EMAIL_USE_TLS=true
-EMAIL_USER=user
+S3_HOST=
+S3_KEY=
+S3_SECRET=
+SITE_URL=http://127.0.0.1:3333
+announcement_url=
+captcha_key=
+captcha_server=
+change_password_link=/changepassword?invitation=%s&&pass=%s
 invitation_link=/users/invitation?token=%s
-IOS_BUCKET=asayer-mobile-mob-staging
-IOS_MIDDLEWARE=https://staging-str.asayer.io
-js_cache_bucket=asayer-sessions-assets-staging
+js_cache_bucket=
 jwt_algorithm=HS512
-JWT_EXPIRATION=10
+JWT_EXPIRATION=6000
 JWT_REFRESH_EXPIRATION=60
-JWT_ISSUER=openreplay-local-staging
-jwt_secret=secret
-JWT_REFRESH_SECRET=another_secret
 LICENSE_KEY=KEY
 # ee.openreplay
+JWT_ISSUER=openReplay-dev
+jwt_secret=SECRET
+JWT_REFRESH_SECRET=SECRET2
+ASSIST_URL=http://127.0.0.1:9001/assist/%s
+assist=/sockets-live
+assistList=/sockets-list

 # EE
 pg_dbname=postgres
 pg_host=127.0.0.1
 pg_password=password
-pg_port=5421
+pg_port=5420
 pg_user=postgres

 PG_TIMEOUT=20
-PG_MINCONN=5
-PG_MAXCONN=10
+PG_MINCONN=2
+PG_MAXCONN=5
 PG_RETRY_MAX=50
 PG_RETRY_INTERVAL=2
-ASSIST_RECORDS_BUCKET=asayer-mobs-staging
-sessions_bucket=asayer-mobs-staging
-sessions_region=eu-central-1
-SITE_URL=http://127.0.0.1:3333
-sourcemaps_bucket=asayer-sourcemaps-staging
-sourcemaps_reader=http://127.0.0.1:3000/
+PG_POOL=true
+sessions_bucket=mobs
+sessions_region=us-east-1
+sourcemaps_bucket=
+sourcemaps_reader=http://127.0.0.1:3000/sourcemaps
 LOGLEVEL=INFO
+FS_DIR=

 idp_entityId=
 idp_sso_url=
 idp_sls_url=''
 idp_name=okta
 idp_x509cert=
-ASSIST_URL=http://127.0.0.1:9001/assist/%s
-assist=http://127.0.0.1:9001/assist/%s/sockets-live
-assistList=/sockets-list
-FS_DIR=/tmp
-PG_POOL=true
 EXP_SESSIONS_SEARCH=false
 EXP_AUTOCOMPLETE=true
 EXP_ERRORS_SEARCH=false
@@ -75,13 +69,16 @@ SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs
 SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe
 DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mobs
 PRESIGNED_URL_EXPIRATION=3600
-S3_HOST=https://ee.openreplay.com:443
-S3_KEY=keys
-S3_SECRET=secret
 AWS_DEFAULT_REGION=us-east-1
 ASSIST_JWT_EXPIRATION=14400
 ASSIST_JWT_SECRET=secret
+REDIS_STRING=redis://127.0.0.1:6379
 KAFKA_SERVERS=127.0.0.1:9092
 KAFKA_USE_SSL=false
+LOCAL_DEV=true
+ENABLE_SSO=false
+TZ=UTC
-docs_url=/docs
 root_path=''
+docs_url=/docs
+IOS_BUCKET=mobs
+IOS_VIDEO_BUCKET=mobs