Api v1.15.0 (#1521)

* feat(chalice): upgraded dependencies

* feat(chalice): changed path analysis schema

* feat(DB): click coordinate support

* feat(chalice): changed path analysis issues schema
feat(chalice): upgraded dependencies

* fix(chalice): fixed pydantic issue

* refactor(chalice): refresh token validator

* feat(chalice): role restrictions

* feat(chalice): EE path analysis changes

* refactor(DB): changed creation queries
refactor(DB): changed delete queries
feat(DB): support new path analysis payload

* feat(chalice): save path analysis card

* feat(chalice): restrict access

* feat(chalice): restrict access

* feat(chalice): EE save new path analysis card

* refactor(chalice): path analysis

* feat(chalice): path analysis new query

* fix(chalice): configurable CH config

* fix(chalice): assist autocomplete

* refactor(chalice): refactored permissions

* refactor(chalice): changed log level

* refactor(chalice): upgraded dependencies

* refactor(chalice): changed path analysis query

* refactor(chalice): changed path analysis query

* refactor(chalice): upgraded dependencies
refactor(alerts): upgraded dependencies
refactor(crons): upgraded dependencies

* feat(chalice): path analysis ignore start point

* feat(chalice): path analysis in progress

* refactor(chalice): path analysis changed link sort

* refactor(chalice): path analysis changed link sort

* refactor(chalice): path analysis changed link sort

* refactor(chalice): path analysis new query
refactor(chalice): authorizers

* refactor(chalice): refactored authorizer

* fix(chalice): fixed create card of PathAnalysis

* refactor(chalice): compute link-percentage for Path Analysis

* refactor(chalice): remove null starting point from Path Analysis

* feat(chalice): path analysis CH query

* refactor(chalice): changed Path Analysis links-value
fix(chalice): fixed search notes for EE

* feat(chalice): path analysis enhanced query results

* feat(chalice): include timezone in search sessions response

* refactor(chalice): refactored logs

* refactor(chalice): refactored logs
feat(chalice): get path analysis issues

* fix(chalice): fixed path analysis issues pagination

* fix(chalice): sessions-search handle null values

* feat(chalice): PathAnalysis start event support middle-event matching

* feat(chalice): PathAnalysis start event support middle-event matching

* feat(chalice): PathAnalysis support mixed events with start-point

* fix(chalice): PathAnalysis fixed eventType value when metricValue is missing

* fix(chalice): PathAnalysis fixed wrong super-class model for update card

* fix(chalice): PathAnalysis fixed search issues
refactor(chalice): upgraded dependencies
Kraiem Taha Yassine 2023-10-17 16:31:04 +02:00 committed by GitHub
parent 69ee313ce6
commit 5aabf1873b
12 changed files with 407 additions and 64 deletions


@@ -5,7 +5,7 @@ name = "pypi"
[packages]
requests = "==2.31.0"
boto3 = "==1.28.63"
boto3 = "==1.28.64"
pyjwt = "==2.8.0"
psycopg2-binary = "==2.9.9"
elasticsearch = "==8.10.1"


@@ -101,7 +101,7 @@ def __get_click_map_chart(project_id, user_id, data: schemas.CardClickMap, inclu
def __get_path_analysis_chart(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
if len(data.series) == 0:
data.series.append(
schemas.CardPathAnalysisSchema(startTimestamp=data.startTimestamp, endTimestamp=data.endTimestamp))
schemas.CardPathAnalysisSeriesSchema(startTimestamp=data.startTimestamp, endTimestamp=data.endTimestamp))
elif not isinstance(data.series[0].filter, schemas.PathAnalysisSchema):
data.series[0].filter = schemas.PathAnalysisSchema()
@@ -294,33 +294,28 @@ def __get_funnel_issues(project_id: int, user_id: int, data: schemas.CardFunnel)
def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
if len(data.series) == 0:
return {"data": {}}
card_table = schemas.CardTable(
search_data = schemas.SessionsSearchPayloadSchema(
startTimestamp=data.startTimestamp,
endTimestamp=data.endTimestamp,
metricType=schemas.MetricType.table,
metricOf=schemas.MetricOfTable.issues,
viewType=schemas.MetricTableViewType.table,
series=data.model_dump()["series"],
limit=data.limit,
page=data.page
page=data.page,
filters=data.series[0].filter.model_dump(by_alias=True)["filters"]
)
for s in data.start_point:
if data.start_type == "end":
card_table.series[0].filter.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
operator=s.operator,
value=s.value))
search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
operator=s.operator,
value=s.value))
else:
card_table.series[0].filter.filters.insert(0, schemas.SessionSearchEventSchema2(type=s.type,
operator=s.operator,
value=s.value))
search_data.filters.insert(0, schemas.SessionSearchEventSchema2(type=s.type,
operator=s.operator,
value=s.value))
for s in data.excludes:
card_table.series[0].filter.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
operator=schemas.SearchEventOperator._not_on,
value=s.value))
# result = __get_table_of_issues(project_id=project_id, user_id=user_id, data=card_table)
result = sessions.search_table_of_individual_issues(project_id=project_id,
metric_value=card_table.metric_value,
data=card_table)
search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
operator=schemas.SearchEventOperator._not_on,
value=s.value))
result = sessions.search_table_of_individual_issues(project_id=project_id, data=search_data)
return result


@@ -68,7 +68,7 @@ JOURNEY_TYPES = {
# compute avg_time_from_previous at the same level as sessions_count
# sort by top 5 according to sessions_count at the CTE level
# final part project data without grouping
def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
def path_analysis_deprecated(project_id: int, data: schemas.CardPathAnalysis):
sub_events = []
start_points_from = "pre_ranked_events"
start_points_conditions = []
@@ -388,6 +388,344 @@ WITH sub_sessions AS (SELECT session_id
return __transform_journey(rows=rows, reverse_path=reverse)
# query: Q5, the result is correct,
# startPoints are computed before ranked_events to reduce the number of window functions over rows
# replaced time_to_target by time_from_previous
# compute avg_time_from_previous at the same level as sessions_count
# sort by top 5 according to sessions_count at the CTE level
# final part project data without grouping
# if start-point is selected, the selected event is ranked n°1
def path_analysis(project_id: int, data: schemas.CardPathAnalysis):
sub_events = []
start_points_from = "pre_ranked_events"
sub_sessions_extra_projection = ""
start_points_conditions = []
sessions_conditions = ["start_ts>=%(startTimestamp)s", "start_ts<%(endTimestamp)s",
"project_id=%(project_id)s", "events_count > 1", "duration>0"]
if len(data.metric_value) == 0:
data.metric_value.append(schemas.ProductAnalyticsSelectedEventType.location)
sub_events.append({"table": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["table"],
"column": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["column"],
"eventType": schemas.ProductAnalyticsSelectedEventType.location.value})
else:
for v in data.metric_value:
if JOURNEY_TYPES.get(v):
sub_events.append({"table": JOURNEY_TYPES[v]["table"],
"column": JOURNEY_TYPES[v]["column"],
"eventType": v})
extra_values = {}
start_join = []
reverse = data.start_type == "end"
for i, sf in enumerate(data.start_point):
f_k = f"start_point_{i}"
op = sh.get_sql_operator(sf.operator)
is_not = sh.is_negation_operator(sf.operator)
extra_values = {**extra_values, **sh.multi_values(sf.value, value_key=f_k)}
start_points_conditions.append(f"(event_type='{sf.type}' AND " +
sh.multi_conditions(f'e_value {op} %({f_k})s', sf.value, is_not=is_not,
value_key=f_k)
+ ")")
main_column = JOURNEY_TYPES[sf.type]["column"]
sessions_conditions.append(sh.multi_conditions(f'{main_column} {op} %({f_k})s', sf.value, is_not=is_not,
value_key=f_k))
sessions_conditions += ["timestamp>=%(startTimestamp)s", "timestamp<%(endTimestamp)s"]
start_join.append(f"INNER JOIN {JOURNEY_TYPES[sf.type]['table']} USING (session_id)")
exclusions = {}
for i, ef in enumerate(data.excludes):
if ef.type in data.metric_value:
f_k = f"exclude_{i}"
extra_values = {**extra_values, **sh.multi_values(ef.value, value_key=f_k)}
exclusions[ef.type] = [
sh.multi_conditions(f'{JOURNEY_TYPES[ef.type]["column"]} != %({f_k})s', ef.value, is_not=True,
value_key=f_k)]
meta_keys = None
for i, f in enumerate(data.series[0].filter.filters):
op = sh.get_sql_operator(f.operator)
is_any = sh.isAny_opreator(f.operator)
is_not = sh.is_negation_operator(f.operator)
is_undefined = sh.isUndefined_operator(f.operator)
f_k = f"f_value_{i}"
extra_values = {**extra_values, **sh.multi_values(f.value, value_key=f_k)}
if not is_any and len(f.value) == 0:
continue
# ---- meta-filters
if f.type == schemas.FilterType.user_browser:
if is_any:
sessions_conditions.append('user_browser IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.user_os]:
if is_any:
sessions_conditions.append('user_os IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.user_device]:
if is_any:
sessions_conditions.append('user_device IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.user_country]:
if is_any:
sessions_conditions.append('user_country IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type == schemas.FilterType.user_city:
if is_any:
sessions_conditions.append('user_city IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type == schemas.FilterType.user_state:
if is_any:
sessions_conditions.append('user_state IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.utm_source]:
if is_any:
sessions_conditions.append('utm_source IS NOT NULL')
elif is_undefined:
sessions_conditions.append('utm_source IS NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif f.type in [schemas.FilterType.utm_medium]:
if is_any:
sessions_conditions.append('utm_medium IS NOT NULL')
elif is_undefined:
sessions_conditions.append('utm_medium IS NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif f.type in [schemas.FilterType.utm_campaign]:
if is_any:
sessions_conditions.append('utm_campaign IS NOT NULL')
elif is_undefined:
sessions_conditions.append('utm_campaign IS NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f'utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif f.type == schemas.FilterType.duration:
if len(f.value) > 0 and f.value[0] is not None:
sessions_conditions.append("duration >= %(minDuration)s")
extra_values["minDuration"] = f.value[0]
if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0:
sessions_conditions.append("duration <= %(maxDuration)s")
extra_values["maxDuration"] = f.value[1]
elif f.type == schemas.FilterType.referrer:
# extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"
if is_any:
sessions_conditions.append('base_referrer IS NOT NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f"base_referrer {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif f.type == schemas.FilterType.metadata:
# get metadata list only if you need it
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
if f.source in meta_keys.keys():
if is_any:
sessions_conditions.append(f"{metadata.index_to_colname(meta_keys[f.source])} IS NOT NULL")
elif is_undefined:
sessions_conditions.append(f"{metadata.index_to_colname(meta_keys[f.source])} IS NULL")
else:
sessions_conditions.append(
sh.multi_conditions(
f"{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text",
f.value, is_not=is_not, value_key=f_k))
elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
if is_any:
sessions_conditions.append('user_id IS NOT NULL')
elif is_undefined:
sessions_conditions.append('user_id IS NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif f.type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_ios]:
if is_any:
sessions_conditions.append('user_anonymous_id IS NOT NULL')
elif is_undefined:
sessions_conditions.append('user_anonymous_id IS NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f"user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
value_key=f_k))
elif f.type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
if is_any:
sessions_conditions.append('rev_id IS NOT NULL')
elif is_undefined:
sessions_conditions.append('rev_id IS NULL')
else:
sessions_conditions.append(
sh.multi_conditions(f"rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
elif f.type == schemas.FilterType.platform:
# op = __ sh.get_sql_operator(f.operator)
sessions_conditions.append(
sh.multi_conditions(f"user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif f.type == schemas.FilterType.issue:
if is_any:
sessions_conditions.append("array_length(issue_types, 1) > 0")
else:
sessions_conditions.append(
sh.multi_conditions(f"%({f_k})s {op} ANY (issue_types)", f.value, is_not=is_not,
value_key=f_k))
elif f.type == schemas.FilterType.events_count:
sessions_conditions.append(
sh.multi_conditions(f"events_count {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
events_subquery = []
for t in sub_events:
sub_events_conditions = ["e.timestamp >= %(startTimestamp)s",
"e.timestamp < %(endTimestamp)s"] + exclusions.get(t["eventType"], [])
if len(start_points_conditions) > 0:
sub_events_conditions.append("e.timestamp >= sub_sessions.start_event_timestamp")
events_subquery.append(f"""\
SELECT session_id, {t["column"]} AS e_value, timestamp, '{t["eventType"]}' AS event_type
FROM {t["table"]} AS e INNER JOIN sub_sessions USING (session_id)
WHERE {" AND ".join(sub_events_conditions)}""")
events_subquery = "\n UNION ALL \n".join(events_subquery)
if reverse:
path_direction = "DESC"
else:
path_direction = ""
if len(start_points_conditions) == 0:
start_points_from = """(SELECT event_type, e_value
FROM pre_ranked_events
WHERE event_number_in_session = 1
GROUP BY event_type, e_value
ORDER BY count(1) DESC
LIMIT 1) AS top_start_events
INNER JOIN pre_ranked_events
USING (event_type, e_value)"""
else:
sub_sessions_extra_projection = ", MIN(timestamp) AS start_event_timestamp"
start_points_conditions = ["(" + " OR ".join(start_points_conditions) + ")"]
start_points_conditions.append("event_number_in_session = 1")
steps_query = ["""n1 AS (SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
AVG(time_from_previous) AS avg_time_from_previous,
COUNT(1) AS sessions_count
FROM ranked_events INNER JOIN start_points USING (session_id)
WHERE event_number_in_session = 1
AND next_value IS NOT NULL
GROUP BY event_number_in_session, event_type, e_value, next_type, next_value
ORDER BY sessions_count DESC
LIMIT %(eventThresholdNumberInGroup)s)"""]
projection_query = ["""(SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
sessions_count,
avg_time_from_previous
FROM n1)"""]
for i in range(2, data.density):
steps_query.append(f"""n{i} AS (SELECT *
FROM (SELECT re.event_number_in_session,
re.event_type,
re.e_value,
re.next_type,
re.next_value,
AVG(re.time_from_previous) AS avg_time_from_previous,
COUNT(1) AS sessions_count
FROM ranked_events AS re
INNER JOIN n{i - 1} ON (n{i - 1}.next_value = re.e_value)
WHERE re.event_number_in_session = {i}
GROUP BY re.event_number_in_session, re.event_type, re.e_value, re.next_type, re.next_value) AS sub_level
ORDER BY sessions_count DESC
LIMIT %(eventThresholdNumberInGroup)s)""")
projection_query.append(f"""(SELECT event_number_in_session,
event_type,
e_value,
next_type,
next_value,
sessions_count,
avg_time_from_previous
FROM n{i})""")
with pg_client.PostgresClient() as cur:
pg_query = f"""\
WITH sub_sessions AS (SELECT session_id {sub_sessions_extra_projection}
FROM public.sessions {" ".join(start_join)}
WHERE {" AND ".join(sessions_conditions)}
{"GROUP BY session_id" if len(start_points_conditions) > 0 else ""}),
sub_events AS ({events_subquery}),
pre_ranked_events AS (SELECT *
FROM (SELECT session_id,
event_type,
e_value,
timestamp,
row_number() OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) AS event_number_in_session
FROM sub_events
ORDER BY session_id) AS full_ranked_events
WHERE event_number_in_session < %(density)s),
start_points AS (SELECT session_id
FROM {start_points_from}
WHERE {" AND ".join(start_points_conditions)}),
ranked_events AS (SELECT *,
LEAD(e_value, 1) OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) AS next_value,
LEAD(event_type, 1) OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) AS next_type,
abs(LAG(timestamp, 1) OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) -
timestamp) AS time_from_previous
FROM pre_ranked_events INNER JOIN start_points USING (session_id)),
{",".join(steps_query)}
{"UNION ALL".join(projection_query)};"""
params = {"project_id": project_id, "startTimestamp": data.startTimestamp,
"endTimestamp": data.endTimestamp, "density": data.density,
"eventThresholdNumberInGroup": 4 if data.hide_excess else 8,
**extra_values}
query = cur.mogrify(pg_query, params)
_now = time()
cur.execute(query)
if time() - _now > 2:
logger.info(f">>>>>>>>>PathAnalysis long query ({int(time() - _now)}s)<<<<<<<<<")
logger.info("----------------------")
logger.info(query)
logger.info("----------------------")
rows = cur.fetchall()
return __transform_journey(rows=rows, reverse_path=reverse)
#
# def __compute_weekly_percentage(rows):
# if rows is None or len(rows) == 0:
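
For context, the new query builds the journey edges in SQL: pre_ranked_events numbers each session's events by timestamp (row_number), and ranked_events pairs every event with the one that follows it (LEAD) plus the time elapsed since the previous one (LAG). A rough, illustrative Python analogue for a single session is sketched below; it is not part of this change and the event values are invented.

    # Illustrative only: emulate pre_ranked_events / ranked_events for one session.
    events = [("location", "/home", 100), ("click", "Sign up", 250), ("location", "/signup", 400)]
    events.sort(key=lambda e: e[2])                                  # ORDER BY timestamp
    ranked = []
    for i, (etype, value, ts) in enumerate(events, start=1):         # row_number() -> event_number_in_session
        nxt = events[i] if i < len(events) else (None, None, None)   # LEAD(event_type / e_value, 1)
        prev_ts = events[i - 2][2] if i > 1 else None                 # LAG(timestamp, 1)
        ranked.append({"event_number_in_session": i,
                       "event_type": etype, "e_value": value,
                       "next_type": nxt[0], "next_value": nxt[1],
                       "time_from_previous": abs(prev_ts - ts) if prev_ts is not None else None})
    # ranked now holds the edges /home -> Sign up -> /signup that the nN CTEs
    # aggregate into sessions_count and avg_time_from_previous per step.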


@@ -344,11 +344,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de
return sessions
def search_table_of_individual_issues(data: schemas.SessionsSearchPayloadSchema, project_id: int,
metric_value: List):
if len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is))
def search_table_of_individual_issues(data: schemas.SessionsSearchPayloadSchema, project_id: int):
full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None)


@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
boto3==1.28.63
boto3==1.28.64
pyjwt==2.8.0
psycopg2-binary==2.9.9
elasticsearch==8.10.1


@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
boto3==1.28.63
boto3==1.28.64
pyjwt==2.8.0
psycopg2-binary==2.9.9
elasticsearch==8.10.1


@@ -790,7 +790,8 @@ class PathAnalysisSubFilterSchema(BaseModel):
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
class ProductAnalyticsFilter(BaseModel):
class _ProductAnalyticsFilter(BaseModel):
is_event: Literal[False] = False
type: FilterType
operator: Union[SearchEventOperator, ClickEventExtraOperator, MathOperator] = Field(...)
# TODO: support session metadata filters
@@ -799,6 +800,21 @@ class ProductAnalyticsFilter(BaseModel):
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
class _ProductAnalyticsEventFilter(BaseModel):
is_event: Literal[True] = True
type: ProductAnalyticsSelectedEventType
operator: Union[SearchEventOperator, ClickEventExtraOperator, MathOperator] = Field(...)
# TODO: support session metadata filters
value: List[Union[IssueType, PlatformType, int, str]] = Field(...)
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
# this type is created to allow mixing events&filters and specifying a discriminator for PathAnalysis series filter
ProductAnalyticsFilter = Annotated[Union[_ProductAnalyticsFilter, _ProductAnalyticsEventFilter], \
Field(discriminator='is_event')]
class PathAnalysisSchema(_TimedSchema, _PaginatedSchema):
density: int = Field(default=7)
filters: List[ProductAnalyticsFilter] = Field(default=[])
@@ -1164,7 +1180,7 @@ class CardInsights(__CardSchema):
raise ValueError(f"metricType:{MetricType.insights} not supported yet.")
class CardPathAnalysisSchema(CardSessionsSchema):
class CardPathAnalysisSeriesSchema(CardSeriesSchema):
name: Optional[str] = Field(default=None)
filter: PathAnalysisSchema = Field(...)
density: int = Field(default=4, ge=2, le=10)
@@ -1182,14 +1198,14 @@ class CardPathAnalysis(__CardSchema):
metric_type: Literal[MetricType.pathAnalysis]
metric_of: MetricOfPathAnalysis = Field(default=MetricOfPathAnalysis.session_count)
view_type: MetricOtherViewType = Field(...)
metric_value: List[ProductAnalyticsSelectedEventType] = Field(default=[ProductAnalyticsSelectedEventType.location])
metric_value: List[ProductAnalyticsSelectedEventType] = Field(default=[])
density: int = Field(default=4, ge=2, le=10)
start_type: Literal["start", "end"] = Field(default="start")
start_point: List[PathAnalysisSubFilterSchema] = Field(default=[])
excludes: List[PathAnalysisSubFilterSchema] = Field(default=[])
series: List[CardPathAnalysisSchema] = Field(default=[])
series: List[CardPathAnalysisSeriesSchema] = Field(default=[])
@model_validator(mode="before")
def __enforce_default(cls, values):
@@ -1201,17 +1217,14 @@ class CardPathAnalysis(__CardSchema):
@model_validator(mode="after")
def __clean_start_point_and_enforce_metric_value(cls, values):
start_point = []
metric_value = []
for s in values.start_point:
if len(s.value) == 0:
continue
start_point.append(s)
metric_value.append(s.type)
values.metric_value.append(s.type)
values.start_point = start_point
if len(metric_value) > 0:
metric_value = remove_duplicate_values(metric_value)
values.metric_value = metric_value
values.metric_value = remove_duplicate_values(values.metric_value)
return values
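
The _ProductAnalyticsFilter / _ProductAnalyticsEventFilter split introduced above relies on a Pydantic v2 discriminated union keyed on is_event, so a single filters list can mix plain session filters with event filters. A minimal standalone sketch of the same pattern (simplified field types and hypothetical names, not the actual schemas module):

    from typing import Annotated, List, Literal, Union
    from pydantic import BaseModel, Field

    class PlainFilter(BaseModel):
        is_event: Literal[False] = False
        type: str
        value: List[str] = []

    class EventFilter(BaseModel):
        is_event: Literal[True] = True
        type: str
        value: List[str] = []

    # Discriminated union: the is_event literal decides which model parses each item.
    MixedFilter = Annotated[Union[PlainFilter, EventFilter], Field(discriminator="is_event")]

    class Payload(BaseModel):
        filters: List[MixedFilter] = []

    payload = Payload.model_validate({"filters": [
        {"is_event": True, "type": "location", "value": ["/checkout"]},
        {"is_event": False, "type": "userCountry", "value": ["DE"]},
    ]})
    # Pydantic selects EventFilter or PlainFilter per item based solely on is_event.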


@@ -6,10 +6,10 @@ name = "pypi"
[packages]
urllib3 = "==1.26.16"
requests = "==2.31.0"
boto3 = "==1.28.62"
boto3 = "==1.28.64"
pyjwt = "==2.8.0"
psycopg2-binary = "==2.9.9"
elasticsearch = "==8.10.0"
elasticsearch = "==8.10.1"
jira = "==3.5.2"
fastapi = "==0.103.2"
gunicorn = "==21.2.0"
@@ -18,7 +18,7 @@ apscheduler = "==3.10.4"
python3-saml = "==1.15.0"
python-multipart = "==0.0.6"
redis = "==5.0.1"
azure-storage-blob = "==12.18.2"
azure-storage-blob = "==12.18.3"
uvicorn = {extras = ["standard"], version = "==0.23.2"}
pydantic = {extras = ["email"], version = "==2.3.0"}
clickhouse-driver = {extras = ["lz4"], version = "==0.2.6"}


@@ -122,7 +122,7 @@ def __get_insights_chart(project_id: int, data: schemas.CardInsights, user_id: i
def __get_path_analysis_chart(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
if len(data.series) == 0:
data.series.append(
schemas.CardPathAnalysisSchema(startTimestamp=data.startTimestamp, endTimestamp=data.endTimestamp))
schemas.CardPathAnalysisSeriesSchema(startTimestamp=data.startTimestamp, endTimestamp=data.endTimestamp))
elif not isinstance(data.series[0].filter, schemas.PathAnalysisSchema):
data.series[0].filter = schemas.PathAnalysisSchema()
@@ -315,28 +315,29 @@ def __get_funnel_issues(project_id: int, user_id: int, data: schemas.CardFunnel)
def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.CardPathAnalysis):
if len(data.series) == 0:
return {"data": {}}
card_table = schemas.CardTable(
search_data = schemas.SessionsSearchPayloadSchema(
startTimestamp=data.startTimestamp,
endTimestamp=data.endTimestamp,
metricType=schemas.MetricType.table,
metricOf=schemas.MetricOfTable.issues,
viewType=schemas.MetricTableViewType.table,
series=data.model_dump()["series"])
limit=data.limit,
page=data.page,
filters=data.series[0].filter.model_dump(by_alias=True)["filters"]
)
for s in data.start_point:
if data.start_type == "end":
card_table.series[0].filter.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
operator=s.operator,
value=s.value))
search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
operator=s.operator,
value=s.value))
else:
card_table.series[0].filter.filters.insert(0, schemas.SessionSearchEventSchema2(type=s.type,
operator=s.operator,
value=s.value))
search_data.filters.insert(0, schemas.SessionSearchEventSchema2(type=s.type,
operator=s.operator,
value=s.value))
for s in data.excludes:
card_table.series[0].filter.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
operator=schemas.SearchEventOperator._not_on,
value=s.value))
result = __get_table_of_issues(project_id=project_id, user_id=user_id, data=card_table)
return result[0] if len(result) > 0 else {}
search_data.filters.append(schemas.SessionSearchEventSchema2(type=s.type,
operator=schemas.SearchEventOperator._not_on,
value=s.value))
result = sessions.search_table_of_individual_issues(project_id=project_id, data=search_data)
return result
def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):


@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
boto3==1.28.63
boto3==1.28.64
pyjwt==2.8.0
psycopg2-binary==2.9.9
elasticsearch==8.10.1
@@ -17,4 +17,4 @@ apscheduler==3.10.4
clickhouse-driver[lz4]==0.2.6
python-multipart==0.0.6
azure-storage-blob==12.18.2
azure-storage-blob==12.18.3


@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
boto3==1.28.63
boto3==1.28.64
pyjwt==2.8.0
psycopg2-binary==2.9.9
elasticsearch==8.10.1
@@ -16,4 +16,4 @@ apscheduler==3.10.4
clickhouse-driver[lz4]==0.2.6
redis==5.0.1
azure-storage-blob==12.18.2
azure-storage-blob==12.18.3


@@ -1,7 +1,7 @@
# Keep this version to not have conflicts between requests and boto3
urllib3==1.26.16
requests==2.31.0
boto3==1.28.63
boto3==1.28.64
pyjwt==2.8.0
psycopg2-binary==2.9.9
elasticsearch==8.10.1
@@ -25,4 +25,4 @@ python-multipart==0.0.6
redis==5.0.1
#confluent-kafka==2.1.0
azure-storage-blob==12.18.2
azure-storage-blob==12.18.3