feat(api): support pagination for dashboard sessions drill-down

This commit is contained in:
Taha Yassine Kraiem 2022-04-15 13:45:29 +02:00
parent a2d17d930d
commit 71623cf992
8 changed files with 26 additions and 30 deletions

View file

@@ -97,6 +97,8 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessi
for s in metric.series:
s.filter.startDate = data.startTimestamp
s.filter.endDate = data.endTimestamp
s.filter.limit = data.limit
s.filter.page = data.page
results.append({"seriesId": s.series_id, "seriesName": s.name,
**sessions.search2_pg(data=s.filter, project_id=project_id, user_id=user_id)})

View file

@@ -261,7 +261,6 @@ def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None
}}
@dev.timed
def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema):
data.events = filter_stages(data.events)
data.events = __fix_stages(data.events)
@@ -313,7 +312,6 @@ def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
return f
@dev.timed
def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.FunnelSearchPayloadSchema, range_value=None,
start_date=None, end_date=None):
if len(data.events) == 0:

View file

@@ -3,7 +3,6 @@ from chalicelib.utils import helper, pg_client
from chalicelib.utils import dev
@dev.timed
def get_by_url(project_id, data):
args = {"startDate": data.get('startDate', TimeUTC.now(delta_days=-30)),
"endDate": data.get('endDate', TimeUTC.now()),

View file

@@ -28,7 +28,6 @@ JOURNEY_TYPES = {
}
@dev.timed
def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args):
pg_sub_query_subset = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
time_constraint=True)
@@ -181,7 +180,6 @@ def __complete_acquisition(rows, start_date, end_date=None):
return rows
@dev.timed
def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[],
**args):
startTimestamp = TimeUTC.trunc_week(startTimestamp)
@@ -229,7 +227,6 @@ def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endT
}
@dev.timed
def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
@@ -277,7 +274,6 @@ def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en
}
@dev.timed
def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
@@ -367,7 +363,7 @@ def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en
}
@dev.timed
def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
@@ -460,7 +456,7 @@ def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70),
}
@dev.timed
def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
@@ -525,7 +521,7 @@ def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_da
return popularity
@dev.timed
def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
@@ -595,7 +591,7 @@ def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), end
"filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]}
@dev.timed
def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
@@ -655,7 +651,7 @@ def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days
"filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]}
@dev.timed
def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
@@ -720,7 +716,7 @@ def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_da
"filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]}
@dev.timed
def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
@@ -757,7 +753,7 @@ def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), en
return rows
@dev.timed
def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
@@ -799,7 +795,7 @@ def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTime
return row_users
@dev.timed
def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args)
@@ -824,7 +820,7 @@ def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimes
return helper.dict_to_camel_case(row_users)
@dev.timed
def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
@@ -889,7 +885,7 @@ def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTi
}
@dev.timed
def search(text, feature_type, project_id, platform=None):
if not feature_type:
resource_type = "ALL"

View file

@@ -273,7 +273,6 @@ def add_edit_delete(tenant_id, project_id, new_metas):
return {"data": get(project_id)}
@dev.timed
def get_remaining_metadata_with_count(tenant_id):
all_projects = projects.get_projects(tenant_id=tenant_id)
results = []

View file

@@ -41,7 +41,6 @@ def __create(tenant_id, name):
return get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True)
@dev.timed
def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False, version=False,
last_tracker_version=None):
with pg_client.PostgresClient() as cur:

View file

@@ -24,7 +24,7 @@ T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.36
21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042}
@dev.timed
def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
"""
Add minimal timestamp
@@ -293,7 +293,7 @@ def pearson_corr(x: list, y: list):
return r, confidence, False
@dev.timed
def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_with_context, first_stage, last_stage):
"""
Returns two lists with binary values 0/1:
@@ -363,7 +363,7 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_
return transitions, errors, all_errors, n_sess_affected
@dev.timed
def get_affected_users_for_all_issues(rows, first_stage, last_stage):
"""
@@ -415,7 +415,7 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage):
return all_issues_with_context, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict, contexts
@dev.timed
def count_sessions(rows, n_stages):
session_counts = {i: set() for i in range(1, n_stages + 1)}
for ind, row in enumerate(rows):
@@ -467,7 +467,7 @@ def get_stages(stages, rows):
return stages_list
@dev.timed
def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False):
"""
@@ -544,7 +544,7 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
return n_critical_issues, issues_dict, total_drop_due_to_issues
@dev.timed
def get_top_insights(filter_d, project_id):
output = []
stages = filter_d.get("events", [])
@@ -582,7 +582,7 @@ def get_top_insights(filter_d, project_id):
return stages_list, total_drop_due_to_issues
@dev.timed
def get_issues_list(filter_d, project_id, first_stage=None, last_stage=None):
output = dict({'critical_issues_count': 0})
stages = filter_d.get("events", [])

View file

@@ -613,7 +613,12 @@ class SessionSearchFilterSchema(__MixedSearchFilter):
return values
class SessionsSearchPayloadSchema(BaseModel):
class _PaginatedSchema(BaseModel):
limit: int = Field(default=200, gt=0, le=200)
page: int = Field(default=1, gt=0)
class SessionsSearchPayloadSchema(_PaginatedSchema):
events: List[_SessionSearchEventSchema] = Field([])
filters: List[SessionSearchFilterSchema] = Field([])
startDate: int = Field(None)
@@ -622,8 +627,6 @@ class SessionsSearchPayloadSchema(BaseModel):
order: Literal["asc", "desc"] = Field(default="desc")
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then)
group_by_user: bool = Field(default=False)
limit: int = Field(default=200, gt=0, le=200)
page: int = Field(default=1, gt=0)
bookmarked: bool = Field(default=False)
class Config:
@@ -803,7 +806,7 @@ class TimeseriesMetricOfType(str, Enum):
session_count = "sessionCount"
class CustomMetricSessionsPayloadSchema(FlatSessionsSearch):
class CustomMetricSessionsPayloadSchema(FlatSessionsSearch, _PaginatedSchema):
startTimestamp: int = Field(TimeUTC.now(-7))
endTimestamp: int = Field(TimeUTC.now())
series: Optional[List[CustomMetricCreateSeriesSchema]] = Field(default=None)