feat(api): fix JIRA error handling
feat(api): fix dashboard split of old metrics
parent 1743665769
commit 7d18c093eb
3 changed files with 33 additions and 31 deletions
@@ -2261,14 +2261,15 @@ def get_application_activity_avg_image_load_time(project_id, startTimestamp=Time
                                                     endTimestamp=TimeUTC.now(), **args):
     with pg_client.PostgresClient() as cur:
         row = __get_application_activity_avg_image_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
-        results = helper.dict_to_camel_case(row)
+        results = row
+        results["chart"] = get_performance_avg_image_load_time(project_id, startTimestamp, endTimestamp, **args)
         diff = endTimestamp - startTimestamp
         endTimestamp = startTimestamp
         startTimestamp = endTimestamp - diff
         row = __get_application_activity_avg_image_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
         previous = helper.dict_to_camel_case(row)
         results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
-        results["chart"] = get_performance_avg_image_load_time(project_id, startTimestamp, endTimestamp, **args)
+
     return results
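The three get_application_activity_avg_* hunks in this file make the same change: results is kept as the raw row, and the chart is now computed before startTimestamp/endTimestamp are shifted back by one window length for the comparison query, so the chart reflects the current period rather than the old one. A minimal sketch of that pattern, where fetch_value, fetch_chart and progress() are hypothetical stand-ins for the real query helpers, not code from this repository:

def activity_with_progress(project_id, start_ts, end_ts, fetch_value, fetch_chart):
    # Sketch only: fetch_value/fetch_chart stand in for the __get_* and get_performance_* helpers.
    results = fetch_value(project_id, start_ts, end_ts)            # current window value
    results["chart"] = fetch_chart(project_id, start_ts, end_ts)   # chart built before the window is shifted

    window = end_ts - start_ts                                     # window length
    end_ts, start_ts = start_ts, start_ts - window                 # previous window of the same length

    previous = fetch_value(project_id, start_ts, end_ts)           # previous window value
    results["progress"] = progress(old_val=previous["value"], new_val=results["value"])
    return results

def progress(old_val, new_val):
    # One plausible definition of the progress helper: percent change, guarded against division by zero.
    if old_val == 0:
        return 0 if new_val == 0 else 100
    return round((new_val - old_val) / old_val * 100, 2)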
@@ -2297,8 +2298,7 @@ def get_performance_avg_image_load_time(project_id, startTimestamp=TimeUTC.now(d
                         AND resources.type = 'img' AND resources.duration>0
                         {(f' AND ({" OR ".join(img_constraints)})') if len(img_constraints) > 0 else ""}
                 )
-                SELECT
-                       generated_timestamp AS timestamp,
+                SELECT generated_timestamp AS timestamp,
                        COALESCE(AVG(resources.duration),0) AS value
                 FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
                 LEFT JOIN LATERAL (
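The SELECT collapse above is cosmetic, but the query shape is worth spelling out: generate_series emits one timestamp per chart step, the LEFT JOIN LATERAL subquery pulls the durations that fall into that step, and COALESCE(AVG(...), 0) turns empty buckets into zeros. A rough, self-contained illustration of that shape; the table name, columns, and bucketing predicate below are assumptions, not the production SQL:

# Illustrative only: "resources" and its columns are assumed, not copied from the repo.
import psycopg2

BUCKETED_AVG = """
SELECT generated_timestamp AS timestamp,
       COALESCE(AVG(resources.duration), 0) AS value
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL (
    SELECT duration
    FROM resources
    WHERE timestamp >= generated_timestamp
      AND timestamp < generated_timestamp + %(step_size)s
) AS resources ON TRUE
GROUP BY generated_timestamp
ORDER BY generated_timestamp;
"""

def avg_duration_chart(conn, start_ts, end_ts, density=7):
    step_size = max((end_ts - start_ts) // density, 1)  # bucket width in ms
    with conn.cursor() as cur:
        cur.execute(BUCKETED_AVG, {"startTimestamp": start_ts,
                                   "endTimestamp": end_ts,
                                   "step_size": step_size})
        return [{"timestamp": ts, "value": float(val)} for ts, val in cur.fetchall()]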
@@ -2337,14 +2337,14 @@ def get_application_activity_avg_page_load_time(project_id, startTimestamp=TimeU
                                                    endTimestamp=TimeUTC.now(), **args):
     with pg_client.PostgresClient() as cur:
         row = __get_application_activity_avg_page_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
-        results = helper.dict_to_camel_case(row)
+        results = row
+        results["chart"] = get_performance_avg_page_load_time(project_id, startTimestamp, endTimestamp, **args)
         diff = endTimestamp - startTimestamp
         endTimestamp = startTimestamp
         startTimestamp = endTimestamp - diff
         row = __get_application_activity_avg_page_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
         previous = helper.dict_to_camel_case(row)
         results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
-        results["chart"] = get_performance_avg_page_load_time(project_id, startTimestamp, endTimestamp, **args)
     return results
@@ -2369,8 +2369,7 @@ def get_performance_avg_page_load_time(project_id, startTimestamp=TimeUTC.now(de
                 WHERE {" AND ".join(pg_sub_query_subset)} AND pages.load_time>0 AND pages.load_time IS NOT NULL
                 {(f' AND ({" OR ".join(location_constraints)})') if len(location_constraints) > 0 else ""}
                 )
-                SELECT
-                       generated_timestamp AS timestamp,
+                SELECT generated_timestamp AS timestamp,
                        COALESCE(AVG(pages.load_time),0) AS value
                 FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
                 LEFT JOIN LATERAL ( SELECT pages.load_time
@@ -2407,14 +2406,14 @@ def get_application_activity_avg_request_load_time(project_id, startTimestamp=Ti
                                                    endTimestamp=TimeUTC.now(), **args):
     with pg_client.PostgresClient() as cur:
         row = __get_application_activity_avg_request_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
-        results = helper.dict_to_camel_case(row)
+        results = row
+        results["chart"] = get_performance_avg_request_load_time(project_id, startTimestamp, endTimestamp, **args)
         diff = endTimestamp - startTimestamp
         endTimestamp = startTimestamp
         startTimestamp = endTimestamp - diff
         row = __get_application_activity_avg_request_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
         previous = helper.dict_to_camel_case(row)
         results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
-        results["chart"] = get_performance_avg_request_load_time(project_id, startTimestamp, endTimestamp, **args)
     return results
@@ -2447,8 +2446,7 @@ def get_performance_avg_request_load_time(project_id, startTimestamp=TimeUTC.now
                         AND resources.type = 'fetch' AND resources.duration>0
                         {(f' AND ({" OR ".join(request_constraints)})') if len(request_constraints) > 0 else ""}
                 )
-                SELECT
-                       generated_timestamp AS timestamp,
+                SELECT generated_timestamp AS timestamp,
                        COALESCE(AVG(resources.duration),0) AS value
                 FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
                 LEFT JOIN LATERAL (
@@ -34,7 +34,7 @@ class JiraManager:
             if (e.status_code // 100) == 4 and self.retries > 0:
                 time.sleep(1)
                 return self.get_projects()
-            print(f"=>Error {e.text}")
+            print(f"=>Exception {e.text}")
             raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
         projects_dict_list = []
         for project in projects:
@@ -50,7 +50,7 @@ class JiraManager:
             if (e.status_code // 100) == 4 and self.retries > 0:
                 time.sleep(1)
                 return self.get_project()
-            print(f"=>Error {e.text}")
+            print(f"=>Exception {e.text}")
             raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
         return self.__parser_project_info(project)
@@ -66,7 +66,7 @@ class JiraManager:
             if (e.status_code // 100) == 4 and self.retries > 0:
                 time.sleep(1)
                 return self.get_issues(sql, offset)
-            print(f"=>Error {e.text}")
+            print(f"=>Exception {e.text}")
             raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")

         issue_dict_list = []
@@ -86,7 +86,7 @@ class JiraManager:
             if (e.status_code // 100) == 4 and self.retries > 0:
                 time.sleep(1)
                 return self.get_issue(issue_id)
-            print(f"=>Error {e.text}")
+            print(f"=>Exception {e.text}")
             raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
         return self.__parser_issue_info(issue)
@@ -106,8 +106,8 @@ class JiraManager:
             if self.retries > 0:
                 time.sleep(1)
                 return self.get_issue_v3(issue_id)
-            print(f"=>Error {e}")
-            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
+            print(f"=>Exception {e}")
+            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: get issue error")
         return self.__parser_issue_info(issue.json())

     def create_issue(self, issue_dict):
@@ -120,7 +120,7 @@ class JiraManager:
             if (e.status_code // 100) == 4 and self.retries > 0:
                 time.sleep(1)
                 return self.create_issue(issue_dict)
-            print(f"=>Error {e.text}")
+            print(f"=>Exception {e.text}")
             raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")

     def close_issue(self, issue):
@@ -132,7 +132,7 @@ class JiraManager:
             if (e.status_code // 100) == 4 and self.retries > 0:
                 time.sleep(1)
                 return self.close_issue(issue)
-            print(f"=>Error {e.text}")
+            print(f"=>Exception {e.text}")
             raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")

     def assign_issue(self, issue_id, account_id) -> bool:
@@ -143,7 +143,7 @@ class JiraManager:
             if (e.status_code // 100) == 4 and self.retries > 0:
                 time.sleep(1)
                 return self.assign_issue(issue_id, account_id)
-            print(f"=>Error {e.text}")
+            print(f"=>Exception {e.text}")
             raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")

     def add_comment(self, issue_id: str, comment: str):
@@ -154,7 +154,7 @@ class JiraManager:
             if (e.status_code // 100) == 4 and self.retries > 0:
                 time.sleep(1)
                 return self.add_comment(issue_id, comment)
-            print(f"=>Error {e.text}")
+            print(f"=>Exception {e.text}")
             raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
         return self.__parser_comment_info(comment)
@@ -191,8 +191,8 @@ class JiraManager:
             if self.retries > 0:
                 time.sleep(1)
                 return self.add_comment_v3(issue_id, comment)
-            print(f"=>Error {e}")
-            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
+            print(f"=>Exception {e}")
+            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: comment error")
         return self.__parser_comment_info(comment_response.json())

     def get_comments(self, issueKey):
@@ -207,7 +207,7 @@ class JiraManager:
             if (e.status_code // 100) == 4 and self.retries > 0:
                 time.sleep(1)
                 return self.get_comments(issueKey)
-            print(f"=>Error {e.text}")
+            print(f"=>Exception {e.text}")
             raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")

     def get_meta(self):
@@ -224,7 +224,7 @@ class JiraManager:
             if (e.status_code // 100) == 4 and self.retries > 0:
                 time.sleep(1)
                 return self.get_assignable_users()
-            print(f"=>Error {e.text}")
+            print(f"=>Exception {e.text}")
             if e.status_code == 401:
                 raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="JIRA: 401 Unauthorized")
             raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
@@ -247,7 +247,7 @@ class JiraManager:
             if (e.status_code // 100) == 4 and self.retries > 0:
                 time.sleep(1)
                 return self.get_issue_types()
-            print(f"=>Error {e.text}")
+            print(f"=>Exception {e.text}")
             raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
         types_dict = []
         for type in types:
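Every JiraManager hunk above makes the same pair of changes: the log prefix moves from "=>Error" to "=>Exception", and the two v3 methods stop echoing e.text in the HTTPException detail. The surrounding retry-then-raise pattern is sketched minimally below using the python jira client; the retries decrement is an assumption, since the diff only shows the check:

# Minimal sketch of the shared error-handling pattern; not the full JiraManager.
import time

from fastapi import HTTPException, status
from jira import JIRA, JIRAError  # JIRAError exposes .status_code and .text


class JiraManagerSketch:
    def __init__(self, url, email, api_token, retries=3):
        self._client = JIRA(server=url, basic_auth=(email, api_token))
        self.retries = retries

    def get_projects(self):
        try:
            projects = self._client.projects()
        except JIRAError as e:
            # on any 4xx, back off briefly and retry while attempts remain
            if (e.status_code // 100) == 4 and self.retries > 0:
                self.retries -= 1  # assumed; the diff only shows the check
                time.sleep(1)
                return self.get_projects()
            print(f"=>Exception {e.text}")
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
                                detail=f"JIRA: {e.text}")
        return [{"id": p.id, "key": p.key, "name": p.name} for p in projects]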
@@ -325,7 +325,7 @@ def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPa
 @app.post('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"])
 @app.get('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"])
 def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-    return {"data": [
+    results = [
         {"key": "count_sessions",
          "data": dashboard.get_processed_sessions(project_id=projectId, **data.dict())},
         *helper.explode_widget(data={**dashboard.get_application_activity(project_id=projectId, **data.dict()),
@@ -343,13 +343,15 @@ def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body
         {"key": "avg_used_js_heap_size", "data": dashboard.get_memory_consumption(project_id=projectId, **data.dict())},
         {"key": "avg_cpu", "data": dashboard.get_avg_cpu(project_id=projectId, **data.dict())},
         {"key": schemas.TemplateKeys.avg_fps, "data": dashboard.get_avg_fps(project_id=projectId, **data.dict())}
-    ]}
+    ]
+    results = sorted(results, key=lambda r: r["key"])
+    return {"data": results}


 @app.post('/{projectId}/dashboard/overview2', tags=["dashboard", "metrics"])
 @app.get('/{projectId}/dashboard/overview2', tags=["dashboard", "metrics"])
 def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
-    return {"data": [
+    results = [
         {"key": schemas.TemplateKeys.count_sessions,
          "data": dashboard.get_processed_sessions(project_id=projectId, **data.dict())},
         {"key": schemas.TemplateKeys.avg_image_load_time,
@@ -388,4 +390,6 @@ def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body
          "data": dashboard.get_memory_consumption(project_id=projectId, **data.dict())},
         {"key": schemas.TemplateKeys.avg_cpu, "data": dashboard.get_avg_cpu(project_id=projectId, **data.dict())},
         {"key": schemas.TemplateKeys.avg_fps, "data": dashboard.get_avg_fps(project_id=projectId, **data.dict())}
-    ]}
+    ]
+    results = sorted(results, key=lambda r: r["key"])
+    return {"data": results}
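Both overview endpoints now collect the widgets into results, sort them by key, and only then wrap them in the {"data": ...} envelope, which gives clients a stable ordering regardless of how the metric calls are listed. A stripped-down FastAPI sketch of that flow; the payload model and metric functions below are stubs standing in for the real schemas and dashboard modules:

# Stub sketch of the reworked overview handler; MetricPayloadSchema and the metric
# functions are placeholders, not the project's real schemas/dashboard code.
from fastapi import Body, FastAPI
from pydantic import BaseModel

app = FastAPI()


class MetricPayloadSchema(BaseModel):
    startTimestamp: int = 0
    endTimestamp: int = 0


def get_processed_sessions(project_id, **args):
    return {"value": 0, "chart": []}


def get_avg_cpu(project_id, **args):
    return {"value": 0, "chart": []}


@app.post('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"])
def get_dashboard_group(projectId: int, data: MetricPayloadSchema = Body(...)):
    results = [
        {"key": "count_sessions", "data": get_processed_sessions(project_id=projectId, **data.dict())},
        {"key": "avg_cpu", "data": get_avg_cpu(project_id=projectId, **data.dict())},
    ]
    # sort widgets by key so the response order is deterministic
    results = sorted(results, key=lambda r: r["key"])
    return {"data": results}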