Merge remote-tracking branch 'origin/api-v1.8.2' into dev

commit f99077cb45
Author: Taha Yassine Kraiem
Date:   2022-11-25 19:30:36 +01:00
7 changed files with 19 additions and 11 deletions

@@ -266,7 +266,8 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
     params = {"metric_id": metric_id, "is_public": data.is_public, "name": data.name,
               "user_id": user_id, "project_id": project_id, "view_type": data.view_type,
               "metric_type": data.metric_type, "metric_of": data.metric_of,
-              "metric_value": data.metric_value, "metric_format": data.metric_format}
+              "metric_value": data.metric_value, "metric_format": data.metric_format,
+              "config": json.dumps(data.config.dict())}
     for i, s in enumerate(data.series):
         prefix = "u_"
         if s.index is None:
@@ -316,7 +317,8 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
                 view_type= %(view_type)s, metric_type= %(metric_type)s,
                 metric_of= %(metric_of)s, metric_value= %(metric_value)s,
                 metric_format= %(metric_format)s,
-                edited_at = timezone('utc'::text, now())
+                edited_at = timezone('utc'::text, now()),
+                default_config = %(config)s
             WHERE metric_id = %(metric_id)s
               AND project_id = %(project_id)s
               AND (user_id = %(user_id)s OR is_public)
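Note on the two hunks above: the request's config sub-model is serialized with
json.dumps(data.config.dict()) and bound as %(config)s, which the UPDATE statement
stores in the metric's default_config column. A minimal sketch of that step,
assuming Pydantic v1 and reusing CustomMetricsConfigSchema from the schemas hunk
below; the sample values are illustrative only:

    import json
    from typing import Optional
    from pydantic import BaseModel, Field

    class CustomMetricsConfigSchema(BaseModel):
        col: Optional[int] = Field(...)
        row: Optional[int] = Field(...)
        position: Optional[int] = Field(default=0)

    # data.config is an instance of the schema; .dict() yields a plain dict and
    # json.dumps() produces the string written to the default_config column.
    config = CustomMetricsConfigSchema(col=2, row=1)
    print(json.dumps(config.dict()))  # {"col": 2, "row": 1, "position": 0}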

@@ -111,6 +111,8 @@ def get_dashboard(project_id, user_id, dashboard_id):
         for w in row["widgets"]:
             w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
             w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
+            w["config"]["col"] = w["default_config"]["col"]
+            w["config"]["row"] = w["default_config"]["row"]
             for s in w["series"]:
                 s["created_at"] = TimeUTC.datetime_to_timestamp(s["created_at"])
     return helper.dict_to_camel_case(row)
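Note: the two added lines copy the persisted layout back onto each widget before the
row is camel-cased, so the dashboard response exposes the col/row saved in
default_config. A rough sketch of the effect on a single widget dict (field values
are illustrative only):

    # widget as it might come back from the dashboards query (illustrative values)
    w = {"config": {"position": 0},
         "default_config": {"col": 2, "row": 1, "position": 0}}

    # mirror the added lines: the layout now comes from default_config
    w["config"]["col"] = w["default_config"]["col"]
    w["config"]["row"] = w["default_config"]["row"]

    print(w["config"])  # {'position': 0, 'col': 2, 'row': 1}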

@@ -534,8 +534,8 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
         if is_sign:
             n_critical_issues += n_issues_dict[issue_id]

     # To limit the number of returned issues to the frontend
-    issues_dict["significant"] = issues_dict["significant"][:50]
-    issues_dict["insignificant"] = issues_dict["insignificant"][:50]
+    issues_dict["significant"] = issues_dict["significant"][:20]
+    issues_dict["insignificant"] = issues_dict["insignificant"][:20]
     return n_critical_issues, issues_dict, total_drop_due_to_issues
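Note: the cap on the issue lists returned to the frontend drops from 50 to 20 entries
per bucket. Python slicing simply returns what is available when a list is shorter
than the cap, so no extra length check is needed; a tiny illustration:

    issues = ["issue-%d" % i for i in range(5)]
    print(issues[:20])       # all 5 items; slicing past the end never raises
    print(len(issues[:20]))  # 5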

@@ -874,14 +874,14 @@ class TryCustomMetricsPayloadSchema(CustomMetricChartPayloadSchema):
 class CustomMetricsConfigSchema(BaseModel):
-    col: Optional[int] = Field(default=2)
-    row: Optional[int] = Field(default=2)
+    col: Optional[int] = Field(...)
+    row: Optional[int] = Field(...)
     position: Optional[int] = Field(default=0)


 class CreateCustomMetricsSchema(TryCustomMetricsPayloadSchema):
     series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1)
-    config: CustomMetricsConfigSchema = Field(default=CustomMetricsConfigSchema())
+    config: CustomMetricsConfigSchema = Field(...)

     @root_validator(pre=True)
     def transform_series(cls, values):
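Note: switching these fields to Field(...) makes them required in Pydantic (v1, as
implied by the root_validator above): col and row must now be sent explicitly, and
CreateCustomMetricsSchema no longer falls back to an implicit
CustomMetricsConfigSchema() default. A small sketch of the resulting validation
behaviour:

    from typing import Optional
    from pydantic import BaseModel, Field, ValidationError

    class CustomMetricsConfigSchema(BaseModel):
        col: Optional[int] = Field(...)    # required after this change
        row: Optional[int] = Field(...)    # required after this change
        position: Optional[int] = Field(default=0)

    try:
        CustomMetricsConfigSchema()        # col/row omitted
    except ValidationError as e:
        print(e)                           # both col and row reported as missing

    print(CustomMetricsConfigSchema(col=2, row=2))  # col=2 row=2 position=0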

@@ -279,7 +279,8 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
     params = {"metric_id": metric_id, "is_public": data.is_public, "name": data.name,
               "user_id": user_id, "project_id": project_id, "view_type": data.view_type,
               "metric_type": data.metric_type, "metric_of": data.metric_of,
-              "metric_value": data.metric_value, "metric_format": data.metric_format}
+              "metric_value": data.metric_value, "metric_format": data.metric_format,
+              "config": json.dumps(data.config.dict())}
     for i, s in enumerate(data.series):
         prefix = "u_"
         if s.index is None:
@@ -329,7 +330,8 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
                 view_type= %(view_type)s, metric_type= %(metric_type)s,
                 metric_of= %(metric_of)s, metric_value= %(metric_value)s,
                 metric_format= %(metric_format)s,
-                edited_at = timezone('utc'::text, now())
+                edited_at = timezone('utc'::text, now()),
+                default_config = %(config)s
             WHERE metric_id = %(metric_id)s
               AND project_id = %(project_id)s
               AND (user_id = %(user_id)s OR is_public)

@@ -118,6 +118,8 @@ def get_dashboard(project_id, user_id, dashboard_id):
         for w in row["widgets"]:
             w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
             w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
+            w["config"]["col"] = w["default_config"]["col"]
+            w["config"]["row"] = w["default_config"]["row"]
             for s in w["series"]:
                 s["created_at"] = TimeUTC.datetime_to_timestamp(s["created_at"])
     return helper.dict_to_camel_case(row)

@@ -541,8 +541,8 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
         if is_sign:
             n_critical_issues += n_issues_dict[issue_id]

     # To limit the number of returned issues to the frontend
-    issues_dict["significant"] = issues_dict["significant"][:50]
-    issues_dict["insignificant"] = issues_dict["insignificant"][:50]
+    issues_dict["significant"] = issues_dict["significant"][:20]
+    issues_dict["insignificant"] = issues_dict["insignificant"][:20]
     return n_critical_issues, issues_dict, total_drop_due_to_issues