feat(api): cleaned unused custom_metrics endpoints
feat(api): custom_metrics support list of issues
feat(api): custom_metrics support table-pieChart
feat(api): custom_metrics support table-table
feat(api): custom_metrics handle redundant series update
feat(api): custom_metrics default create-values
feat(api): custom_metrics refactored schemas
feat(DB): custom_metrics structure changes
commit a140223923
parent f0d13fde50
8 changed files with 179 additions and 117 deletions
@@ -6,22 +6,22 @@ from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC


def __try_live(project_id, data: schemas.TryCustomMetricsSchema):
def __try_live(project_id, data: schemas.CreateCustomMetricsSchema):
results = []
for i, s in enumerate(data.series):
s.filter.startDate = data.startDate
s.filter.endDate = data.endDate
results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
view_type=data.viewType, metric_type=data.metricType,
metric_of=data.metricOf))
if data.viewType == schemas.MetricViewType.progress:
view_type=data.view_type, metric_type=data.metric_type,
metric_of=data.metric_of, metric_value=data.metric_value))
if data.view_type == schemas.MetricTimeseriesViewType.progress:
r = {"count": results[-1]}
diff = s.filter.endDate - s.filter.startDate
s.filter.startDate = data.endDate
s.filter.endDate = data.endDate - diff
r["previousCount"] = sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
view_type=data.viewType, metric_type=data.metricType,
metric_of=data.metricOf)
view_type=data.view_type, metric_type=data.metric_type,
metric_of=data.metric_of, metric_value=data.metric_value)
r["countProgress"] = helper.__progress(old_val=r["previousCount"], new_val=r["count"])
r["seriesName"] = s.name if s.name else i + 1
r["seriesId"] = s.series_id if s.series_id else None
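
For reviewers: the progress branch above assembles one dict per series. A minimal sketch of that payload, assuming helper.__progress is a plain percent-change helper (its body is not part of this diff, so the formula below is an assumption):

def _progress(old_val, new_val):
    # assumed behaviour of helper.__progress: percent change, guarded against old_val == 0
    return (new_val - old_val) / old_val * 100 if old_val else 0

r = {"count": 120, "previousCount": 100}
r["countProgress"] = _progress(old_val=r["previousCount"], new_val=r["count"])
r["seriesName"] = "week over week"  # falls back to the series index when the series is unnamed
print(r)  # {'count': 120, 'previousCount': 100, 'countProgress': 20.0, 'seriesName': 'week over week'}
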
@@ -29,9 +29,9 @@ def __try_live(project_id, data: schemas.TryCustomMetricsSchema):
return results


def merged_live(project_id, data: schemas.TryCustomMetricsSchema):
def merged_live(project_id, data: schemas.CreateCustomMetricsSchema):
series_charts = __try_live(project_id=project_id, data=data)
if data.viewType == schemas.MetricViewType.progress or data.metricType == schemas.MetricType.table:
if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table:
return series_charts
results = [{}] * len(series_charts[0])
for i in range(len(results)):
@@ -45,9 +45,9 @@ def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPa
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
metric: schemas.TryCustomMetricsSchema = schemas.TryCustomMetricsSchema.parse_obj({**data.dict(), **metric})
metric: schemas.CreateCustomMetricsSchema = schemas.CreateCustomMetricsSchema.parse_obj({**data.dict(), **metric})
series_charts = __try_live(project_id=project_id, data=metric)
if data.viewType == schemas.MetricViewType.progress:
if metric.view_type == schemas.MetricTimeseriesViewType.progress:
return series_charts
results = [{}] * len(series_charts[0])
for i in range(len(results)):
@@ -57,11 +57,12 @@ def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPa
return results


def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricRawPayloadSchema):
def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
metric: schemas.TryCustomMetricsSchema = schemas.TryCustomMetricsSchema.parse_obj({**data.dict(), **metric})
metric: schemas.CreateCustomMetricsSchema = schemas.CreateCustomMetricsSchema.parse_obj(
{**data.dict(), **metric})
results = []
for s in metric.series:
s.filter.startDate = data.startDate
@@ -84,8 +85,9 @@ def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema):
data.series = None
params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data}
query = cur.mogrify(f"""\
WITH m AS (INSERT INTO metrics (project_id, user_id, name)
VALUES (%(project_id)s, %(user_id)s, %(name)s)
WITH m AS (INSERT INTO metrics (project_id, user_id, name, is_public, view_type, metric_type, metric_of, metric_value)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s)
RETURNING *)
INSERT
INTO metric_series(metric_id, index, name, filter)
@@ -100,32 +102,21 @@ def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema):
return {"data": get(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)}


def __get_series_id(metric_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT series_id
FROM metric_series
WHERE metric_series.metric_id = %(metric_id)s
AND metric_series.deleted_at ISNULL;""",
{"metric_id": metric_id}
)
)
rows = cur.fetchall()
return [r["series_id"] for r in rows]


def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSchema):
series_ids = __get_series_id(metric_id)
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
series_ids = [r["seriesId"] for r in metric["series"]]
n_series = []
d_series_ids = []
u_series = []
u_series_ids = []
params = {"metric_id": metric_id, "is_public": data.is_public, "name": data.name,
"user_id": user_id, "project_id": project_id}
"user_id": user_id, "project_id": project_id, "view_type": data.view_type,
"metric_type": data.metric_type, "metric_of": data.metric_of, "metric_value": data.metric_value}
for i, s in enumerate(data.series):
prefix = "u_"
if s.series_id is None:
if s.series_id is None or s.series_id not in series_ids:
n_series.append({"i": i, "s": s})
prefix = "n_"
s.index = i
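
The series reconciliation introduced above reduces to a membership test against the series ids already stored for the metric. A standalone sketch of that classification (names here are illustrative; the deleted-id handling is inferred from d_series_ids and is not shown in this hunk):

def classify_series(existing_ids, incoming_series):
    # incoming_series: objects carrying an optional series_id, as in UpdateCustomMetricsSchema
    new, updated = [], []
    for i, s in enumerate(incoming_series):
        if s.series_id is None or s.series_id not in existing_ids:
            new.append((i, s))        # inserted as a fresh metric_series row with index i
        else:
            updated.append((i, s))    # updates the existing metric_series row
    kept = {s.series_id for _, s in updated}
    deleted = [sid for sid in existing_ids if sid not in kept]  # assumed to feed d_series_ids
    return new, updated, deleted
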
@@ -167,7 +158,9 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
query = cur.mogrify(f"""\
{"WITH " if len(sub_queries) > 0 else ""}{",".join(sub_queries)}
UPDATE metrics
SET name = %(name)s, is_public= %(is_public)s
SET name = %(name)s, is_public= %(is_public)s,
view_type= %(view_type)s, metric_type= %(metric_type)s,
metric_of= %(metric_of)s, metric_value= %(metric_value)s
WHERE metric_id = %(metric_id)s
AND project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
@@ -226,7 +219,7 @@ def get(metric_id, project_id, user_id, flatten=True):
cur.mogrify(
"""SELECT *
FROM metrics
LEFT JOIN LATERAL (SELECT jsonb_agg(metric_series.* ORDER BY index) AS series
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL

@@ -1,3 +1,5 @@
from typing import List

import schemas
from chalicelib.core import events, metadata, events_ios, \
sessions_mobs, issues, projects, errors, resources, assist, performance_event
@@ -265,12 +267,16 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f


def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
view_type: schemas.MetricViewType, metric_type: schemas.MetricType, metric_of: schemas.TableMetricOfType):
view_type: schemas.MetricTimeseriesViewType, metric_type: schemas.MetricType,
metric_of: schemas.TableMetricOfType, metric_value: List):
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate,
density=density, factor=1, decimal=True))
extra_event = None
if metric_of == schemas.TableMetricOfType.visited_url:
extra_event = "events.pages"
elif metric_of == schemas.TableMetricOfType.issues and len(metric_value) > 0:
data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue,
operator=schemas.SearchEventOperator._is))
full_args, query_part, sort = search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None, extra_event=extra_event)
@@ -278,7 +284,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
sessions = []
with pg_client.PostgresClient() as cur:
if metric_type == schemas.MetricType.timeseries:
if view_type == schemas.MetricViewType.line_chart:
if view_type == schemas.MetricTimeseriesViewType.line_chart:
main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts
{query_part})
SELECT generated_timestamp AS timestamp,
@@ -296,9 +302,9 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d

# print("--------------------")
# print(main_query)
cur.execute(main_query)
# print("--------------------")
if view_type == schemas.MetricViewType.line_chart:
cur.execute(main_query)
if view_type == schemas.MetricTimeseriesViewType.line_chart:
sessions = cur.fetchall()
else:
sessions = cur.fetchone()["count"]
@@ -306,6 +312,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
if isinstance(metric_of, schemas.TableMetricOfType):
main_col = "user_id"
extra_col = ""
extra_where = ""
pre_query = ""
if metric_of == schemas.TableMetricOfType.user_country:
main_col = "user_country"
@@ -316,6 +323,13 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
elif metric_of == schemas.TableMetricOfType.issues:
main_col = "issue"
extra_col = f", UNNEST(s.issue_types) AS {main_col}"
if len(metric_value) > 0:
extra_where = []
for i in range(len(metric_value)):
arg_name = f"selected_issue_{i}"
extra_where.append(f"{main_col} = %({arg_name})s")
full_args[arg_name] = metric_value[i]
extra_where = f"WHERE ({' OR '.join(extra_where)})"
elif metric_of == schemas.TableMetricOfType.visited_url:
main_col = "base_path"
extra_col = ", base_path"
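
The issues filter above is built as one bind parameter per selected value, OR-ed together. The same pattern in isolation (the issue-type strings are illustrative):

main_col = "issue"
metric_value = ["click_rage", "dead_click"]  # illustrative issue types
full_args, extra_where = {}, []
for i, value in enumerate(metric_value):
    arg_name = f"selected_issue_{i}"
    extra_where.append(f"{main_col} = %({arg_name})s")
    full_args[arg_name] = value
extra_where = f"WHERE ({' OR '.join(extra_where)})"
# extra_where == "WHERE (issue = %(selected_issue_0)s OR issue = %(selected_issue_1)s)"
# full_args  == {'selected_issue_0': 'click_rage', 'selected_issue_1': 'dead_click'}
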
@@ -332,6 +346,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
{query_part}
ORDER BY s.session_id desc) AS filtred_sessions
) AS full_sessions
{extra_where}
GROUP BY {main_col}
ORDER BY session_count DESC) AS users_sessions;""",
full_args)

@@ -1089,27 +1089,12 @@ def change_client_password(data: schemas.EditUserPasswordSchema = Body(...),

@app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsSchema = Body(...),
def try_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.merged_live
(project_id=projectId, data=data)}


@app.post('/{projectId}/custom_metrics/sessions', tags=["customMetrics"])
def get_custom_metric_sessions(projectId: int, data: schemas.CustomMetricRawPayloadSchema2 = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, metric_id=data.metric_id,
data=data)}


@app.post('/{projectId}/custom_metrics/chart', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics/chart', tags=["customMetrics"])
def get_custom_metric_chart(projectId: int, data: schemas.CustomMetricChartPayloadSchema2 = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.make_chart(project_id=projectId, user_id=context.user_id, metric_id=data.metric_id,
data=data)}


@app.post('/{projectId}/custom_metrics', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics', tags=["customMetrics"])
def add_custom_metric(projectId: int, data: schemas.CreateCustomMetricsSchema = Body(...),
@@ -1124,29 +1109,40 @@ def get_custom_metrics(projectId: int, context: schemas.CurrentContext = Depends

@app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def get_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.get(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}
data = custom_metrics.get(project_id=projectId, user_id=context.user_id, metric_id=metric_id)
if data is None:
return {"errors": ["custom metric not found"]}
return {"data": data}


@app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"])
def get_custom_metric_sessions(projectId: int, metric_id: int, data: schemas.CustomMetricRawPayloadSchema = Body(...),
def get_custom_metric_sessions(projectId: int, metric_id: int,
data: schemas.CustomMetricSessionsPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)}
data = custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
if data is None:
return {"errors": ["custom metric not found"]}
return {"data": data}


@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"])
def get_custom_metric_chart(projectId: int, metric_id: int, data: schemas.CustomMetricChartPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.make_chart(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)}
data = custom_metrics.make_chart(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
data=data)
if data is None:
return {"errors": ["custom metric not found"]}
return {"data": data}


@app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
@app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCustomMetricsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {
"data": custom_metrics.update(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)}
data = custom_metrics.update(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)
if data is None:
return {"errors": ["custom metric not found"]}
return {"data": data}


@app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])

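A hedged client-side sketch of the reshaped routes (the base URL, auth header, and payload values are assumptions; the paths and verbs are the ones registered above):

import requests

base = "https://openreplay.example.com/api"   # assumed API root
headers = {"Authorization": "Bearer <jwt>"}   # assumed auth scheme
project_id, metric_id = 1, 42
draft_metric = {"name": "draft", "series": [{"filter": {"events": [], "filters": []}}]}  # series shape assumed

# preview a metric definition without persisting it (now PUT, body: CreateCustomMetricsSchema)
requests.put(f"{base}/{project_id}/custom_metrics/try", json=draft_metric, headers=headers)
# chart and sessions for a saved metric: metric_id moved from the body into the path
requests.post(f"{base}/{project_id}/custom_metrics/{metric_id}/chart", json={}, headers=headers)
requests.post(f"{base}/{project_id}/custom_metrics/{metric_id}/sessions", json={}, headers=headers)
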
@@ -561,7 +561,7 @@ class _SessionSearchEventSchema(_SessionSearchEventRaw):
value: Union[List[Union[_SessionSearchEventRaw, str]], str] = Field(...)


class _SessionSearchFilterSchema(__MixedSearchFilter):
class SessionSearchFilterSchema(__MixedSearchFilter):
is_event: bool = Field(False, const=False)
value: Union[Optional[Union[IssueType, PlatformType, int, str]],
Optional[List[Union[IssueType, PlatformType, int, str]]]] = Field(...)
@@ -594,7 +594,7 @@ class _SessionSearchFilterSchema(__MixedSearchFilter):

class SessionsSearchPayloadSchema(BaseModel):
events: List[_SessionSearchEventSchema] = Field([])
filters: List[_SessionSearchFilterSchema] = Field([])
filters: List[SessionSearchFilterSchema] = Field([])
startDate: int = Field(None)
endDate: int = Field(None)
sort: str = Field(default="startTs")
@@ -608,7 +608,7 @@ class SessionsSearchPayloadSchema(BaseModel):

class FlatSessionsSearchPayloadSchema(SessionsSearchPayloadSchema):
events: Optional[List[_SessionSearchEventSchema]] = Field([])
filters: List[Union[_SessionSearchFilterSchema, _SessionSearchEventSchema]] = Field([])
filters: List[Union[SessionSearchFilterSchema, _SessionSearchEventSchema]] = Field([])

@root_validator(pre=True)
def flat_to_original(cls, values):
@@ -723,37 +723,21 @@ class CustomMetricCreateSeriesSchema(BaseModel):
alias_generator = attribute_to_camel_case


class CreateCustomMetricsSchema(BaseModel):
name: str = Field(...)
series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1)
is_public: Optional[bool] = Field(True)

class Config:
alias_generator = attribute_to_camel_case


class MetricViewType(str, Enum):
class MetricTimeseriesViewType(str, Enum):
line_chart = "lineChart"
progress = "progress"


class MetricTableViewType(str, Enum):
table = "table"
pie_chart = "pieChart"


class MetricType(str, Enum):
timeseries = "timeseries"
table = "table"


class CustomMetricRawPayloadSchema(BaseModel):
startDate: int = Field(TimeUTC.now(-7))
endDate: int = Field(TimeUTC.now())

class Config:
alias_generator = attribute_to_camel_case


class CustomMetricRawPayloadSchema2(CustomMetricRawPayloadSchema):
metric_id: int = Field(...)


class TableMetricOfType(str, Enum):
user_os = FilterType.user_os.value
user_browser = FilterType.user_browser.value
@@ -768,29 +752,50 @@ class TimeseriesMetricOfType(str, Enum):
session_count = "sessionCount"


class CustomMetricChartPayloadSchema(CustomMetricRawPayloadSchema):
class CustomMetricSessionsPayloadSchema(BaseModel):
startDate: int = Field(TimeUTC.now(-7))
endDate: int = Field(TimeUTC.now())

class Config:
alias_generator = attribute_to_camel_case


class CustomMetricChartPayloadSchema(CustomMetricSessionsPayloadSchema):
density: int = Field(7)
viewType: MetricViewType = Field(MetricViewType.line_chart)
metricType: MetricType = Field(MetricType.timeseries)
metricOf: Union[TableMetricOfType, TimeseriesMetricOfType] = Field(TableMetricOfType.user_id)

class Config:
alias_generator = attribute_to_camel_case


class CreateCustomMetricsSchema(CustomMetricChartPayloadSchema):
name: str = Field(...)
series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1)
is_public: bool = Field(default=True, const=True)
view_type: Union[MetricTimeseriesViewType, MetricTableViewType] = Field(MetricTimeseriesViewType.line_chart)
metric_type: MetricType = Field(MetricType.timeseries)
metric_of: Union[TableMetricOfType, TimeseriesMetricOfType] = Field(TableMetricOfType.user_id)
metric_value: List[IssueType] = Field([])

# metricFraction: float = Field(None, gt=0, lt=1)
@root_validator
def validator(cls, values):
if isinstance(values.get("metricOf"), TimeseriesMetricOfType):
assert values.get("metricType") == MetricType.timeseries, \
f"Only metricType:{MetricType.timeseries.value} is allowed for metricOf: {values.get('metricOf')}"
if values.get("metric_type") == MetricType.table:
assert isinstance(values.get("view_type"), MetricTableViewType), \
f"viewType must be of type {MetricTableViewType} for metricType:{MetricType.table.value}"
assert isinstance(values.get("metric_of"), TableMetricOfType), \
f"metricOf must be of type {TableMetricOfType} for metricType:{MetricType.table.value}"
if values.get("metric_of") != TableMetricOfType.issues:
assert values.get("metric_value") is None or len(values.get("metric_value")) == 0, \
f"metricValue is only available for metricOf:{TableMetricOfType.issues.value}"
elif values.get("metric_type") == MetricType.timeseries:
assert isinstance(values.get("view_type"), MetricTimeseriesViewType), \
f"viewType must be of type {MetricTimeseriesViewType} for metricType:{MetricType.timeseries.value}"
assert isinstance(values.get("metric_of"), TimeseriesMetricOfType), \
f"metricOf must be of type {TimeseriesMetricOfType} for metricType:{MetricType.timeseries.value}"
return values


class CustomMetricChartPayloadSchema2(CustomMetricChartPayloadSchema):
metric_id: int = Field(...)


class TryCustomMetricsSchema(CreateCustomMetricsSchema, CustomMetricChartPayloadSchema):
name: Optional[str] = Field(None)
class Config:
alias_generator = attribute_to_camel_case


class CustomMetricUpdateSeriesSchema(CustomMetricCreateSeriesSchema):

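To make the new cross-field rules concrete, a sketch of what the validator above accepts and rejects (assuming the module imports as schemas; the series/filter content is illustrative and may need adjusting to the real CustomMetricCreateSeriesSchema):

import schemas

series = [{"filter": {"events": [], "filters": []}}]  # illustrative series payload

# table metrics must pair a MetricTableViewType with a TableMetricOfType
ok = schemas.CreateCustomMetricsSchema.parse_obj({
    "name": "Errors by browser", "metricType": "table", "viewType": "pieChart",
    "metricOf": schemas.TableMetricOfType.user_browser.value, "series": series,
})

try:
    # a timeseries-only view type combined with metricType=table trips the assertion above
    schemas.CreateCustomMetricsSchema.parse_obj({
        "name": "Invalid combo", "metricType": "table", "viewType": "lineChart",
        "metricOf": schemas.TableMetricOfType.user_browser.value, "series": series,
    })
except Exception as err:
    print("rejected:", err)
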
@@ -5,6 +5,10 @@ $$
SELECT 'v1.5.X-ee'
$$ LANGUAGE sql IMMUTABLE;

UPDATE metrics
SET is_public= TRUE;


DO
$$
BEGIN
@@ -20,7 +24,27 @@ $$
$$
LANGUAGE plpgsql;

DO
$$
BEGIN
IF NOT EXISTS(SELECT *
FROM pg_type typ
INNER JOIN pg_namespace nsp
ON nsp.oid = typ.typnamespace
WHERE nsp.nspname = current_schema()
AND typ.typname = 'metric_view_type') THEN
CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart');
END IF;
END;
$$
LANGUAGE plpgsql;

ALTER TABLE metrics
ADD COLUMN IF NOT EXISTS
metric_type metric_type NOT NULL DEFAULT 'timeseries';
metric_type metric_type NOT NULL DEFAULT 'timeseries',
ADD COLUMN IF NOT EXISTS
view_type metric_view_type NOT NULL DEFAULT 'lineChart',
ADD COLUMN IF NOT EXISTS
metric_of text NOT NULL DEFAULT 'sessionCount';

COMMIT;
@@ -771,17 +771,20 @@ $$
CREATE INDEX IF NOT EXISTS traces_tenant_id_idx ON traces (tenant_id);

CREATE TYPE metric_type AS ENUM ('timeseries','table');
CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart');
CREATE TABLE IF NOT EXISTS metrics
(
    metric_id   integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
    project_id  integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
    user_id     integer REFERENCES users (user_id) ON DELETE SET NULL,
    name        text    NOT NULL,
    is_public   boolean NOT NULL DEFAULT FALSE,
    active      boolean NOT NULL DEFAULT TRUE,
    created_at  timestamp default timezone('utc'::text, now()) not null,
    project_id  integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
    user_id     integer REFERENCES users (user_id) ON DELETE SET NULL,
    name        text    NOT NULL,
    is_public   boolean NOT NULL DEFAULT FALSE,
    active      boolean NOT NULL DEFAULT TRUE,
    created_at  timestamp DEFAULT timezone('utc'::text, now()) not null,
    deleted_at  timestamp,
    metric_type metric_type NOT NULL DEFAULT 'timeseries'
    metric_type metric_type NOT NULL DEFAULT 'timeseries',
    view_type   metric_view_type NOT NULL DEFAULT 'lineChart',
    metric_of   text NOT NULL DEFAULT 'sessionCount'
);
CREATE INDEX IF NOT EXISTS metrics_user_id_is_public_idx ON public.metrics (user_id, is_public);
CREATE TABLE IF NOT EXISTS metric_series

@@ -5,6 +5,9 @@ $$
SELECT 'v1.5.X'
$$ LANGUAGE sql IMMUTABLE;

UPDATE metrics
SET is_public= TRUE;

DO
$$
BEGIN
@@ -20,7 +23,27 @@ $$
$$
LANGUAGE plpgsql;

DO
$$
BEGIN
IF NOT EXISTS(SELECT *
FROM pg_type typ
INNER JOIN pg_namespace nsp
ON nsp.oid = typ.typnamespace
WHERE nsp.nspname = current_schema()
AND typ.typname = 'metric_view_type') THEN
CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart');
END IF;
END;
$$
LANGUAGE plpgsql;

ALTER TABLE metrics
ADD COLUMN IF NOT EXISTS
metric_type metric_type NOT NULL DEFAULT 'timeseries';
metric_type metric_type NOT NULL DEFAULT 'timeseries',
ADD COLUMN IF NOT EXISTS
view_type metric_view_type NOT NULL DEFAULT 'lineChart',
ADD COLUMN IF NOT EXISTS
metric_of text NOT NULL DEFAULT 'sessionCount';

COMMIT;
@@ -901,17 +901,20 @@ $$
CREATE INDEX jobs_project_id_idx ON jobs (project_id);

CREATE TYPE metric_type AS ENUM ('timeseries','table');
CREATE TYPE metric_view_type AS ENUM ('lineChart','progress','table','pieChart');
CREATE TABLE metrics
(
    metric_id   integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
    project_id  integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
    user_id     integer REFERENCES users (user_id) ON DELETE SET NULL,
    name        text    NOT NULL,
    is_public   boolean NOT NULL DEFAULT FALSE,
    active      boolean NOT NULL DEFAULT TRUE,
    created_at  timestamp default timezone('utc'::text, now()) not null,
    project_id  integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
    user_id     integer REFERENCES users (user_id) ON DELETE SET NULL,
    name        text    NOT NULL,
    is_public   boolean NOT NULL DEFAULT FALSE,
    active      boolean NOT NULL DEFAULT TRUE,
    created_at  timestamp default timezone('utc'::text, now()) not null,
    deleted_at  timestamp,
    metric_type metric_type NOT NULL DEFAULT 'timeseries'
    metric_type metric_type NOT NULL DEFAULT 'timeseries',
    view_type   metric_view_type NOT NULL DEFAULT 'lineChart',
    metric_of   text NOT NULL DEFAULT 'sessionCount'
);
CREATE INDEX metrics_user_id_is_public_idx ON public.metrics (user_id, is_public);
CREATE TABLE metric_series