Merge remote-tracking branch 'origin/api-v1.9.5' into dev
commit 1959f80073
15 changed files with 159 additions and 179 deletions
@@ -27,7 +27,7 @@ COALESCE((SELECT TRUE
                                 AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """


-def search_short_session(data: schemas.FlatClickMapSessionsSearch, project_id, user_id):
+def search_short_session(data: schemas.FlatClickMapSessionsSearch, project_id, user_id, include_mobs: bool = True):
     no_platform = True
     for f in data.filters:
         if f.type == schemas.FilterType.platform:
@@ -68,8 +68,9 @@ def search_short_session(data: schemas.FlatClickMapSessionsSearch, project_id, u

         session = cur.fetchone()
         if session:
-            session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id)
-            session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"])
+            if include_mobs:
+                session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id)
+                session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"])
             session['events'] = events.get_by_session_id(project_id=project_id, session_id=session["session_id"],
                                                          event_type=schemas.EventType.location)

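The new include_mobs flag lets callers skip resolving the DOM/mob file URLs when only the click data itself is needed (the card-creation path further down uses this to cache a lighter payload). A minimal usage sketch, assuming a valid FlatClickMapSessionsSearch payload; the variable and field names here are illustrative, not taken from the diff:

    # Hypothetical call; start_ts, end_ts, project_id and user_id are placeholders.
    search = schemas.FlatClickMapSessionsSearch(startDate=start_ts, endDate=end_ts)
    session = click_maps.search_short_session(data=search, project_id=project_id,
                                               user_id=user_id, include_mobs=False)
    # When a session is found it still carries 'events', but no 'domURL'/'mobsUrl'.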
@@ -105,13 +105,14 @@ def __is_click_map(data: schemas.CreateCardSchema):
     return data.metric_type == schemas.MetricType.click_map


-def __get_click_map_chart(project_id, user_id, data: schemas.CreateCardSchema):
+def __get_click_map_chart(project_id, user_id, data: schemas.CreateCardSchema, include_mobs: bool = True):
     if len(data.series) == 0:
         return None
     data.series[0].filter.startDate = data.startTimestamp
     data.series[0].filter.endDate = data.endTimestamp
     return click_maps.search_short_session(project_id=project_id, user_id=user_id,
-                                           data=schemas.FlatClickMapSessionsSearch(**data.series[0].filter.dict()))
+                                           data=schemas.FlatClickMapSessionsSearch(**data.series[0].filter.dict()),
+                                           include_mobs=include_mobs)


 def merged_live(project_id, data: schemas.CreateCardSchema, user_id=None):
@@ -150,10 +151,13 @@ def __merge_metric_with_data(metric: schemas.CreateCardSchema,
             s.filter.filters += data.filters
         if len(data.events) > 0:
             s.filter.events += data.events
+    metric.limit = data.limit
+    metric.page = data.page
     return metric


-def make_chart(project_id, user_id, metric_id, data: schemas.CardChartSchema, metric: schemas.CreateCardSchema = None):
+def make_chart(project_id, user_id, metric_id, data: schemas.CardChartSchema,
+               metric: schemas.CreateCardSchema = None):
     if metric is None:
         metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
     if metric is None:
@@ -164,7 +168,7 @@ def make_chart(project_id, user_id, metric_id, data: schemas.CardChartSchema, me


 def get_sessions(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
-    raw_metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
+    raw_metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False, include_data=True)
     if raw_metric is None:
         return None
     metric: schemas.CreateCardSchema = schemas.CreateCardSchema(**raw_metric)
@@ -172,11 +176,18 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CardSessionsSchem
     if metric is None:
         return None
     results = []
+    is_click_map = False
+    if __is_click_map(metric) and raw_metric.get("data") is not None:
+        is_click_map = True
     for s in metric.series:
         s.filter.startDate = data.startTimestamp
         s.filter.endDate = data.endTimestamp
         s.filter.limit = data.limit
         s.filter.page = data.page
+        if is_click_map:
+            results.append(
+                {"seriesId": s.series_id, "seriesName": s.name, "total": 1, "sessions": [raw_metric["data"]]})
+            break
         results.append({"seriesId": s.series_id, "seriesName": s.name,
                         **sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})

@@ -234,7 +245,11 @@ def try_sessions(project_id, user_id, data: schemas.CardSessionsSchema):

 def create(project_id, user_id, data: schemas.CreateCardSchema, dashboard=False):
     with pg_client.PostgresClient() as cur:
-        _data = {}
+        session_data = None
+        if __is_click_map(data):
+            session_data = json.dumps(__get_click_map_chart(project_id=project_id, user_id=user_id,
+                                                            data=data, include_mobs=False))
+        _data = {"session_data": session_data}
         for i, s in enumerate(data.series):
             for k in s.dict().keys():
                 _data[f"{k}_{i}"] = s.__getattribute__(k)
@@ -245,10 +260,10 @@ def create(project_id, user_id, data: schemas.CreateCardSchema, dashboard=False)
         params["default_config"] = json.dumps(data.default_config.dict())
         query = """INSERT INTO metrics (project_id, user_id, name, is_public,
                                         view_type, metric_type, metric_of, metric_value,
-                                        metric_format, default_config, thumbnail)
+                                        metric_format, default_config, thumbnail, data)
                    VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
                            %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
-                           %(metric_format)s, %(default_config)s, %(thumbnail)s)
+                           %(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s)
                    RETURNING metric_id"""
         if len(data.series) > 0:
             query = f"""WITH m AS ({query})
@@ -440,10 +455,13 @@ def delete(project_id, metric_id, user_id):
     return {"state": "success"}


-def get_card(metric_id, project_id, user_id, flatten=True):
+def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data: bool = False):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(
-            """SELECT *, default_config AS config
+            f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, deleted_at, edited_at, metric_type,
+                       view_type, metric_of, metric_value, metric_format, is_pinned, default_config,
+                       thumbnail, default_config AS config,
+                       series, dashboards, owner_email {',data' if include_data else ''}
                FROM metrics
                         LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
                                            FROM metric_series
@@ -494,7 +512,10 @@ def get_with_template(metric_id, project_id, user_id, include_dashboard=True):
                                              AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards
                              ) AS connected_dashboards ON (TRUE)"""
         query = cur.mogrify(
-            f"""SELECT *, default_config AS config
+            f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, deleted_at, edited_at, metric_type,
+                       view_type, metric_of, metric_value, metric_format, is_pinned, default_config,
+                       thumbnail, default_config AS config,
+                       series
                FROM metrics
                         LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
                                            FROM metric_series
@@ -651,27 +672,3 @@ PREDEFINED = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_se
 def get_predefined_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
         schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict):
     return PREDEFINED.get(key, lambda *args: None)(project_id=project_id, **data)
-
-# def add_thumbnail(metric_id, user_id, project_id):
-#     key = generate_file_key(project_id=project_id, key=f"{metric_id}.png")
-#     params = {"metric_id": metric_id, "user_id": user_id, "project_id": project_id, "key": key}
-#     with pg_client.PostgresClient() as cur:
-#         query = cur.mogrify(f"""\
-#                     UPDATE metrics
-#                     SET thumbnail_url = %(key)s
-#                     WHERE metric_id = %(metric_id)s
-#                       AND project_id = %(project_id)s
-#                       AND (user_id = %(user_id)s OR is_public)
-#                     RETURNING metric_id;""", params)
-#         cur.execute(query)
-#         row = cur.fetchone()
-#     if row is None:
-#         return {"errors": ["Card not found"]}
-#     return {"data": s3.get_presigned_url_for_upload(bucket=config('THUMBNAILS_BUCKET'), expires_in=180, key=key,
-#                                                     # content-length-range is in bytes
-#                                                     conditions=["content-length-range", 1, 1 * 1024 * 1024],
-#                                                     content_type="image/png")}
-#
-#
-# def generate_file_key(project_id, key):
-#     return f"{project_id}/cards/{key}"

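With these changes, create() renders a click-map card once at save time (with include_mobs=False) and stores the serialized result in the new metrics.data column, and get_sessions() can serve that cached payload instead of re-running the search. A rough sketch of the read path under that assumption, not the exact function body:

    # raw_metric is the dict returned by get_card(..., include_data=True)
    if __is_click_map(metric) and raw_metric.get("data") is not None:
        s = metric.series[0]
        return [{"seriesId": s.series_id, "seriesName": s.name,
                 "total": 1, "sessions": [raw_metric["data"]]}]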
@@ -858,7 +858,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
                     apply = True
                 elif f.type == schemas.FetchFilterType._status_code:
                     event_where.append(
-                        sh.multi_conditions(f"main.status_code {f.operator} %({e_k_f})s::integer", f.value,
+                        sh.multi_conditions(f"main.status_code {f.operator.value} %({e_k_f})s::integer", f.value,
                                             value_key=e_k_f))
                     apply = True
                 elif f.type == schemas.FetchFilterType._method:
@@ -867,7 +867,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
                     apply = True
                 elif f.type == schemas.FetchFilterType._duration:
                     event_where.append(
-                        sh.multi_conditions(f"main.duration {f.operator} %({e_k_f})s::integer", f.value,
+                        sh.multi_conditions(f"main.duration {f.operator.value} %({e_k_f})s::integer", f.value,
                                             value_key=e_k_f))
                     apply = True
                 elif f.type == schemas.FetchFilterType._request_body:

@@ -92,7 +92,19 @@ class CreateNotificationSchema(BaseModel):
     notifications: List = Field(...)


-class NotificationsViewSchema(BaseModel):
+class _TimedSchema(BaseModel):
+    startTimestamp: int = Field(default=None)
+    endTimestamp: int = Field(default=None)
+
+    @root_validator
+    def time_validator(cls, values):
+        if values.get("startTimestamp") is not None and values.get("endTimestamp") is not None:
+            assert values.get("startTimestamp") < values.get("endTimestamp"), \
+                "endTimestamp must be greater than startTimestamp"
+        return values
+
+
+class NotificationsViewSchema(_TimedSchema):
     ids: Optional[List] = Field(default=[])
     startTimestamp: Optional[int] = Field(default=None)
     endTimestamp: Optional[int] = Field(default=None)
@@ -816,7 +828,7 @@ class SearchErrorsSchema(FlatSessionsSearchPayloadSchema):
     query: Optional[str] = Field(default=None)


-class MetricPayloadSchema(BaseModel):
+class MetricPayloadSchema(_TimedSchema):
     startTimestamp: int = Field(TimeUTC.now(delta_days=-1))
     endTimestamp: int = Field(TimeUTC.now())
     density: int = Field(7)
@@ -967,7 +979,7 @@ class MetricOfClickMap(str, Enum):
     click_map_url = "clickMapUrl"


-class CardSessionsSchema(FlatSessionsSearch, _PaginatedSchema):
+class CardSessionsSchema(FlatSessionsSearch, _PaginatedSchema, _TimedSchema):
     startTimestamp: int = Field(TimeUTC.now(-7))
     endTimestamp: int = Field(TimeUTC.now())
     series: List[CardCreateSeriesSchema] = Field(default=[])
@@ -1283,7 +1295,7 @@ class FlatClickMapSessionsSearch(SessionsSearchPayloadSchema):

     @root_validator(pre=True)
     def transform(cls, values):
-        for f in values.get("filters"):
+        for f in values.get("filters", []):
             if f.get("type") == FilterType.duration:
                 return values
         values["filters"] = values.get("filters", [])
@@ -1293,6 +1305,8 @@ class FlatClickMapSessionsSearch(SessionsSearchPayloadSchema):

     @root_validator()
     def flat_to_original(cls, values):
+        if len(values["events"]) > 0:
+            return values
         n_filters = []
         n_events = []
         for v in values.get("filters", []):

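_TimedSchema centralizes the start/end timestamp pair and rejects ranges whose endTimestamp is not greater than startTimestamp; MetricPayloadSchema, CardSessionsSchema and NotificationsViewSchema now inherit that check instead of declaring the fields ad hoc. A small illustration of the expected behaviour, assuming the pydantic v1 semantics used elsewhere in this module:

    from pydantic import ValidationError

    try:
        schemas.NotificationsViewSchema(startTimestamp=2000, endTimestamp=1000)
    except ValidationError as e:
        print(e)  # the root validator's assert surfaces as a validation error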
@@ -180,6 +180,8 @@ def __merge_metric_with_data(metric: schemas_ee.CreateCardSchema,
             s.filter.filters += data.filters
         if len(data.events) > 0:
             s.filter.events += data.events
+    metric.limit = data.limit
+    metric.page = data.page
     return metric


@@ -195,7 +197,7 @@ def make_chart(project_id, user_id, metric_id, data: schemas.CardChartSchema,


 def get_sessions(project_id, user_id, metric_id, data: schemas.CardSessionsSchema):
-    raw_metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
+    raw_metric = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False, include_data=True)
     if raw_metric is None:
         return None
     metric: schemas_ee.CreateCardSchema = schemas_ee.CreateCardSchema(**raw_metric)
@@ -203,11 +205,18 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CardSessionsSchem
     if metric is None:
         return None
     results = []
+    is_click_map = False
+    if __is_click_map(metric) and raw_metric.get("data") is not None:
+        is_click_map = True
     for s in metric.series:
         s.filter.startDate = data.startTimestamp
         s.filter.endDate = data.endTimestamp
         s.filter.limit = data.limit
         s.filter.page = data.page
+        if is_click_map:
+            results.append(
+                {"seriesId": s.series_id, "seriesName": s.name, "total": 1, "sessions": [raw_metric["data"]]})
+            break
         results.append({"seriesId": s.series_id, "seriesName": s.name,
                         **sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})

@@ -265,6 +274,11 @@ def try_sessions(project_id, user_id, data: schemas.CardSessionsSchema):

 def create(project_id, user_id, data: schemas_ee.CreateCardSchema, dashboard=False):
     with pg_client.PostgresClient() as cur:
+        session_data = None
+        if __is_click_map(data):
+            session_data = json.dumps(__get_click_map_chart(project_id=project_id, user_id=user_id,
+                                                            data=data, include_mobs=False))
+        _data = {"session_data": session_data}
         _data = {}
         for i, s in enumerate(data.series):
             for k in s.dict().keys():
@@ -276,10 +290,10 @@ def create(project_id, user_id, data: schemas_ee.CreateCardSchema, dashboard=Fal
         params["default_config"] = json.dumps(data.default_config.dict())
         query = """INSERT INTO metrics (project_id, user_id, name, is_public,
                                         view_type, metric_type, metric_of, metric_value,
-                                        metric_format, default_config, thumbnail)
+                                        metric_format, default_config, thumbnail, data)
                    VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
                            %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
-                           %(metric_format)s, %(default_config)s, %(thumbnail)s)
+                           %(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s)
                    RETURNING metric_id"""
         if len(data.series) > 0:
             query = f"""WITH m AS ({query})
@@ -471,10 +485,13 @@ def delete(project_id, metric_id, user_id):
     return {"state": "success"}


-def get_card(metric_id, project_id, user_id, flatten=True):
+def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data: bool = False):
     with pg_client.PostgresClient() as cur:
         query = cur.mogrify(
-            """SELECT *, default_config AS config
+            f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, deleted_at, edited_at, metric_type,
+                       view_type, metric_of, metric_value, metric_format, is_pinned, default_config,
+                       thumbnail, default_config AS config,
+                       series, dashboards, owner_email {',data' if include_data else ''}
                FROM metrics
                         LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
                                            FROM metric_series
@@ -525,7 +542,10 @@ def get_with_template(metric_id, project_id, user_id, include_dashboard=True):
                                              AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards
                              ) AS connected_dashboards ON (TRUE)"""
         query = cur.mogrify(
-            f"""SELECT *, default_config AS config
+            f"""SELECT metric_id, project_id, user_id, name, is_public, created_at, deleted_at, edited_at, metric_type,
+                       view_type, metric_of, metric_value, metric_format, is_pinned, default_config,
+                       thumbnail, default_config AS config,
+                       series
                FROM metrics
                         LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
                                            FROM metric_series
@@ -682,27 +702,3 @@ PREDEFINED = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_se
 def get_predefined_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
         schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict):
     return PREDEFINED.get(key, lambda *args: None)(project_id=project_id, **data)
-
-# def add_thumbnail(metric_id, user_id, project_id):
-#     key = generate_file_key(project_id=project_id, key=f"{metric_id}.png")
-#     params = {"metric_id": metric_id, "user_id": user_id, "project_id": project_id, "key": key}
-#     with pg_client.PostgresClient() as cur:
-#         query = cur.mogrify(f"""\
-#                     UPDATE metrics
-#                     SET thumbnail_url = %(key)s
-#                     WHERE metric_id = %(metric_id)s
-#                       AND project_id = %(project_id)s
-#                       AND (user_id = %(user_id)s OR is_public)
-#                     RETURNING metric_id;""", params)
-#         cur.execute(query)
-#         row = cur.fetchone()
-#     if row is None:
-#         return {"errors": ["Card not found"]}
-#     return {"data": s3.get_presigned_url_for_upload(bucket=config('THUMBNAILS_BUCKET'), expires_in=180, key=key,
-#                                                     # content-length-range is in bytes
-#                                                     conditions=["content-length-range", 1, 1 * 1024 * 1024],
-#                                                     content_type="image/png")}
-#
-#
-# def generate_file_key(project_id, key):
-#     return f"{project_id}/cards/{key}"

@@ -17,7 +17,7 @@ def reset(data: schemas.ForgetPasswordPayloadSchema):
     # ---FOR SSO
     if a_user.get("origin") is not None and a_user.get("hasPassword", False) is False:
         return {"errors": ["Please use your SSO to login"]}
-    if config("enforce_SSO", cast=bool, default=False) and not a_user["superAdmin"]:
+    if config("enforce_SSO", cast=bool, default=False) and not a_user["superAdmin"] and helper.is_saml2_available():
         return {"errors": ["Please use your SSO to login, enforced by admin"]}
     # ----------
     invitation_link = users.generate_new_invitation(user_id=a_user["id"])

@@ -5,7 +5,8 @@ import schemas_ee
 from chalicelib.core import events, metadata, events_ios, \
     sessions_mobs, issues, projects, resources, assist, performance_event, sessions_favorite, \
     sessions_devtool, sessions_notes
-from chalicelib.utils import pg_client, helper, metrics_helper, errors_helper
+from chalicelib.utils import errors_helper
+from chalicelib.utils import pg_client, helper, metrics_helper
 from chalicelib.utils import sql_helper as sh

 SESSION_PROJECTION_COLS = """s.project_id,
@@ -206,9 +207,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
                                                     ORDER BY s.session_id desc) AS filtred_sessions
                                 ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""",
                             full_args)
-        print("--------------------")
-        print(main_query)
-        print("--------------------")
+        # print("--------------------")
+        # print(main_query)
+        # print("--------------------")
         try:
             cur.execute(main_query)
         except Exception as err:
@@ -860,7 +861,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
                     apply = True
                 elif f.type == schemas.FetchFilterType._status_code:
                     event_where.append(
-                        sh.multi_conditions(f"main.status_code {f.operator} %({e_k_f})s::integer", f.value,
+                        sh.multi_conditions(f"main.status_code {f.operator.value} %({e_k_f})s::integer", f.value,
                                             value_key=e_k_f))
                     apply = True
                 elif f.type == schemas.FetchFilterType._method:
@@ -869,7 +870,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
                     apply = True
                 elif f.type == schemas.FetchFilterType._duration:
                     event_where.append(
-                        sh.multi_conditions(f"main.duration {f.operator} %({e_k_f})s::integer", f.value,
+                        sh.multi_conditions(f"main.duration {f.operator.value} %({e_k_f})s::integer", f.value,
                                             value_key=e_k_f))
                     apply = True
                 elif f.type == schemas.FetchFilterType._request_body:
@@ -1088,39 +1089,6 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
     return results


-def search_by_issue(user_id, issue, project_id, start_date, end_date):
-    constraints = ["s.project_id = %(projectId)s",
-                   "p_issues.context_string = %(issueContextString)s",
-                   "p_issues.type = %(issueType)s"]
-    if start_date is not None:
-        constraints.append("start_ts >= %(startDate)s")
-    if end_date is not None:
-        constraints.append("start_ts <= %(endDate)s")
-    with pg_client.PostgresClient() as cur:
-        cur.execute(
-            cur.mogrify(
-                f"""SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS}
-                    FROM public.sessions AS s
-                             INNER JOIN events_common.issues USING (session_id)
-                             INNER JOIN public.issues AS p_issues USING (issue_id)
-                             LEFT JOIN (SELECT user_id, session_id
-                                        FROM public.user_favorite_sessions
-                                        WHERE user_id = %(userId)s) AS favorite_sessions
-                                       USING (session_id)
-                    WHERE {" AND ".join(constraints)}
-                    ORDER BY s.session_id DESC;""",
-                {
-                    "issueContextString": issue["contextString"],
-                    "issueType": issue["type"], "userId": user_id,
-                    "projectId": project_id,
-                    "startDate": start_date,
-                    "endDate": end_date
-                }))
-
-        rows = cur.fetchall()
-        return helper.list_to_camel_case(rows)
-
-
 def get_user_sessions(project_id, user_id, start_date, end_date):
     with pg_client.PostgresClient() as cur:
         constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"]

@@ -1038,8 +1038,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                 #                           _multiple_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator} %({e_k})s",
                 #                                                event.source, value_key=e_k))
                 # events_conditions[-2]["time"] = f"(?t{event.sourceOperator} %({e_k})s)"
-                events_conditions[-2]["time"] = _multiple_conditions(f"?t{event.sourceOperator.value}%({e_k})s", event.source,
-                                                                     value_key=e_k)
+                events_conditions[-2]["time"] = _multiple_conditions(f"?t{event.sourceOperator.value}%({e_k})s",
+                                                                     event.source, value_key=e_k)
                 event_index += 1
             # TODO: no isNot for RequestDetails
             elif event_type == schemas.EventType.request_details:
@@ -1064,7 +1064,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                     apply = True
                 elif f.type == schemas.FetchFilterType._status_code:
                     event_where.append(
-                        _multiple_conditions(f"main.status {f.operator} %({e_k_f})s", f.value,
+                        _multiple_conditions(f"main.status {f.operator.value} %({e_k_f})s", f.value,
                                              value_key=e_k_f))
                     events_conditions[-1]["condition"].append(event_where[-1])
                     apply = True
@@ -1075,7 +1075,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
                     apply = True
                 elif f.type == schemas.FetchFilterType._duration:
                     event_where.append(
-                        _multiple_conditions(f"main.duration {f.operator} %({e_k_f})s", f.value, value_key=e_k_f))
+                        _multiple_conditions(f"main.duration {f.operator.value} %({e_k_f})s", f.value,
+                                             value_key=e_k_f))
                     events_conditions[-1]["condition"].append(event_where[-1])
                     apply = True
                 elif f.type == schemas.FetchFilterType._request_body:
@@ -1414,39 +1415,6 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
     return results


-def search_by_issue(user_id, issue, project_id, start_date, end_date):
-    constraints = ["s.project_id = %(projectId)s",
-                   "p_issues.context_string = %(issueContextString)s",
-                   "p_issues.type = %(issueType)s"]
-    if start_date is not None:
-        constraints.append("start_ts >= %(startDate)s")
-    if end_date is not None:
-        constraints.append("start_ts <= %(endDate)s")
-    with pg_client.PostgresClient() as cur:
-        cur.execute(
-            cur.mogrify(
-                f"""SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS}
-                    FROM public.sessions AS s
-                             INNER JOIN events_common.issues USING (session_id)
-                             INNER JOIN public.issues AS p_issues USING (issue_id)
-                             LEFT JOIN (SELECT user_id, session_id
-                                        FROM public.user_favorite_sessions
-                                        WHERE user_id = %(userId)s) AS favorite_sessions
-                                       USING (session_id)
-                    WHERE {" AND ".join(constraints)}
-                    ORDER BY s.session_id DESC;""",
-                {
-                    "issueContextString": issue["contextString"],
-                    "issueType": issue["type"], "userId": user_id,
-                    "projectId": project_id,
-                    "startDate": start_date,
-                    "endDate": end_date
-                }))
-
-        rows = cur.fetchall()
-        return helper.list_to_camel_case(rows)
-
-
 def get_user_sessions(project_id, user_id, start_date, end_date):
     with pg_client.PostgresClient() as cur:
         constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"]

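The f.operator to f.operator.value changes matter because interpolating the Enum member itself into the SQL fragment can render as the member's name rather than the raw operator string, depending on the Python version, while .value is always the plain string. A tiny stand-alone illustration; MathOperator here is a made-up stand-in for the schema operator enums:

    from enum import Enum

    class MathOperator(str, Enum):
        greater = ">"

    op = MathOperator.greater
    print(f"main.duration {op} %(v)s")        # may print 'MathOperator.greater' on some interpreters
    print(f"main.duration {op.value} %(v)s")  # always prints 'main.duration > %(v)s'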
@@ -1,5 +1,7 @@
-import schemas, schemas_ee
-from typing import List, Optional
+from typing import Optional
+
+import schemas
+import schemas_ee
 from chalicelib.core import metrics
 from chalicelib.utils import ch_client

@@ -20,14 +22,14 @@ def _table_where(table, index, value):


 def _sum_table_index(table, index):
-    print(f'index {index}')
+    # print(f'index {index}')
     s = 0
     count = 0
     for row in table:
         v = row[index]
         if v is None:
             continue
-        print(v)
+        # print(v)
         s += v
         count += 1
     return s
@@ -44,8 +46,8 @@ def _sort_table_index(table, index, reverse=False):


 def _select_rec(l, selector):
-    print('selector:', selector)
-    print('list:', l)
+    # print('selector:', selector)
+    # print('list:', l)
     if len(selector) == 1:
         return l[selector[0]]
     else:
@@ -95,7 +97,7 @@ def query_requests_by_period(project_id, start_time, end_time, filters: Optional
     conditions = ["event_type = 'REQUEST'"]
     query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start,
                      toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end
-                SELECT T1.hh, count(T2.session_id) as sessions, avg(T2.success) as success_rate, T2.url_host as names,
+                SELECT T1.hh, countIf(T2.session_id != 0) as sessions, avg(T2.success) as success_rate, T2.url_host as names,
                        T2.url_path as source, avg(T2.duration) as avg_duration
                 FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1
                 LEFT JOIN (SELECT session_id, url_host, url_path, success, message, duration, toStartOfInterval(datetime, INTERVAL %(step_size)s second) as dtime
@@ -107,12 +109,17 @@ def query_requests_by_period(project_id, start_time, end_time, filters: Optional
                 ORDER BY T1.hh DESC;"""
     with ch_client.ClickHouseClient() as conn:
         query = conn.format(query=query, params=params)
+        # print("--------")
+        # print(query)
+        # print("--------")
         res = conn.execute(query=query)
+    if res is None or sum([r.get("sessions") for r in res]) == 0:
+        return []

     table_hh1, table_hh2, columns, this_period_hosts, last_period_hosts = __get_two_values(res, time_index='hh',
                                                                                            name_index='source')
     test = [k[4] for k in table_hh1]
-    print(f'length {len(test)}, uniques {len(set(test))}')
+    # print(f'length {len(test)}, uniques {len(set(test))}')
     del res

     new_hosts = [x for x in this_period_hosts if x not in last_period_hosts]
@@ -160,13 +167,13 @@ def query_requests_by_period(project_id, start_time, end_time, filters: Optional
             if n == n_:
                 if n in new_hosts:
                     data_['value'] = new_duration_values[n]
-                data_['ratio'] = v / total
+                data_['ratio'] = 100 * v / total
                 break
         for n_, v in increase:
             if n == n_:
                 data_['value'] = v[0]
                 data_['oldValue'] = v[1]
-                data_['change'] = v[2]
+                data_['change'] = 100* v[2]
                 data_['isNew'] = False
                 break
         results.append(data_)
@@ -195,7 +202,7 @@ def query_most_errors_by_period(project_id, start_time, end_time,
     conditions = ["event_type = 'ERROR'"]
     query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start,
                      toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end
-                SELECT T1.hh, count(T2.session_id) as sessions, T2.name as names,
+                SELECT T1.hh, countIf(T2.session_id != 0) as sessions, T2.name as names,
                        groupUniqArray(T2.source) as sources
                 FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1
                 LEFT JOIN (SELECT session_id, name, source, message, toStartOfInterval(datetime, INTERVAL %(step_size)s second) as dtime
@@ -210,15 +217,20 @@ def query_most_errors_by_period(project_id, start_time, end_time,

     with ch_client.ClickHouseClient() as conn:
         query = conn.format(query=query, params=params)
+        # print("--------")
+        # print(query)
+        # print("--------")
         res = conn.execute(query=query)
+    if res is None or sum([r.get("sessions") for r in res]) == 0:
+        return []

     table_hh1, table_hh2, columns, this_period_errors, last_period_errors = __get_two_values(res, time_index='hh',
                                                                                              name_index='names')
     del res
-    print(table_hh1)
-    print('\n')
-    print(table_hh2)
-    print('\n')
+    # print(table_hh1)
+    # print('\n')
+    # print(table_hh2)
+    # print('\n')
     new_errors = [x for x in this_period_errors if x not in last_period_errors]
     common_errors = [x for x in this_period_errors if x not in new_errors]

@@ -260,13 +272,13 @@ def query_most_errors_by_period(project_id, start_time, end_time,
             if n == n_:
                 if n in new_errors:
                     data_['value'] = new_error_values[n]
-                data_['ratio'] = v / total
+                data_['ratio'] = 100 * v / total
                 break
         for n_, v in increase:
             if n == n_:
                 data_['value'] = v[0]
                 data_['oldValue'] = v[1]
-                data_['change'] = v[2]
+                data_['change'] = 100 * v[2]
                 data_['isNew'] = False
                 break
         results.append(data_)
@@ -283,7 +295,7 @@ def query_cpu_memory_by_period(project_id, start_time, end_time,
     conditions = ["event_type = 'PERFORMANCE'"]
     query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start,
                      toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end
-                SELECT T1.hh, count(T2.session_id) as sessions, avg(T2.avg_cpu) as cpu_used,
+                SELECT T1.hh, countIf(T2.session_id != 0) as sessions, avg(T2.avg_cpu) as cpu_used,
                        avg(T2.avg_used_js_heap_size) as memory_used, T2.url_host as names, groupUniqArray(T2.url_path) as sources
                 FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1
                 LEFT JOIN (SELECT session_id, url_host, url_path, avg_used_js_heap_size, avg_cpu, toStartOfInterval(datetime, INTERVAL %(step_size)s second) as dtime
@@ -295,7 +307,12 @@ def query_cpu_memory_by_period(project_id, start_time, end_time,
                 ORDER BY T1.hh DESC;"""
     with ch_client.ClickHouseClient() as conn:
         query = conn.format(query=query, params=params)
+        # print("--------")
+        # print(query)
+        # print("--------")
         res = conn.execute(query=query)
+    if res is None or sum([r.get("sessions") for r in res]) == 0:
+        return []

     table_hh1, table_hh2, columns, this_period_resources, last_period_resources = __get_two_values(res, time_index='hh',
                                                                                                    name_index='names')
@@ -308,20 +325,20 @@ def query_cpu_memory_by_period(project_id, start_time, end_time,
     mem_oldvalue = _mean_table_index(table_hh2, memory_idx)
     cpu_newvalue = _mean_table_index(table_hh2, cpu_idx)
     cpu_oldvalue = _mean_table_index(table_hh2, cpu_idx)
     # TODO: what if _tmp=0 ?

     mem_oldvalue = 1 if mem_oldvalue == 0 else mem_oldvalue
     cpu_oldvalue = 1 if cpu_oldvalue == 0 else cpu_oldvalue
     return [{'category': schemas_ee.InsightCategories.resources,
              'name': 'cpu',
              'value': cpu_newvalue,
              'oldValue': cpu_oldvalue,
-             'change': (cpu_newvalue - cpu_oldvalue) / cpu_oldvalue,
+             'change': 100 * (cpu_newvalue - cpu_oldvalue) / cpu_oldvalue,
              'isNew': None},
             {'category': schemas_ee.InsightCategories.resources,
              'name': 'memory',
              'value': mem_newvalue,
              'oldValue': mem_oldvalue,
-             'change': (mem_newvalue - mem_oldvalue) / mem_oldvalue,
+             'change': 100 * (mem_newvalue - mem_oldvalue) / mem_oldvalue,
              'isNew': None}
             ]
@@ -338,7 +355,7 @@ def query_click_rage_by_period(project_id, start_time, end_time,
     conditions = ["issue_type = 'click_rage'", "event_type = 'ISSUE'"]
     query = f"""WITH toUInt32(toStartOfInterval(toDateTime(%(startTimestamp)s/1000), INTERVAL %(step_size)s second)) AS start,
                      toUInt32(toStartOfInterval(toDateTime(%(endTimestamp)s/1000), INTERVAL %(step_size)s second)) AS end
-                SELECT T1.hh, count(T2.session_id) as sessions, groupUniqArray(T2.url_host) as names, T2.url_path as sources
+                SELECT T1.hh, countIf(T2.session_id != 0) as sessions, groupUniqArray(T2.url_host) as names, T2.url_path as sources
                 FROM (SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(start, end, %(step_size)s))) as hh) AS T1
                 LEFT JOIN (SELECT session_id, url_host, url_path, toStartOfInterval(datetime, INTERVAL %(step_size)s second ) as dtime
                            FROM experimental.events
@@ -351,7 +368,12 @@ def query_click_rage_by_period(project_id, start_time, end_time,
                 ORDER BY T1.hh DESC;"""
     with ch_client.ClickHouseClient() as conn:
         query = conn.format(query=query, params=params)
+        # print("--------")
+        # print(query)
+        # print("--------")
         res = conn.execute(query=query)
+    if res is None or sum([r.get("sessions") for r in res]) == 0:
+        return []

     table_hh1, table_hh2, columns, this_period_rage, last_period_rage = __get_two_values(res, time_index='hh',
                                                                                          name_index='sources')
@@ -397,13 +419,13 @@ def query_click_rage_by_period(project_id, start_time, end_time,
             if n == n_:
                 if n in new_names:
                     data_['value'] = new_raged_values[n]
-                data_['ratio'] = v / total
+                data_['ratio'] = 100 * v / total
                 break
         for n_, v in increase:
             if n == n_:
                 data_['value'] = v[0]
                 data_['oldValue'] = v[1]
-                data_['change'] = v[2]
+                data_['change'] = 100 * v[2]
                 data_['isNew'] = False
                 break
         results.append(data_)

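Across the insight queries, ratio and change are now reported as percentages rather than raw fractions, and countIf(T2.session_id != 0) keeps the empty rows produced by the LEFT JOIN time scaffold out of the session counts. A one-line arithmetic check of the new convention:

    change = 100 * (50 - 40) / 40  # a host going from 40 to 50 requests now reports 25.0, previously 0.25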
@@ -543,6 +543,9 @@ def change_password(tenant_id, user_id, email, old_password, new_password):
     item = get(tenant_id=tenant_id, user_id=user_id)
     if item is None:
         return {"errors": ["access denied"]}
+    if item["origin"] is not None and config("enforce_SSO", cast=bool, default=False) \
+            and not item["superAdmin"] and helper.is_saml2_available():
+        return {"errors": ["Please use your SSO to change your password, enforced by admin"]}
     if item["origin"] is not None and item["hasPassword"] is False:
         return {"errors": ["cannot change your password because you are logged-in from an SSO service"]}
     if old_password == new_password:
@@ -741,7 +744,7 @@ def authenticate(email, password, for_change_password=False):
         if for_change_password:
             return True
         r = helper.dict_to_camel_case(r)
-        if config("enforce_SSO", cast=bool, default=False) and not r["superAdmin"]:
+        if config("enforce_SSO", cast=bool, default=False) and not r["superAdmin"] and helper.is_saml2_available():
            return {"errors": ["must sign-in with SSO, enforced by admin"]}

        jwt_iat = change_jwt_iat(r['userId'])

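Both the password-reset and change-password paths now apply the enforce_SSO block only when SAML2 is actually configured, so an instance with the flag set but no IdP does not lock users out. A condensed sketch of the shared gate, assuming helper.is_saml2_available() reflects the SAML2 settings:

    enforced = config("enforce_SSO", cast=bool, default=False)
    if enforced and not user["superAdmin"] and helper.is_saml2_available():
        return {"errors": ["Please use your SSO to login, enforced by admin"]}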
@@ -47,7 +47,7 @@ class InsightCategories(str, Enum):
     resources = "resources"


-class GetInsightsSchema(BaseModel):
+class GetInsightsSchema(schemas._TimedSchema):
     startTimestamp: int = Field(default=TimeUTC.now(-7))
     endTimestamp: int = Field(default=TimeUTC.now())
     metricValue: List[InsightCategories] = Field(default=[])

@@ -91,6 +91,7 @@ $$
 ALTER TABLE IF EXISTS metrics
     DROP COLUMN IF EXISTS active,
     DROP COLUMN IF EXISTS is_predefined,
+    DROP COLUMN IF EXISTS predefined_key,
     DROP COLUMN IF EXISTS is_template,
     DROP COLUMN IF EXISTS category,
     DROP COLUMN IF EXISTS o_metric_id,
@@ -132,6 +133,7 @@ DROP INDEX IF EXISTS public.sessions_user_browser_gin_idx;
 DROP INDEX IF EXISTS public.sessions_user_os_gin_idx;
+DROP INDEX IF EXISTS public.issues_context_string_gin_idx;


 ALTER TABLE IF EXISTS projects
     ADD COLUMN IF NOT EXISTS beacon_size integer NOT NULL DEFAULT 0;

@@ -324,12 +326,13 @@ $$
$$
LANGUAGE plpgsql;

DROP FUNCTION get_new_filter_key;
DROP FUNCTION get_new_event_filter_key;
DROP FUNCTION get_new_event_key;

DROP TABLE IF EXISTS public.funnels;
+ALTER TABLE IF EXISTS public.metrics
+    ADD COLUMN IF NOT EXISTS data jsonb NULL;
COMMIT;

CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_selector_idx ON events.clicks (selector);

@@ -751,7 +751,8 @@ $$
                                             "col": 2,
                                             "row": 2,
                                             "position": 0
-                                           }'::jsonb
+                                           }'::jsonb,
+    data           jsonb   NULL
 );
 CREATE INDEX IF NOT EXISTS metrics_user_id_is_public_idx ON public.metrics (user_id, is_public);
 CREATE TABLE IF NOT EXISTS metric_series

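The migration and the fresh-install schema stay in sync: existing deployments gain the column through ALTER TABLE public.metrics, new installs create it directly. A quick post-upgrade sanity check, sketched with the project's pg_client helper; column and table names as in the migration above:

    with pg_client.PostgresClient() as cur:
        cur.execute("""SELECT column_name, is_nullable
                       FROM information_schema.columns
                       WHERE table_name = 'metrics' AND column_name = 'data';""")
        print(cur.fetchone())  # expect one row once the migration has run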
@@ -300,7 +300,13 @@ $$
$$
LANGUAGE plpgsql;

+DROP FUNCTION get_new_filter_key;
+DROP FUNCTION get_new_event_filter_key;
+DROP FUNCTION get_new_event_key;
+
DROP TABLE IF EXISTS public.funnels;
+ALTER TABLE IF EXISTS public.metrics
+    ADD COLUMN IF NOT EXISTS data jsonb NULL;
COMMIT;

CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_selector_idx ON events.clicks (selector);

@@ -859,7 +859,8 @@ $$
                                             "col": 2,
                                             "row": 2,
                                             "position": 0
-                                           }'::jsonb
+                                           }'::jsonb,
+    data           jsonb   NULL
 );

 CREATE INDEX metrics_user_id_is_public_idx ON public.metrics (user_id, is_public);
