Merge remote-tracking branch 'origin/dev' into api-v1.5.4

commit ef17d683fc
Author: Taha Yassine Kraiem
Date:   2022-03-14 19:16:35 +01:00

8 changed files with 109 additions and 53 deletions

View file

@@ -103,10 +103,8 @@ def Build(a):
a["filter"]["startDate"] = -1
a["filter"]["endDate"] = TimeUTC.now()
full_args, query_part, sort = sessions.search_query_parts(
data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]),
error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=a["projectId"],
user_id=None)
data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False,
issue=None, project_id=a["projectId"], user_id=None, favorite_only=False)
subQ = f"""SELECT COUNT(session_id) AS value
{query_part}"""
else:
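Note: the rewritten call keeps the same arguments; only keyword ordering and line wrapping change. For reference, a minimal sketch of the build-once pattern it relies on, using the names from this diff (the surrounding wiring, payload, and count_query are assumed for illustration):

full_args, query_part, sort = sessions.search_query_parts(
    data=payload, error_status=None, errors_only=False,
    issue=None, project_id=project_id, user_id=None, favorite_only=False)

# query_part is the shared FROM/WHERE body; full_args carries the
# %(named)s parameters it references, so any SELECT head can be
# prepended and run against the same argument dict.
count_query = f"""SELECT COUNT(session_id) AS value
                  {query_part}"""
with pg_client.PostgresClient() as cur:
    cur.execute(cur.mogrify(count_query, full_args))
    total = cur.fetchone()["value"]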

View file

@@ -423,6 +423,10 @@ def __get_sort_key(key):
@dev.timed
def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, status="ALL", favorite_only=False):
empty_response = {"data": {
'total': 0,
'errors': []
}}
status = status.upper()
if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']:
return {"errors": ["invalid error status"]}
@@ -445,12 +449,9 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
if len(data.events) > 0 or len(data.filters) > 0 or status != "ALL":
# if favorite_only=True, search for sessions associated with favorite_error
statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
error_status=status, favorite_only=favorite_only)
error_status=status)
if len(statuses) == 0:
return {"data": {
'total': 0,
'errors': []
}}
return empty_response
error_ids = [e["error_id"] for e in statuses]
with pg_client.PostgresClient() as cur:
if data.startDate is None:
@@ -465,12 +466,20 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
if data.order is not None:
order = data.order
extra_join = ""
params = {
"startDate": data.startDate,
"endDate": data.endDate,
"project_id": project_id,
"userId": user_id,
"step_size": step_size}
if data.limit is not None and data.page is not None:
params["errors_offset"] = (data.page - 1) * data.limit
params["errors_limit"] = data.limit
else:
params["errors_offset"] = 0
params["errors_limit"] = 200
if error_ids is not None:
params["error_ids"] = tuple(error_ids)
pg_sub_query.append("error_id IN %(error_ids)s")
@@ -478,7 +487,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
pg_sub_query.append("ufe.user_id = %(userId)s")
extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
main_pg_query = f"""\
SELECT error_id,
SELECT full_count,
error_id,
name,
message,
users,
@@ -486,20 +496,23 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
last_occurrence,
first_occurrence,
chart
FROM (SELECT error_id,
name,
message,
COUNT(DISTINCT user_uuid) AS users,
COUNT(DISTINCT session_id) AS sessions,
MAX(timestamp) AS max_datetime,
MIN(timestamp) AS min_datetime
FROM events.errors
INNER JOIN public.errors AS pe USING (error_id)
INNER JOIN public.sessions USING (session_id)
{extra_join}
WHERE {" AND ".join(pg_sub_query)}
GROUP BY error_id, name, message
ORDER BY {sort} {order}) AS details
FROM (SELECT COUNT(details) OVER () AS full_count, details.*
FROM (SELECT error_id,
name,
message,
COUNT(DISTINCT user_uuid) AS users,
COUNT(DISTINCT session_id) AS sessions,
MAX(timestamp) AS max_datetime,
MIN(timestamp) AS min_datetime
FROM events.errors
INNER JOIN public.errors AS pe USING (error_id)
INNER JOIN public.sessions USING (session_id)
{extra_join}
WHERE {" AND ".join(pg_sub_query)}
GROUP BY error_id, name, message
ORDER BY {sort} {order}) AS details
LIMIT %(errors_limit)s OFFSET %(errors_offset)s
) AS details
INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence,
MIN(timestamp) AS first_occurrence
FROM events.errors
@@ -517,16 +530,14 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
# print("--------------------")
# print(cur.mogrify(main_pg_query, params))
# print("--------------------")
cur.execute(cur.mogrify(main_pg_query, params))
total = cur.rowcount
rows = cur.fetchall()
total = 0 if len(rows) == 0 else rows[0]["full_count"]
if flows:
return {"data": {"count": total}}
row = cur.fetchone()
rows = []
limit = 200
while row is not None and len(rows) < limit:
rows.append(row)
row = cur.fetchone()
if total == 0:
rows = []
else:
@@ -552,6 +563,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
}
for r in rows:
r.pop("full_count")
if r["error_id"] in statuses:
r["status"] = statuses[r["error_id"]]["status"]
r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"]

View file

@@ -168,10 +168,11 @@ def _isUndefined_operator(op: schemas.SearchEventOperator):
@dev.timed
def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, favorite_only=False, errors_only=False,
def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status="ALL", count_only=False, issue=None):
full_args, query_part, sort = search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id,
user_id)
full_args, query_part, sort = search_query_parts(data=data, error_status=error_status, errors_only=errors_only,
favorite_only=data.bookmarked, issue=issue, project_id=project_id,
user_id=user_id)
if data.limit is not None and data.page is not None:
full_args["sessions_limit_s"] = (data.page - 1) * data.limit
full_args["sessions_limit_e"] = data.page * data.limit
@@ -379,8 +380,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
fav_only_join = ""
if favorite_only and not errors_only:
fav_only_join = "LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id"
extra_constraints.append("fs.user_id = %(userId)s")
full_args["userId"] = user_id
# extra_constraints.append("fs.user_id = %(userId)s")
events_query_part = ""
if len(data.filters) > 0:
meta_keys = None
@@ -976,7 +976,12 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
extra_constraints.append("ufe.user_id = %(userId)s")
# extra_constraints = [extra.decode('UTF-8') + "\n" for extra in extra_constraints]
if not favorite_only and not errors_only and user_id is not None:
if favorite_only and not errors_only and user_id is not None:
extra_from += """INNER JOIN (SELECT user_id, session_id
FROM public.user_favorite_sessions
WHERE user_id = %(userId)s) AS favorite_sessions
USING (session_id)"""
elif not favorite_only and not errors_only and user_id is not None:
extra_from += """LEFT JOIN (SELECT user_id, session_id
FROM public.user_favorite_sessions
WHERE user_id = %(userId)s) AS favorite_sessions
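Note: this hunk flips bookmarked-only searches from annotation to restriction. Both join shapes appear in this diff; a distilled comparison (simplified sketch):

# Annotate: LEFT JOIN keeps every session and merely marks favorites
# (favorite_sessions.user_id is NULL for sessions that are not bookmarked).
annotate_join = """LEFT JOIN (SELECT user_id, session_id
                              FROM public.user_favorite_sessions
                              WHERE user_id = %(userId)s) AS favorite_sessions
                   USING (session_id)"""

# Restrict: INNER JOIN drops every session the user has not bookmarked,
# which is the new favorite_only behavior.
restrict_join = """INNER JOIN (SELECT user_id, session_id
                               FROM public.user_favorite_sessions
                               WHERE user_id = %(userId)s) AS favorite_sessions
                   USING (session_id)"""

Pre-filtering on user_id inside the subselect keeps the joined set small, so the INNER JOIN can enforce the restriction without an extra WHERE clause.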

View file

@@ -126,7 +126,7 @@ def events_search(projectId: int, q: str,
@app.post('/{projectId}/sessions/search2', tags=["sessions"])
def sessions_search2(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search2_pg(data, projectId, user_id=context.user_id)
data = sessions.search2_pg(data=data, project_id=projectId, user_id=context.user_id)
return {'data': data}

View file

@@ -605,6 +605,7 @@ class SessionsSearchPayloadSchema(BaseModel):
group_by_user: bool = Field(default=False)
limit: int = Field(default=200, gt=0, le=200)
page: int = Field(default=1, gt=0)
bookmarked: bool = Field(default=False)
class Config:
alias_generator = attribute_to_camel_case
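Note: with alias_generator = attribute_to_camel_case, clients address snake_case fields by their camelCase aliases; bookmarked is a single word, so its alias is unchanged. A minimal sketch of parsing such a payload with the pydantic-v1 style parse_obj used elsewhere in this diff (assuming the remaining schema fields have defaults):

data = schemas.SessionsSearchPayloadSchema.parse_obj({
    "groupByUser": False,  # camelCase alias generated for group_by_user
    "limit": 50,           # validated against gt=0, le=200
    "page": 2,             # validated against gt=0
    "bookmarked": True,    # new flag, read as data.bookmarked by search2_pg
})
assert data.bookmarked is True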

View file

@@ -483,13 +483,18 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
order = "DESC"
if data.order is not None:
order = data.order
extra_join = ""
params = {
"startDate": data.startDate,
"endDate": data.endDate,
"project_id": project_id,
"userId": user_id,
"step_size": step_size}
if data.limit is not None and data.page is not None:
params["errors_offset"] = (data.page - 1) * data.limit
params["errors_limit"] = data.limit
else:
params["errors_offset"] = 0
params["errors_limit"] = 200
if favorite_only:
cur.execute(cur.mogrify(f"""SELECT error_id
FROM public.user_favorite_errors
@@ -531,9 +536,10 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
WHERE {" AND ".join(ch_sub_query)}
GROUP BY error_id, name, message
ORDER BY {sort} {order}
LIMIT 200) AS details INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence
FROM errors
GROUP BY error_id) AS time_details
LIMIT %(errors_limit)s OFFSET %(errors_offset)s) AS details
INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence
FROM errors
GROUP BY error_id) AS time_details
ON details.error_id=time_details.error_id
INNER JOIN (SELECT error_id, groupArray([timestamp, count]) AS chart
FROM (SELECT error_id, toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size)s second)) * 1000 AS timestamp,
@@ -544,8 +550,10 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id, flows=False, s
ORDER BY timestamp) AS sub_table
GROUP BY error_id) AS chart_details ON details.error_id=chart_details.error_id;"""
# print("--------------------")
# print(main_ch_query % params)
# print("------------")
# print(ch.client().substitute_params(main_ch_query, params))
# print("------------")
rows = ch.execute(query=main_ch_query, params=params)
if len(statuses) == 0:
query = cur.mogrify(
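Note: the ClickHouse branch gains the same page-to-offset translation as the PostgreSQL one, with the former hard-coded LIMIT 200 kept as the fallback. A distilled sketch (pagination_params is a hypothetical helper; the diff inlines this logic):

def pagination_params(page, limit, default_limit=200):
    # Hypothetical helper mirroring the inline logic above.
    if limit is not None and page is not None:
        return {"errors_offset": (page - 1) * limit, "errors_limit": limit}
    return {"errors_offset": 0, "errors_limit": default_limit}

params.update(pagination_params(data.page, data.limit))
# The fully substituted query can be inspected before execution:
# print(ch.client().substitute_params(main_ch_query, params))
rows = ch.execute(query=main_ch_query, params=params)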

View file

@@ -6,9 +6,19 @@ import { connect } from 'react-redux';
import { setActiveStages } from 'Duck/funnels';
import { Styles } from '../../Dashboard/Widgets/common';
import { numberWithCommas } from 'App/utils'
import { truncate } from 'App/utils'
const MIN_BAR_HEIGHT = 20;
function CustomTick(props) {
const { x, y, payload } = props;
return (
<g transform={`translate(${x},${y})`}>
<text x={0} y={0} dy={16} fontSize={12} textAnchor="middle" fill="#666">{payload.value}</text>
</g>
);
}
function FunnelGraph(props) {
const { data, activeStages, funnelId, liveFilters } = props;
const [activeIndex, setActiveIndex] = useState(activeStages)
@@ -118,13 +128,29 @@ function FunnelGraph(props) {
)
}
const CustomTooltip = ({ active, payload, msg = '' }) => {
const CustomTooltip = (props) => {
const { payload } = props;
if (payload.length === 0) return null;
const { value, headerText } = payload[0].payload;
// const value = payload[0].payload.value;
if (!value) return null;
return (
<div className="rounded border bg-white p-2">
<p className="text-sm">{msg}</p>
<div className="rounded border bg-white p-2">
<div>{headerText}</div>
{value.map(i => (
<div className="text-sm ml-2">{truncate(i, 30)}</div>
))}
</div>
);
)
};
// const CustomTooltip = ({ active, payload, msg = '' }) => {
// return (
// <div className="rounded border bg-white p-2">
// <p className="text-sm">{msg}</p>
// </div>
// );
// };
const TEMP = {}
@@ -152,7 +178,9 @@ function FunnelGraph(props) {
background={'transparent'}
>
<CartesianGrid strokeDasharray="1 3" stroke="#BBB" vertical={false} />
{activeStages.length < 2 && <Tooltip cursor={{ fill: 'transparent' }} content={<CustomTooltip msg={activeStages.length > 0 ? 'Select one more event.' : 'Select any two events to analyze in depth.'} />} />}
{/* {activeStages.length < 2 && <Tooltip cursor={{ fill: 'transparent' }} content={<CustomTooltip msg={activeStages.length > 0 ? 'Select one more event.' : 'Select any two events to analyze in depth.'} />} />} */}
<Tooltip cursor={{ fill: 'transparent' }} content={CustomTooltip} />
<Bar
dataKey="sessionsCount"
onClick={handleClick}
@@ -210,7 +238,8 @@ function FunnelGraph(props) {
dataKey="label"
strokeWidth={0}
interval={0}
tick ={{ fill: '#666', fontSize: 12 }}
// tick ={{ fill: '#666', fontSize: 12 }}
tick={<CustomTick />}
xAxisId={0}
/>
{/* <XAxis

View file

@@ -14,7 +14,10 @@ const getRedableName = ({ type, value, operator }) => {
break;
case "INPUT":
str = 'Entered';
break;
break;
case "CUSTOM":
str = 'Custom Event';
break;
}
return `${str} ${operator}`;
@@ -52,7 +55,7 @@ export default Record({
},
fromJS: ({ stages = [], filter, activeStages = null, ...rest }) => {
let _stages = stages.map((stage, index) => {
// stage.label = getRedableName(stage.type, stage.value);
stage.headerText = getRedableName(stage.type, stage.value);
stage.label = `Step ${index + 1}`;
return stage;
});
@@ -73,7 +76,7 @@ export default Record({
...rest,
stages: _stages.length > 0 ? _stages.map((stage, index) => {
if (!stage) return;
// stage.label = getRedableName(stage);
stage.headerText = getRedableName(stage);
stage.label = `Step ${index + 1}`;
return stage;
}) : [],