Merge remote-tracking branch 'origin/api-v1.5.4' into dev

Taha Yassine Kraiem 2022-03-21 15:38:47 +01:00
commit 43d3a6a1a4
10 changed files with 188 additions and 37 deletions

View file

@@ -69,12 +69,12 @@ def get_live_sessions_ws(project_id, user_id=None):
params = {}
if user_id and len(user_id) > 0:
params["userId"] = user_id
connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}", params)
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
return []
try:
connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}", params)
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
return []
live_peers = connected_peers.json().get("data", [])
except Exception as e:
print("issue getting Live-Assist response")
@@ -104,12 +104,12 @@ def get_live_session_by_id(project_id, session_id):
def is_live(project_id, session_id, project_key=None):
if project_key is None:
project_key = projects.get_project_key(project_id)
connected_peers = requests.get(config("peersList") % config("S3_KEY") + f"/{project_key}")
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
return False
try:
connected_peers = requests.get(config("peersList") % config("S3_KEY") + f"/{project_key}")
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
return False
connected_peers = connected_peers.json().get("data", [])
except Exception as e:
print("issue getting Assist response")

View file

@@ -13,7 +13,8 @@ def get_by_session_id(session_id):
header_size,
encoded_body_size,
decoded_body_size,
success
success,
COALESCE(status, CASE WHEN success THEN 200 END) AS status
FROM events.resources
WHERE session_id = %(session_id)s;"""
params = {"session_id": session_id}

View file

@@ -251,8 +251,15 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, e
# print("--------------------")
# print(main_query)
# print("--------------------")
cur.execute(main_query)
try:
cur.execute(main_query)
except Exception as err:
print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
print(main_query)
print("--------- PAYLOAD -----------")
print(data.dict())
print("--------------------")
raise err
if errors_only:
return helper.list_to_camel_case(cur.fetchall())
@@ -387,10 +394,6 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
"s.duration IS NOT NULL"
]
extra_from = ""
fav_only_join = ""
if favorite_only and not errors_only:
fav_only_join = "LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id"
# extra_constraints.append("fs.user_id = %(userId)s")
events_query_part = ""
if len(data.filters) > 0:
meta_keys = None
@@ -628,6 +631,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
event_where = ["ms.project_id = %(projectId)s", "main.timestamp >= %(startDate)s",
"main.timestamp <= %(endDate)s", "ms.start_ts >= %(startDate)s",
"ms.start_ts <= %(endDate)s", "ms.duration IS NOT NULL"]
if favorite_only and not errors_only:
event_from += "INNER JOIN public.user_favorite_sessions AS fs USING(session_id)"
event_where.append("fs.user_id = %(userId)s")
else:
event_from = "%s"
event_where = ["main.timestamp >= %(startDate)s", "main.timestamp <= %(endDate)s",
@@ -946,16 +952,14 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
MIN(timestamp) AS first_event_ts,
MAX(timestamp) AS last_event_ts
FROM ({events_joiner.join(events_query_from)}) AS u
GROUP BY 1
{fav_only_join}"""
GROUP BY 1"""
else:
events_query_part = f"""SELECT
event_0.session_id,
MIN(event_0.timestamp) AS first_event_ts,
MAX(event_{event_index - 1}.timestamp) AS last_event_ts
FROM {events_joiner.join(events_query_from)}
GROUP BY 1
{fav_only_join}"""
GROUP BY 1"""
else:
data.events = []
# ---------------------------------------------------------------------------
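
Two changes in the sessions search above: cur.execute is wrapped so a failing search logs the generated SQL and the request payload before re-raising, and the favorite-only filter moves from a stray fav_only_join appended after GROUP BY into an INNER JOIN ... USING(session_id) plus a fs.user_id predicate inside the events subquery. A sketch of the log-and-re-raise wrapper, with a hypothetical helper name (the real code inlines this in search2_pg):

# Sketch of the log-and-re-raise wrapper (hypothetical helper).
def execute_logged(cur, query: str, payload: dict):
    try:
        cur.execute(query)
    except Exception:
        print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------")
        print(query)
        print("--------- PAYLOAD -----------")
        print(payload)
        raise  # keep the original traceback for the caller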

View file

@@ -5,11 +5,12 @@ import psycopg2.extras
from decouple import config
from psycopg2 import pool
PG_CONFIG = {"host": config("pg_host"),
"database": config("pg_dbname"),
"user": config("pg_user"),
"password": config("pg_password"),
"port": config("pg_port", cast=int)}
_PG_CONFIG = {"host": config("pg_host"),
"database": config("pg_dbname"),
"user": config("pg_user"),
"password": config("pg_password"),
"port": config("pg_port", cast=int)}
PG_CONFIG = dict(_PG_CONFIG)
if config("pg_timeout", cast=int, default=0) > 0:
PG_CONFIG["options"] = f"-c statement_timeout={config('pg_timeout', cast=int) * 1000}"
@@ -63,7 +64,7 @@ class PostgresClient:
def __init__(self, long_query=False):
self.long_query = long_query
if long_query:
self.connection = psycopg2.connect(**PG_CONFIG)
self.connection = psycopg2.connect(**_PG_CONFIG)
else:
self.connection = postgreSQL_pool.getconn()
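
pg_client now keeps two configurations: _PG_CONFIG holds the raw connection settings, and PG_CONFIG is a copy that adds a server-side statement_timeout when pg_timeout is set. Pooled connections use the timed config, while long_query connections use the raw one so analytical queries are not cut off. An illustrative sketch with placeholder values, not the project's settings:

# Illustrative sketch of the two-config split (placeholder values).
import psycopg2

_BASE_CONFIG = {"host": "localhost", "database": "openreplay",
                "user": "postgres", "password": "...", "port": 5432}
TIMED_CONFIG = dict(_BASE_CONFIG)
TIMED_CONFIG["options"] = "-c statement_timeout=30000"  # 30 s, in milliseconds

# pooled / normal queries: psycopg2.connect(**TIMED_CONFIG)  -> aborted after 30 s
# long_query=True:         psycopg2.connect(**_BASE_CONFIG)  -> no statement timeout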

View file

@@ -11,6 +11,10 @@ def attribute_to_camel_case(snake_str):
return components[0] + ''.join(x.title() for x in components[1:])
def transform_email(email: str) -> str:
return email.lower() if isinstance(email, str) else email
class _Grecaptcha(BaseModel):
g_recaptcha_response: Optional[str] = Field(None, alias='g-recaptcha-response')
@@ -18,6 +22,7 @@ class _Grecaptcha(BaseModel):
class UserLoginSchema(_Grecaptcha):
email: EmailStr = Field(...)
password: str = Field(...)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class UserSignupSchema(UserLoginSchema):
@@ -31,17 +36,21 @@ class UserSignupSchema(UserLoginSchema):
class EditUserSchema(BaseModel):
name: Optional[str] = Field(None)
email: Optional[str] = Field(None)
email: Optional[EmailStr] = Field(None)
admin: Optional[bool] = Field(False)
appearance: Optional[dict] = Field({})
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditUserAppearanceSchema(BaseModel):
appearance: dict = Field(...)
class ForgetPasswordPayloadSchema(_Grecaptcha):
email: str = Field(...)
email: EmailStr = Field(...)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditUserPasswordSchema(BaseModel):
@@ -70,7 +79,9 @@ class CurrentAPIContext(BaseModel):
class CurrentContext(CurrentAPIContext):
user_id: int = Field(...)
email: str = Field(...)
email: EmailStr = Field(...)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class AddSlackSchema(BaseModel):
@@ -115,15 +126,19 @@ class CreateEditWebhookSchema(BaseModel):
class CreateMemberSchema(BaseModel):
userId: Optional[int] = Field(None)
name: str = Field(...)
email: str = Field(...)
email: EmailStr = Field(...)
admin: bool = Field(False)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditMemberSchema(BaseModel):
name: str = Field(...)
email: str = Field(...)
email: EmailStr = Field(...)
admin: bool = Field(False)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class EditPasswordByInvitationSchema(BaseModel):
invitation: str = Field(...)
@@ -244,6 +259,8 @@ class EmailPayloadSchema(BaseModel):
link: str = Field(...)
message: str = Field(...)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class MemberInvitationPayloadSchema(BaseModel):
auth: str = Field(...)
@@ -252,6 +269,8 @@ class MemberInvitationPayloadSchema(BaseModel):
client_id: str = Field(...)
sender_name: str = Field(...)
_transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)
class Config:
alias_generator = attribute_to_camel_case
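
The schema changes switch email fields from str to EmailStr and attach a shared pre-validator that lower-cases the address before validation; allow_reuse=True lets the same function back validators on many models. A minimal pydantic v1 sketch of that pattern (the model name is illustrative; transform_email is the helper added in this diff):

# Minimal pydantic v1 sketch of the reusable lower-casing email validator.
from pydantic import BaseModel, EmailStr, Field, validator

def transform_email(email: str) -> str:
    return email.lower() if isinstance(email, str) else email

class InviteSchema(BaseModel):
    name: str = Field(...)
    email: EmailStr = Field(...)
    admin: bool = Field(False)
    # pre=True runs before EmailStr validation; allow_reuse=True lets the same
    # function back validators on several models without a duplicate-name error.
    _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email)

# InviteSchema(name="Jane", email="Jane.Doe@Example.COM").email -> "jane.doe@example.com"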

View file

@@ -7,7 +7,7 @@ def get_by_session_id(session_id):
with ch_client.ClickHouseClient() as ch:
ch_query = """\
SELECT
datetime,url,type,duration,ttfb,header_size,encoded_body_size,decoded_body_size,success
datetime,url,type,duration,ttfb,header_size,encoded_body_size,decoded_body_size,success,coalesce(status,if(success, 200, status)) AS status
FROM resources
WHERE session_id = toUInt64(%(session_id)s);"""
params = {"session_id": session_id}

View file

@@ -6,6 +6,70 @@ SELECT 'v1.5.4-ee'
$$ LANGUAGE sql IMMUTABLE;
-- to detect duplicate users and delete them if possible
DO
$$
DECLARE
duplicate RECORD;
BEGIN
IF EXISTS(SELECT user_id
FROM users
WHERE lower(email) =
(SELECT LOWER(email)
FROM users AS su
WHERE LOWER(su.email) = LOWER(users.email)
AND su.user_id != users.user_id
LIMIT 1)
ORDER BY LOWER(email)) THEN
raise notice 'duplicate users detected';
FOR duplicate IN SELECT user_id, email, deleted_at, verified_email, jwt_iat
FROM users
WHERE lower(email) =
(SELECT LOWER(email)
FROM users AS su
WHERE LOWER(su.email) = LOWER(users.email)
AND su.user_id != users.user_id
LIMIT 1)
ORDER BY LOWER(email)
LOOP
IF duplicate.deleted_at IS NOT NULL OR duplicate.jwt_iat IS NULL THEN
raise notice 'deleting duplicate user: % %',duplicate.user_id,duplicate.email;
DELETE FROM users WHERE user_id = duplicate.user_id;
END IF;
END LOOP;
IF EXISTS(SELECT user_id
FROM users
WHERE lower(email) =
(SELECT LOWER(email)
FROM users AS su
WHERE LOWER(su.email) = LOWER(users.email)
AND su.user_id != users.user_id
LIMIT 1)
ORDER BY LOWER(email)) THEN
raise notice 'remaining duplicates, please fix (delete) before finishing update';
FOR duplicate IN SELECT user_id, email
FROM users
WHERE lower(email) =
(SELECT LOWER(email)
FROM users AS su
WHERE LOWER(su.email) = LOWER(users.email)
AND su.user_id != users.user_id
LIMIT 1)
ORDER BY LOWER(email)
LOOP
raise notice 'user: % %',duplicate.user_id,duplicate.email;
END LOOP;
RAISE 'Duplicate users' USING ERRCODE = '42710';
END IF;
END IF;
END;
$$
LANGUAGE plpgsql;
UPDATE users
SET email=LOWER(email);
DROP INDEX IF EXISTS autocomplete_value_gin_idx;
COMMIT;
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
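
The migration's DO block deletes duplicate users (case-insensitive email) that are safe to remove, i.e. already deleted or never logged in (jwt_iat IS NULL), aborts with ERRCODE 42710 if duplicates remain, lower-cases all stored emails, and replaces the broad autocomplete GIN index with a CLICK-only partial index built CONCURRENTLY. A hypothetical pre-flight check for remaining case-insensitive duplicates, written as a simple GROUP BY/HAVING rather than the migration's correlated subquery:

# Hypothetical pre-flight check before applying this migration: list emails
# that still collide case-insensitively, since the DO block raises ERRCODE
# 42710 when duplicates it cannot safely delete remain.
import psycopg2

def remaining_duplicate_emails(dsn: str):
    query = """SELECT LOWER(email) AS email, COUNT(*) AS cnt
               FROM users
               GROUP BY LOWER(email)
               HAVING COUNT(*) > 1;"""
    with psycopg2.connect(dsn) as conn, conn.cursor() as cur:
        cur.execute(query)
        return cur.fetchall()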

View file

@@ -7,7 +7,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.5.3-ee'
SELECT 'v1.5.4-ee'
$$ LANGUAGE sql IMMUTABLE;
@@ -721,7 +721,6 @@ $$
CREATE unique index IF NOT EXISTS autocomplete_unique ON autocomplete (project_id, value, type);
CREATE index IF NOT EXISTS autocomplete_project_id_idx ON autocomplete (project_id);
CREATE INDEX IF NOT EXISTS autocomplete_type_idx ON public.autocomplete (type);
CREATE INDEX IF NOT EXISTS autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops);
CREATE INDEX autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
CREATE INDEX autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM';

View file

@@ -6,6 +6,70 @@ SELECT 'v1.5.4'
$$ LANGUAGE sql IMMUTABLE;
-- to detect duplicate users and delete them if possible
DO
$$
DECLARE
duplicate RECORD;
BEGIN
IF EXISTS(SELECT user_id
FROM users
WHERE lower(email) =
(SELECT LOWER(email)
FROM users AS su
WHERE LOWER(su.email) = LOWER(users.email)
AND su.user_id != users.user_id
LIMIT 1)
ORDER BY LOWER(email)) THEN
raise notice 'duplicate users detected';
FOR duplicate IN SELECT user_id, email, deleted_at, verified_email, jwt_iat
FROM users
WHERE lower(email) =
(SELECT LOWER(email)
FROM users AS su
WHERE LOWER(su.email) = LOWER(users.email)
AND su.user_id != users.user_id
LIMIT 1)
ORDER BY LOWER(email)
LOOP
IF duplicate.deleted_at IS NOT NULL OR duplicate.jwt_iat IS NULL THEN
raise notice 'deleting duplicate user: % %',duplicate.user_id,duplicate.email;
DELETE FROM users WHERE user_id = duplicate.user_id;
END IF;
END LOOP;
IF EXISTS(SELECT user_id
FROM users
WHERE lower(email) =
(SELECT LOWER(email)
FROM users AS su
WHERE LOWER(su.email) = LOWER(users.email)
AND su.user_id != users.user_id
LIMIT 1)
ORDER BY LOWER(email)) THEN
raise notice 'remaining duplicates, please fix (delete) before finishing update';
FOR duplicate IN SELECT user_id, email
FROM users
WHERE lower(email) =
(SELECT LOWER(email)
FROM users AS su
WHERE LOWER(su.email) = LOWER(users.email)
AND su.user_id != users.user_id
LIMIT 1)
ORDER BY LOWER(email)
LOOP
raise notice 'user: % %',duplicate.user_id,duplicate.email;
END LOOP;
RAISE 'Duplicate users' USING ERRCODE = '42710';
END IF;
END IF;
END;
$$
LANGUAGE plpgsql;
UPDATE users
SET email=LOWER(email);
DROP INDEX IF EXISTS autocomplete_value_gin_idx;
COMMIT;
CREATE INDEX CONCURRENTLY IF NOT EXISTS autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';

View file

@@ -6,7 +6,7 @@ CREATE SCHEMA IF NOT EXISTS events;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.5.3'
SELECT 'v1.5.4'
$$ LANGUAGE sql IMMUTABLE;
-- --- accounts.sql ---
@@ -898,7 +898,6 @@ $$
CREATE unique index autocomplete_unique ON autocomplete (project_id, value, type);
CREATE index autocomplete_project_id_idx ON autocomplete (project_id);
CREATE INDEX autocomplete_type_idx ON public.autocomplete (type);
CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops);
CREATE INDEX autocomplete_value_clickonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CLICK';
CREATE INDEX autocomplete_value_customonly_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops) WHERE type = 'CUSTOM';