Dev (#2311)

* refactor(chalice): upgraded dependencies
* feat(chalice): support heatmaps
* feat(chalice): support table-of-browsers showing user-count
* feat(chalice): support table-of-devices showing user-count
* feat(chalice): support table-of-URLs showing user-count
* feat(chalice): get heatmap by sessionId
* refactor(chalice): updated dependencies
* refactor(alerts): updated dependencies
* refactor(crons): updated dependencies
This commit is contained in:
parent a8d5857e0f
commit b6ba3aceb9

12 changed files with 91 additions and 14 deletions
@@ -6,18 +6,18 @@ name = "pypi"
 [packages]
 urllib3 = "==1.26.16"
 requests = "==2.32.3"
-boto3 = "==1.34.125"
+boto3 = "==1.34.134"
 pyjwt = "==2.8.0"
 psycopg2-binary = "==2.9.9"
-psycopg = {extras = ["binary", "pool"], version = "==3.1.19"}
 elasticsearch = "==8.14.0"
 jira = "==3.8.0"
 fastapi = "==0.111.0"
-uvicorn = {extras = ["standard"], version = "==0.30.1"}
 python-decouple = "==3.8"
-pydantic = {extras = ["email"], version = "==2.3.0"}
 apscheduler = "==3.10.4"
 redis = "==5.1.0b6"
+psycopg = {extras = ["binary", "pool"], version = "==3.1.19"}
+uvicorn = {extras = ["standard"], version = "==0.30.1"}
+pydantic = {extras = ["email"], version = "==2.3.0"}

 [dev-packages]
@@ -80,6 +80,34 @@ def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
     return helper.list_to_camel_case(rows)


+def get_by_url_and_session_id(project_id, session_id, data: schemas.GetHeatmapBasePayloadSchema):
+    args = {"session_id": session_id, "url": data.url}
+    constraints = ["session_id = %(session_id)s",
+                   "(url = %(url)s OR path= %(url)s)",
+                   "normalized_x IS NOT NULL"]
+    query_from = "events.clicks"
+
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(f"""SELECT normalized_x, normalized_y
+                                FROM {query_from}
+                                WHERE {" AND ".join(constraints)};""", args)
+        logger.debug("---------")
+        logger.debug(query.decode('UTF-8'))
+        logger.debug("---------")
+        try:
+            cur.execute(query)
+        except Exception as err:
+            logger.warning("--------- HEATMAP-session_id SEARCH QUERY EXCEPTION -----------")
+            logger.warning(query.decode('UTF-8'))
+            logger.warning("--------- PAYLOAD -----------")
+            logger.warning(data)
+            logger.warning("--------------------")
+            raise err
+        rows = cur.fetchall()
+
+    return helper.list_to_camel_case(rows)
+
+
 SESSION_PROJECTION_COLS = """s.project_id,
                              s.session_id::text AS session_id,
                              s.user_uuid,
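For orientation, a minimal usage sketch of the PostgreSQL-backed helper added above. The import paths, project ID, session ID, and URL are assumptions for illustration only, not part of this commit:

    # Hypothetical usage of get_by_url_and_session_id; import paths and the
    # project_id/session_id values below are assumed, not taken from the diff.
    import schemas
    from chalicelib.core import heatmaps  # assumed module path

    payload = schemas.GetHeatmapBasePayloadSchema(url="https://example.com/pricing")
    points = heatmaps.get_by_url_and_session_id(project_id=1, session_id=123, data=payload)
    # Rows come back camel-cased by helper.list_to_camel_case, e.g.
    # [{"normalizedX": 120, "normalizedY": 340}, ...]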
@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.32.3
-boto3==1.34.125
+boto3==1.34.134
 pyjwt==2.8.0
 psycopg2-binary==2.9.9
 psycopg[pool,binary]==3.1.19
@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.32.3
-boto3==1.34.125
+boto3==1.34.134
 pyjwt==2.8.0
 psycopg2-binary==2.9.9
 psycopg[pool,binary]==3.1.19
@@ -417,6 +417,13 @@ def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema =
     return {"data": heatmaps.get_by_url(project_id=projectId, data=data)}


+@app.post('/{projectId}/sessions/{sessionId}/heatmaps/url', tags=["heatmaps"])
+def get_heatmaps_by_session_id_url(projectId: int, sessionId: int,
+                                   data: schemas.GetHeatmapBasePayloadSchema = Body(...),
+                                   context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": heatmaps.get_by_url_and_session_id(project_id=projectId, session_id=sessionId, data=data)}
+
+
 @app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"])
 def add_remove_favorite_session2(projectId: int, sessionId: int,
                                  context: schemas.CurrentContext = Depends(OR_context)):
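The hunk above wires the new helper to a POST route. A hedged client-side sketch of calling it, assuming a locally served API on port 8000, project 1, session 123, and a valid bearer token (all of these are assumptions, not shown in the diff):

    # Illustrative client call for the new route; base URL, IDs and the token
    # placeholder are assumptions.
    import requests

    resp = requests.post(
        "http://localhost:8000/1/sessions/123/heatmaps/url",
        json={"url": "https://example.com/pricing"},
        headers={"Authorization": "Bearer <JWT>"},
    )
    resp.raise_for_status()
    print(resp.json()["data"])  # expected: list of normalized click coordinates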
@@ -1560,6 +1560,10 @@ class GetHeatmapPayloadSchema(_TimedSchema):
     click_rage: bool = Field(default=False)


+class GetHeatmapBasePayloadSchema(BaseModel):
+    url: str = Field(...)
+
+
 class FeatureFlagVariant(BaseModel):
     variant_id: Optional[int] = Field(default=None)
     value: str = Field(...)
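The new GetHeatmapBasePayloadSchema carries only the required url field. A self-contained sketch of its validation behaviour, assuming Pydantic v2 as pinned in the Pipfiles in this commit:

    # Mirrors the class added above so the snippet runs without the project installed.
    from pydantic import BaseModel, Field, ValidationError

    class GetHeatmapBasePayloadSchema(BaseModel):
        url: str = Field(...)

    print(GetHeatmapBasePayloadSchema(url="https://example.com/pricing").model_dump())
    # -> {'url': 'https://example.com/pricing'}

    try:
        GetHeatmapBasePayloadSchema()  # 'url' is required, so validation fails
    except ValidationError as err:
        print(f"{err.error_count()} validation error")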
@@ -6,22 +6,22 @@ name = "pypi"
 [packages]
 urllib3 = "==1.26.16"
 requests = "==2.32.3"
-boto3 = "==1.34.125"
+boto3 = "==1.34.134"
 pyjwt = "==2.8.0"
 psycopg2-binary = "==2.9.9"
-psycopg = {extras = ["binary", "pool"], version = "==3.1.19"}
 elasticsearch = "==8.14.0"
 jira = "==3.8.0"
 fastapi = "==0.111.0"
-uvicorn = {extras = ["standard"], version = "==0.30.1"}
 gunicorn = "==22.0.0"
 python-decouple = "==3.8"
-pydantic = {extras = ["email"], version = "==2.3.0"}
 apscheduler = "==3.10.4"
-clickhouse-driver = {extras = ["lz4"], version = "==0.2.8"}
 python3-saml = "==1.16.0"
 redis = "==5.1.0b6"
 azure-storage-blob = "==12.21.0b1"
+psycopg = {extras = ["pool", "binary"], version = "==3.1.19"}
+uvicorn = {extras = ["standard"], version = "==0.30.1"}
+pydantic = {extras = ["email"], version = "==2.3.0"}
+clickhouse-driver = {extras = ["lz4"], version = "==0.2.8"}

 [dev-packages]
@@ -90,6 +90,36 @@ def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
     return helper.list_to_camel_case(rows)


+def get_by_url_and_session_id(project_id, session_id, data: schemas.GetHeatmapBasePayloadSchema):
+    args = {"project_id": project_id, "session_id": session_id, "url": data.url}
+    constraints = ["main_events.project_id = toUInt16(%(project_id)s)",
+                   "main_events.session_id = %(session_id)s",
+                   "(main_events.url_hostpath = %(url)s OR main_events.url_path = %(url)s)",
+                   "main_events.event_type='CLICK'",
+                   "isNotNull(main_events.normalized_x)"]
+    query_from = f"{exp_ch_helper.get_main_events_table(0)} AS main_events"
+
+    with ch_client.ClickHouseClient() as cur:
+        query = cur.format(f"""SELECT main_events.normalized_x AS normalized_x,
+                                      main_events.normalized_y AS normalized_y
+                               FROM {query_from}
+                               WHERE {" AND ".join(constraints)};""", args)
+        logger.debug("---------")
+        logger.debug(query)
+        logger.debug("---------")
+        try:
+            rows = cur.execute(query)
+        except Exception as err:
+            logger.warning("--------- HEATMAP-session_id SEARCH QUERY EXCEPTION CH -----------")
+            logger.warning(query)
+            logger.warning("--------- PAYLOAD -----------")
+            logger.warning(data)
+            logger.warning("--------------------")
+            raise err
+
+    return helper.list_to_camel_case(rows)
+
+
 if not config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
     # this part is identical to FOSS
     SESSION_PROJECTION_COLS = """s.project_id,
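For reference, a small sketch that assembles the same SQL text the ClickHouse variant above formats, with the events table name hard-coded as an assumption (exp_ch_helper.get_main_events_table(0) is project-specific and not reproduced here):

    # Rebuilds the query string from the constraints shown in this hunk; the
    # table name "experimental.events" is an assumed stand-in.
    constraints = ["main_events.project_id = toUInt16(%(project_id)s)",
                   "main_events.session_id = %(session_id)s",
                   "(main_events.url_hostpath = %(url)s OR main_events.url_path = %(url)s)",
                   "main_events.event_type='CLICK'",
                   "isNotNull(main_events.normalized_x)"]
    query_from = "experimental.events AS main_events"  # assumption

    print(f"""SELECT main_events.normalized_x AS normalized_x,
           main_events.normalized_y AS normalized_y
    FROM {query_from}
    WHERE {" AND ".join(constraints)};""")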
@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.32.3
-boto3==1.34.125
+boto3==1.34.134
 pyjwt==2.8.0
 psycopg2-binary==2.9.9
 psycopg[pool,binary]==3.1.19
@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.32.3
-boto3==1.34.125
+boto3==1.34.134
 pyjwt==2.8.0
 psycopg2-binary==2.9.9
 psycopg[pool,binary]==3.1.19
@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.32.3
-boto3==1.34.125
+boto3==1.34.134
 pyjwt==2.8.0
 psycopg2-binary==2.9.9
 psycopg[pool,binary]==3.1.19
@@ -446,6 +446,14 @@ def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema =
     return {"data": heatmaps.get_by_url(project_id=projectId, data=data)}


+@app.post('/{projectId}/sessions/{sessionId}/heatmaps/url', tags=["heatmaps"],
+          dependencies=[OR_scope(Permissions.session_replay)])
+def get_heatmaps_by_session_id_url(projectId: int, sessionId: int,
+                                   data: schemas.GetHeatmapBasePayloadSchema = Body(...),
+                                   context: schemas.CurrentContext = Depends(OR_context)):
+    return {"data": heatmaps.get_by_url_and_session_id(project_id=projectId, session_id=sessionId, data=data)}
+
+
 @app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"],
          dependencies=[OR_scope(Permissions.session_replay)])
 def add_remove_favorite_session2(projectId: int, sessionId: int,