Dev (#2420)
* refactor(chalice): upgraded dependencies
* feat(chalice): support heatmaps
* feat(chalice): support table-of-browsers showing user-count
* feat(chalice): support table-of-devices showing user-count
* feat(chalice): support table-of-URLs showing user-count
* fix(chalice): fixed Math-operators validation
* refactor(chalice): search for sessions that have at least 1 location event for heatmaps
* refactor(chalice): refactored search sessions hooks
* refactor(chalice): refactored schemas
* refactor(chalice): cleaned scripts
* feat(chalice): search sessions by CSS selector (PG)
* feat(chalice): get top 10 values for autocomplete (CH)
* refactor(chalice): cleaned code
* refactor(crons): upgraded dependencies
* refactor(alerts): upgraded dependencies
parent 420d8c43b1
commit d05ddebeb9
9 changed files with 104 additions and 63 deletions

@@ -236,47 +236,6 @@ def get_keys_by_projects(project_ids):
     return results
 
 
-# def add_edit_delete(tenant_id, project_id, new_metas):
-#     old_metas = get(project_id)
-#     old_indexes = [k["index"] for k in old_metas]
-#     new_indexes = [k["index"] for k in new_metas if "index" in k]
-#     new_keys = [k["key"] for k in new_metas]
-#
-#     add_metas = [k["key"] for k in new_metas
-#                  if "index" not in k]
-#     new_metas = {k["index"]: {"key": k["key"]} for
-#                  k in new_metas if
-#                  "index" in k}
-#     old_metas = {k["index"]: {"key": k["key"]} for k in old_metas}
-#
-#     if len(new_keys) > 20:
-#         return {"errors": ["you cannot add more than 20 key"]}
-#     for k in new_metas.keys():
-#         if re.match(regex, new_metas[k]["key"]) is None:
-#             return {"errors": [f"invalid key {k}"]}
-#     for k in add_metas:
-#         if re.match(regex, k) is None:
-#             return {"errors": [f"invalid key {k}"]}
-#     if len(new_indexes) > len(set(new_indexes)):
-#         return {"errors": ["duplicate indexes"]}
-#     if len(new_keys) > len(set(new_keys)):
-#         return {"errors": ["duplicate keys"]}
-#     to_delete = list(set(old_indexes) - set(new_indexes))
-#
-#     with pg_client.PostgresClient() as cur:
-#         for d in to_delete:
-#             delete(tenant_id=tenant_id, project_id=project_id, index=d)
-#
-#         for k in add_metas:
-#             add(tenant_id=tenant_id, project_id=project_id, new_name=k)
-#
-#         for k in new_metas.keys():
-#             if new_metas[k]["key"].lower() != old_metas[k]["key"]:
-#                 edit(tenant_id=tenant_id, project_id=project_id, index=k, new_name=new_metas[k]["key"])
-#
-#     return {"data": get(project_id)}
-
-
 def get_remaining_metadata_with_count(tenant_id):
     all_projects = projects.get_projects(tenant_id=tenant_id)
     results = []

@@ -290,3 +249,15 @@ def get_remaining_metadata_with_count(tenant_id):
             {**p, "limit": MAX_INDEXES, "remaining": remaining, "count": len(used_metas[str(p["projectId"])])})
 
     return results
+
+
+def get_colname_by_key(project_id, key):
+    if key is None or len(key) == 0:
+        return None
+
+    meta_keys = get(project_id=project_id)
+    meta_keys = {m["key"]: m["index"] for m in meta_keys if m["key"] == key}
+    if len(meta_keys) == 0:
+        return None
+
+    return index_to_colname(meta_keys[key])
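
For context, a minimal runnable sketch of what the new get_colname_by_key helper resolves: a user-defined metadata key into the per-project column that stores it. The get() and index_to_colname() stubs below are hypothetical stand-ins for the module's Postgres-backed helpers, and the metadata_<i> column naming is an assumption for illustration only.

def index_to_colname(index):
    # assumption: metadata index i is stored in a column named metadata_<i>
    return f"metadata_{index}"

def get(project_id):
    # assumption: each project defines up to MAX_INDEXES key -> index pairs
    return [{"key": "plan", "index": 1}, {"key": "org", "index": 2}]

def get_colname_by_key(project_id, key):
    if key is None or len(key) == 0:
        return None
    meta_keys = {m["key"]: m["index"] for m in get(project_id) if m["key"] == key}
    if len(meta_keys) == 0:
        return None
    return index_to_colname(meta_keys[key])

print(get_colname_by_key(1, "plan"))     # -> metadata_1
print(get_colname_by_key(1, "missing"))  # -> None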

@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.32.3
-boto3==1.34.145
+boto3==1.34.147
 pyjwt==2.8.0
 psycopg2-binary==2.9.9
 psycopg[pool,binary]==3.2.1
@@ -11,7 +11,7 @@ jira==3.8.0
 
 
 fastapi==0.111.1
-uvicorn[standard]==0.30.1
+uvicorn[standard]==0.30.3
 python-decouple==3.8
 pydantic[email]==2.3.0
 apscheduler==3.10.4

@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.32.3
-boto3==1.34.145
+boto3==1.34.147
 pyjwt==2.8.0
 psycopg2-binary==2.9.9
 psycopg[pool,binary]==3.2.1
@@ -11,7 +11,7 @@ jira==3.8.0
 
 
 fastapi==0.111.1
-uvicorn[standard]==0.30.1
+uvicorn[standard]==0.30.3
 python-decouple==3.8
 pydantic[email]==2.3.0
 apscheduler==3.10.4

@@ -1,4 +1,4 @@
-from typing import Union
+from typing import Union, Optional
 
 from decouple import config
 from fastapi import Depends, Body, BackgroundTasks
@@ -10,7 +10,7 @@ from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assig
     log_tool_stackdriver, reset_password, log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, sessions, \
     log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, \
     assist, mobile, tenants, boarding, notifications, webhook, users, \
-    custom_metrics, saved_search, integrations_global, tags
+    custom_metrics, saved_search, integrations_global, tags, autocomplete
 from chalicelib.core.collaboration_msteams import MSTeams
 from chalicelib.core.collaboration_slack import Slack
 from or_dependencies import OR_context, OR_role
@@ -21,17 +21,19 @@ public_app, app, app_apikey = get_routers()
 
-@app.get('/{projectId}/autocomplete', tags=["autocomplete"])
+@app.get('/{projectId}/events/search', tags=["events"])
-def events_search(projectId: int, q: str,
+def events_search(projectId: int, q: Optional[str] = None,
                   type: Union[schemas.FilterType, schemas.EventType,
                               schemas.PerformanceEventType, schemas.FetchFilterType,
                               schemas.GraphqlFilterType, str] = None,
                   key: str = None, source: str = None, live: bool = False,
                   context: schemas.CurrentContext = Depends(OR_context)):
-    if len(q) == 0 and not type:
+    if type and (not q or len(q) == 0) \
+            and (schemas.FilterType.has_value(type) or schemas.EventType.has_value(type)):
+        # TODO: check if type is a valid value for autocomplete
+        return autocomplete.get_top_values(project_id=projectId, event_type=type, event_key=key)
+    elif (not q or len(q) == 0) and not type:
         return {"data": []}
+    elif type:
+        # TODO: return top values related to type
+        pass
 
     if live:
         return assist.autocomplete(project_id=projectId, q=q,
                                    key=key if key is not None else type)

@@ -6,7 +6,7 @@ name = "pypi"
 [packages]
 urllib3 = "==1.26.16"
 requests = "==2.32.3"
-boto3 = "==1.34.145"
+boto3 = "==1.34.147"
 pyjwt = "==2.8.0"
 psycopg2-binary = "==2.9.9"
 elasticsearch = "==8.14.0"
@@ -17,9 +17,9 @@ python-decouple = "==3.8"
 apscheduler = "==3.10.4"
 python3-saml = "==1.16.0"
 redis = "==5.1.0b6"
-azure-storage-blob = "==12.21.0b1"
+azure-storage-blob = "==12.21.0"
 psycopg = {extras = ["binary", "pool"], version = "==3.2.1"}
-uvicorn = {extras = ["standard"], version = "==0.30.1"}
+uvicorn = {extras = ["standard"], version = "==0.30.3"}
 pydantic = {extras = ["email"], version = "==2.3.0"}
 clickhouse-driver = {extras = ["lz4"], version = "==0.2.8"}

@@ -260,3 +260,71 @@ def __search_metadata(project_id, value, key=None, source=None):
                               "svalue": helper.string_to_sql_like("^" + value)})
         results = cur.execute(query)
     return helper.list_to_camel_case(results)
+
+
+TYPE_TO_COLUMN = {
+    schemas.EventType.CLICK: "label",
+    schemas.EventType.INPUT: "label",
+    schemas.EventType.LOCATION: "url_path",
+    schemas.EventType.CUSTOM: "name",
+    schemas.EventType.REQUEST: "url_path",
+    schemas.EventType.GRAPHQL: "name",
+    schemas.EventType.STATE_ACTION: "name",
+    # For ERROR, sessions search is happening over name OR message,
+    # for simplicity top 10 is using name only
+    schemas.EventType.ERROR: "name",
+    schemas.FilterType.USER_COUNTRY: "user_country",
+    schemas.FilterType.USER_CITY: "user_city",
+    schemas.FilterType.USER_STATE: "user_state",
+    schemas.FilterType.USER_ID: "user_id",
+    schemas.FilterType.USER_ANONYMOUS_ID: "user_anonymous_id",
+    schemas.FilterType.USER_OS: "user_os",
+    schemas.FilterType.USER_BROWSER: "user_browser",
+    schemas.FilterType.USER_DEVICE: "user_device",
+    schemas.FilterType.PLATFORM: "platform",
+    schemas.FilterType.REV_ID: "rev_id",
+    schemas.FilterType.REFERRER: "referrer",
+    schemas.FilterType.UTM_SOURCE: "utm_source",
+    schemas.FilterType.UTM_MEDIUM: "utm_medium",
+    schemas.FilterType.UTM_CAMPAIGN: "utm_campaign",
+}
+
+
+def get_top_values(project_id, event_type, event_key=None):
+    with ch_client.ClickHouseClient() as cur:
+        if schemas.FilterType.has_value(event_type):
+            if event_type == schemas.FilterType.METADATA \
+                    and (event_key is None \
+                         or (colname := metadata.get_colname_by_key(project_id=project_id, key=event_key)) is None) \
+                    or event_type != schemas.FilterType.METADATA \
+                    and (colname := TYPE_TO_COLUMN.get(event_type)) is None:
+                return []
+
+            query = f"""WITH raw AS (SELECT DISTINCT {colname} AS c_value,
+                                            COUNT(1) OVER (PARTITION BY {colname}) AS row_count,
+                                            COUNT(1) OVER () AS total_count
+                                     FROM experimental.sessions
+                                     WHERE project_id = %(project_id)s
+                                       AND isNotNull(c_value)
+                                       AND notEmpty(c_value)
+                                     ORDER BY row_count DESC
+                                     LIMIT 10)
+                        SELECT c_value AS value, row_count, truncate(row_count * 100 / total_count, 2) AS row_percentage
+                        FROM raw;"""
+        else:
+            colname = TYPE_TO_COLUMN.get(event_type)
+            query = f"""WITH raw AS (SELECT DISTINCT {colname} AS c_value,
+                                            COUNT(1) OVER (PARTITION BY c_value) AS row_count,
+                                            COUNT(1) OVER () AS total_count
+                                     FROM experimental.events
+                                     WHERE project_id = %(project_id)s
+                                       AND event_type = '{event_type.upper()}'
+                                       AND isNotNull(c_value)
+                                       AND notEmpty(c_value)
+                                     ORDER BY row_count DESC
+                                     LIMIT 10)
+                        SELECT c_value AS value, row_count, truncate(row_count * 100 / total_count, 2) AS row_percentage
+                        FROM raw;"""
+        params = {"project_id": project_id}
+        results = cur.execute(query=query, params=params)
+        return helper.list_to_camel_case(results)
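
For intuition, a pure-Python sketch (not part of the commit) of what the window-function query above computes: count rows per distinct value, keep the ten most frequent, and report each value's share of all rows truncated to two decimals:

from collections import Counter

def top_values_sketch(values, limit=10):
    values = [v for v in values if v]            # mirrors isNotNull / notEmpty
    total = len(values)                          # COUNT(1) OVER ()
    if total == 0:
        return []
    top = Counter(values).most_common(limit)     # row_count per value, top N
    return [{"value": v, "rowCount": n,
             "rowPercentage": int(n * 100 / total * 100) / 100}  # truncate(x, 2)
            for v, n in top]

print(top_values_sketch(["Chrome"] * 6 + ["Firefox"] * 3 + ["Safari"]))
# [{'value': 'Chrome', 'rowCount': 6, 'rowPercentage': 60.0}, ...]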

@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.32.3
-boto3==1.34.145
+boto3==1.34.147
 pyjwt==2.8.0
 psycopg2-binary==2.9.9
 psycopg[pool,binary]==3.2.1
@@ -11,10 +11,10 @@ jira==3.8.0
 
 
 fastapi==0.111.1
-uvicorn[standard]==0.30.1
+uvicorn[standard]==0.30.3
 python-decouple==3.8
 pydantic[email]==2.3.0
 apscheduler==3.10.4
 
 clickhouse-driver[lz4]==0.2.8
-azure-storage-blob==12.21.0b1
+azure-storage-blob==12.21.0

@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.32.3
-boto3==1.34.145
+boto3==1.34.147
 pyjwt==2.8.0
 psycopg2-binary==2.9.9
 psycopg[pool,binary]==3.2.1
@@ -17,4 +17,4 @@ apscheduler==3.10.4
 
 clickhouse-driver[lz4]==0.2.8
 redis==5.1.0b6
-azure-storage-blob==12.21.0b1
+azure-storage-blob==12.21.0

@@ -1,7 +1,7 @@
 # Keep this version to not have conflicts between requests and boto3
 urllib3==1.26.16
 requests==2.32.3
-boto3==1.34.145
+boto3==1.34.147
 pyjwt==2.8.0
 psycopg2-binary==2.9.9
 psycopg[pool,binary]==3.2.1
@@ -11,7 +11,7 @@ jira==3.8.0
 
 
 fastapi==0.111.1
-uvicorn[standard]==0.30.1
+uvicorn[standard]==0.30.3
 gunicorn==22.0.0
 python-decouple==3.8
 pydantic[email]==2.3.0
@@ -24,4 +24,4 @@ python3-saml==1.16.0 --no-binary=lxml
 
 redis==5.1.0b6
 #confluent-kafka==2.1.0
-azure-storage-blob==12.21.0b1
+azure-storage-blob==12.21.0