commit 4ec3d78bbb
263 changed files with 7960 additions and 2693 deletions

@@ -47,7 +47,7 @@ OpenReplay is a session replay suite you can host yourself, that lets you see wh
 - **Omni-search:** Search and filter by almost any user action/criteria, session attribute or technical event, so you can answer any question. No instrumentation required.
 - **Funnels:** For surfacing the most impactful issues causing conversion and revenue loss.
 - **Fine-grained privacy controls:** Choose what to capture, what to obscure or what to ignore so user data doesn't even reach your servers.
-- **Plugins oriented:** Get to the root cause even faster by tracking application state (Redux, VueX, MobX, NgRx) and logging GraphQL queries (Apollo, Relay) and Fetch requests.
+- **Plugins oriented:** Get to the root cause even faster by tracking application state (Redux, VueX, MobX, NgRx, Pinia and Zustand) and logging GraphQL queries (Apollo, Relay) and Fetch/Axios requests.
 - **Integrations:** Sync your backend logs with your session replays and see what happened front-to-back. OpenReplay supports Sentry, Datadog, CloudWatch, Stackdriver, Elastic and more.

 ## Deployment Options

@@ -4,3 +4,4 @@
 **/build.sh
+**/build_*.sh
 **/*deploy.sh
 Dockerfile*

@@ -8,6 +8,7 @@ ARG envarg
 ENV SOURCE_MAP_VERSION=0.7.4 \
     APP_NAME=chalice \
     LISTEN_PORT=8000 \
+    MAPPING_WASM=/work/sourcemap-reader/mappings.wasm \
     ENTERPRISE_BUILD=${envarg}

 ADD https://unpkg.com/source-map@${SOURCE_MAP_VERSION}/lib/mappings.wasm /mappings.wasm
@@ -20,7 +21,8 @@ RUN cd /work_tmp && npm install

 WORKDIR /work
 COPY . .
-RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/. && chmod 644 /mappings.wasm
+RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/. \
+    && mv /mappings.wasm ${MAPPING_WASM} && chmod 644 ${MAPPING_WASM}

 RUN adduser -u 1001 openreplay -D
 USER 1001

@@ -4,6 +4,7 @@
 **/build.sh
+**/build_*.sh
 **/*deploy.sh
 Dockerfile*

 app.py
 entrypoint_alerts.sh

@@ -17,6 +17,7 @@ from routers.subs import dashboard, insights, metrics, v1_api
 app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""))
+app.add_middleware(GZipMiddleware, minimum_size=1000)


 @app.middleware('http')
 async def or_middleware(request: Request, call_next):
     global OR_SESSION_TOKEN
@@ -28,7 +29,9 @@ async def or_middleware(request: Request, call_next):
         now = int(time.time() * 1000)
         response: StreamingResponse = await call_next(request)
         if helper.TRACK_TIME:
-            print(f"Execution time: {int(time.time() * 1000) - now} ms")
+            now = int(time.time() * 1000) - now
+            if now > 500:
+                print(f"Execution time: {now} ms")
     except Exception as e:
         pg_client.close()
         raise e

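The hunk above makes the timing middleware report only slow requests instead of printing a line for every request. A minimal, self-contained sketch of the same pattern (plain FastAPI, without OpenReplay's helpers, globals, or the pg_client cleanup) could look like this:

```python
# Hedged sketch: log request duration only when it crosses a 500 ms threshold.
import time

from fastapi import FastAPI, Request

app = FastAPI()


@app.middleware("http")
async def slow_request_logger(request: Request, call_next):
    start = int(time.time() * 1000)
    response = await call_next(request)
    elapsed = int(time.time() * 1000) - start
    if elapsed > 500:  # only surface unusually slow requests
        print(f"Execution time: {elapsed} ms")
    return response
```
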
@@ -6,6 +6,8 @@ from chalicelib.core import projects
 from starlette.exceptions import HTTPException
 from os import access, R_OK

+ASSIST_KEY = config("ASSIST_KEY")
+ASSIST_URL = config("ASSIST_URL") % ASSIST_KEY
 SESSION_PROJECTION_COLS = """s.project_id,
        s.session_id::text AS session_id,
        s.user_uuid,
@@ -47,7 +49,7 @@ def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSche
 def __get_live_sessions_ws(project_id, data):
     project_key = projects.get_project_key(project_id)
     try:
-        connected_peers = requests.post(config("ASSIST_URL") + config("assist") % config("S3_KEY") + f"/{project_key}",
+        connected_peers = requests.post(ASSIST_URL + config("assist") + f"/{project_key}",
                                         json=data, timeout=config("assistTimeout", cast=int, default=5))
         if connected_peers.status_code != 200:
             print("!! issue with the peer-server")
@@ -78,9 +80,8 @@ def __get_live_sessions_ws(project_id, data):
 def get_live_session_by_id(project_id, session_id):
     project_key = projects.get_project_key(project_id)
     try:
-        connected_peers = requests.get(
-            config("ASSIST_URL") + config("assist") % config("S3_KEY") + f"/{project_key}/{session_id}",
-            timeout=config("assistTimeout", cast=int, default=5))
+        connected_peers = requests.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
+                                       timeout=config("assistTimeout", cast=int, default=5))
         if connected_peers.status_code != 200:
             print("!! issue with the peer-server")
             print(connected_peers.text)
@@ -108,9 +109,8 @@ def is_live(project_id, session_id, project_key=None):
     if project_key is None:
         project_key = projects.get_project_key(project_id)
     try:
-        connected_peers = requests.get(
-            config("ASSIST_URL") + config("assistList") % config("S3_KEY") + f"/{project_key}/{session_id}",
-            timeout=config("assistTimeout", cast=int, default=5))
+        connected_peers = requests.get(ASSIST_URL + config("assistList") + f"/{project_key}/{session_id}",
+                                       timeout=config("assistTimeout", cast=int, default=5))
         if connected_peers.status_code != 200:
             print("!! issue with the peer-server")
             print(connected_peers.text)
@@ -138,7 +138,7 @@ def autocomplete(project_id, q: str, key: str = None):
         params["key"] = key
     try:
         results = requests.get(
-            config("ASSIST_URL") + config("assistList") % config("S3_KEY") + f"/{project_key}/autocomplete",
+            ASSIST_URL + config("assistList") + f"/{project_key}/autocomplete",
             params=params, timeout=config("assistTimeout", cast=int, default=5))
         if results.status_code != 200:
             print("!! issue with the peer-server")

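These hunks build the peer-server URL once at import time instead of formatting `config("ASSIST_URL")` with `config("S3_KEY")` on every call. A rough sketch of the new construction, using illustrative values rather than the project's real configuration (the host, key and project key below are stand-ins; the endpoint names mirror env.default further down in this commit):

```python
# Hedged sketch of the ASSIST_URL templating; values are illustrative only.
import os

# Stand-in configuration, not the project's real defaults.
os.environ.setdefault("ASSIST_URL", "http://assist.example.local:9001/assist/%s")
os.environ.setdefault("ASSIST_KEY", "assist-key-placeholder")
os.environ.setdefault("assist", "/sockets-live")

from decouple import config

ASSIST_KEY = config("ASSIST_KEY")
ASSIST_URL = config("ASSIST_URL") % ASSIST_KEY  # fill the %s placeholder once

project_key = "demo-project-key"  # hypothetical
endpoint = ASSIST_URL + config("assist") + f"/{project_key}"
print(endpoint)
# -> http://assist.example.local:9001/assist/assist-key-placeholder/sockets-live/demo-project-key
```
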
@@ -52,7 +52,6 @@ def __get_autocomplete_table(value, project_id):
         "c_list": tuple(c_list)
     })
     try:
-        print(query)
         cur.execute(query)
     except Exception as err:
         print("--------- AUTOCOMPLETE SEARCH QUERY EXCEPTION -----------")

@@ -1,6 +1,5 @@
-from chalicelib.utils.TimeUTC import TimeUTC
 from chalicelib.utils import helper, pg_client
-from chalicelib.utils import dev
+from chalicelib.utils.TimeUTC import TimeUTC


 def get_by_url(project_id, data):
@@ -22,8 +21,14 @@ def get_by_url(project_id, data):
                             GROUP BY selector;""",
                         args)

-        cur.execute(
-            query
-        )
+        try:
+            cur.execute(query)
+        except Exception as err:
+            print("--------- HEATMAP SEARCH QUERY EXCEPTION -----------")
+            print(query.decode('UTF-8'))
+            print("--------- PAYLOAD -----------")
+            print(data)
+            print("--------------------")
+            raise err
         rows = cur.fetchall()
-        return helper.dict_to_camel_case(rows)
+        return helper.dict_to_camel_case(rows)

@@ -1632,7 +1632,7 @@ def __get_domains_errors_4xx_and_5xx(status, project_id, startTimestamp=TimeUTC.
     pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=False, chart=True,
                                            data=args, main_table="requests", time_column="timestamp", project=False,
                                            duration=False)
-    pg_sub_query_subset.append("requests.status/100 = %(status_code)s")
+    pg_sub_query_subset.append("requests.status_code/100 = %(status_code)s")

     with pg_client.PostgresClient() as cur:
         pg_query = f"""WITH requests AS (SELECT host, timestamp
@@ -1810,7 +1810,7 @@ def __get_calls_errors_4xx_or_5xx(status, project_id, startTimestamp=TimeUTC.now
     pg_sub_query = __get_constraints(project_id=project_id, data=args)
     pg_sub_query.append("requests.type = 'fetch'")
     pg_sub_query.append("requests.method IS NOT NULL")
-    pg_sub_query.append(f"requests.status/100 = {status}")
+    pg_sub_query.append(f"requests.status_code/100 = {status}")

     with pg_client.PostgresClient() as cur:
         pg_query = f"""SELECT requests.method,

@@ -90,7 +90,7 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
             r.pop("first_recorded_session_at")
             r.pop("first_recorded")

-        if recording_state:
+        if recording_state and len(rows) > 0:
             project_ids = [f'({r["project_id"]})' for r in rows]
             query = cur.mogrify(f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last
                                     FROM (VALUES {",".join(project_ids)}) AS projects(project_id)

@@ -4,8 +4,7 @@ from chalicelib.utils import email_helper, captcha, helper


 def reset(data: schemas.ForgetPasswordPayloadSchema):
-    print("====================== reset password ===============")
-    print(data)
+    print(f"====================== reset password {data.email}")
     if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
         print("error: Invalid captcha.")
         return {"errors": ["Invalid captcha."]}

@@ -495,7 +495,7 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
                                                          all_issues_with_context,
                                                          first_stage, last_stage)

-    print("len(transitions) =", len(transitions))
+    # print("len(transitions) =", len(transitions))

     if any(all_errors):
         total_drop_corr, conf, is_sign = pearson_corr(transitions, all_errors)

@@ -80,12 +80,7 @@ def get_traces_group(project_id, payload):
     payloads = {}
     all_exists = True
     for i, u in enumerate(frames):
-        print("===============================")
-        print(u["absPath"])
-        print("converted to:")
         key = __get_key(project_id, u["absPath"])  # use filename instead?
-        print(key)
-        print("===============================")
         if key not in payloads:
             file_exists = s3.exists(config('sourcemaps_bucket'), key)
             all_exists = all_exists and file_exists
@@ -104,6 +99,9 @@ def get_traces_group(project_id, payload):
         if payloads[key] is None:
             continue
         key_results = sourcemaps_parser.get_original_trace(key=key, positions=[o["position"] for o in payloads[key]])
+        if key_results is None:
+            all_exists = False
+            continue
         for i, r in enumerate(key_results):
             res_index = payloads[key][i]["resultIndex"]
             # function name search by frontend lib is better than sourcemaps' one in most cases

@@ -2,20 +2,33 @@ import requests

 from decouple import config

+SMR_URL = config("sourcemaps_reader")
+
+if '%s' in SMR_URL:
+    if config("SMR_KEY", default=None) is not None:
+        SMR_URL = SMR_URL % config("SMR_KEY")
+    else:
+        SMR_URL = SMR_URL % "smr"
+

 def get_original_trace(key, positions):
     payload = {
         "key": key,
         "positions": positions,
         "padding": 5,
-        "bucket": config('sourcemaps_bucket'),
-        "S3_HOST": config('S3_HOST'),
-        "S3_KEY": config('S3_KEY'),
-        "S3_SECRET": config('S3_SECRET'),
-        "region": config('sessions_region')
+        "bucket": config('sourcemaps_bucket')
     }
-    r = requests.post(config("sourcemaps_reader"), json=payload)
-    if r.status_code != 200:
-        return {}
-
-    return r.json()
+    try:
+        r = requests.post(SMR_URL, json=payload, timeout=config("sourcemapTimeout", cast=int, default=5))
+        if r.status_code != 200:
+            print(f"Issue getting sourcemap status_code:{r.status_code}")
+            return None
+        return r.json()
+    except requests.exceptions.Timeout:
+        print("Timeout getting sourcemap")
+        return None
+    except Exception as e:
+        print("Issue getting sourcemap")
+        print(e)
+        return None

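The parser now resolves the `%s` placeholder in `sourcemaps_reader` up front (with `SMR_KEY`, falling back to the literal `smr`) and guards the POST with a timeout, returning `None` instead of `{}` on failure so callers such as `get_traces_group` can detect missing results. A small sketch of that guard, with illustrative configuration values:

```python
# Hedged sketch of the sourcemap-reader call pattern; URL and key are stand-ins.
import os

os.environ.setdefault("sourcemaps_reader", "http://127.0.0.1:9000/sourcemaps/%s/sourcemaps")

import requests
from decouple import config

SMR_URL = config("sourcemaps_reader")
if "%s" in SMR_URL:
    SMR_URL = SMR_URL % config("SMR_KEY", default="smr")


def post_to_reader(payload: dict, timeout_s: int = 5):
    """Return the parsed response, or None on any failure (same contract as above)."""
    try:
        r = requests.post(SMR_URL, json=payload, timeout=timeout_s)
        if r.status_code != 200:
            return None
        return r.json()
    except requests.exceptions.RequestException:
        return None
```
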
@@ -20,6 +20,8 @@ PG_CONFIG = dict(_PG_CONFIG)
 if config("pg_timeout", cast=int, default=0) > 0:
     PG_CONFIG["options"] = f"-c statement_timeout={config('pg_timeout', cast=int) * 1000}"

+logging.info(f">PG_POOL:{config('PG_POOL', default=None)}")
+

 class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool):
     def __init__(self, minconn, maxconn, *args, **kwargs):
@@ -36,8 +38,15 @@ class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool):
             raise e

     def putconn(self, *args, **kwargs):
-        super().putconn(*args, **kwargs)
-        self._semaphore.release()
+        try:
+            super().putconn(*args, **kwargs)
+            self._semaphore.release()
+        except psycopg2.pool.PoolError as e:
+            if str(e) == "trying to put unkeyed connection":
+                print("!!! trying to put unkeyed connection")
+                print(f"env-PG_POOL:{config('PG_POOL', default=None)}")
+                return
+            raise e


 postgreSQL_pool: ORThreadedConnectionPool = None

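The `putconn` change tolerates psycopg2's "trying to put unkeyed connection" error so the semaphore bookkeeping does not crash the request path. A stripped-down sketch of the bounded-pool pattern (not the project's exact class; the getconn acquire/release pairing below is an assumption):

```python
# Hedged sketch of a semaphore-bounded psycopg2 pool; the real ORThreadedConnectionPool
# differs in details, and the getconn override here is assumed for illustration.
import threading

import psycopg2.pool


class BoundedPool(psycopg2.pool.ThreadedConnectionPool):
    def __init__(self, minconn, maxconn, *args, **kwargs):
        self._semaphore = threading.BoundedSemaphore(maxconn)
        super().__init__(minconn, maxconn, *args, **kwargs)

    def getconn(self, *args, **kwargs):
        self._semaphore.acquire()  # block when every connection is checked out
        try:
            return super().getconn(*args, **kwargs)
        except Exception:
            self._semaphore.release()
            raise

    def putconn(self, *args, **kwargs):
        try:
            super().putconn(*args, **kwargs)
            self._semaphore.release()
        except psycopg2.pool.PoolError as e:
            if str(e) == "trying to put unkeyed connection":
                return  # connection was never handed out by this pool; ignore
            raise
```
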
@@ -63,9 +63,6 @@ def get_presigned_url_for_upload(bucket, expires_in, key):


 def get_file(source_bucket, source_key):
-    print("******************************")
-    print(f"looking for: {source_key} in {source_bucket}")
-    print("******************************")
     try:
         result = client.get_object(
             Bucket=source_bucket,
@@ -73,7 +70,7 @@ def get_file(source_bucket, source_key):
         )
     except ClientError as ex:
         if ex.response['Error']['Code'] == 'NoSuchKey':
-            print(f'======> No object found - returning None for {source_bucket}/{source_key}')
+            print(f'======> No object found - returning None for \nbucket:{source_bucket}\nkey:{source_key}')
             return None
         else:
             raise ex

@@ -1,5 +1,5 @@
 #!/bin/sh
 cd sourcemap-reader
-nohup npm start &> /tmp/sourcemap-reader.log &
+nohup npm start &
 cd ..
 uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload --proxy-headers

@@ -26,9 +26,9 @@ jwt_algorithm=HS512
 jwt_exp_delta_seconds=2592000
 jwt_issuer=openreplay-default-foss
 jwt_secret="SET A RANDOM STRING HERE"
-ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001
-assist=/assist/%s/sockets-live
-assistList=/assist/%s/sockets-list
+ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s
+assist=/sockets-live
+assistList=/sockets-list
 pg_dbname=postgres
 pg_host=postgresql.db.svc.cluster.local
 pg_password=asayerPostgres
@@ -45,7 +45,7 @@ sentryURL=
 sessions_bucket=mobs
 sessions_region=us-east-1
 sourcemaps_bucket=sourcemaps
-sourcemaps_reader=http://127.0.0.1:9000/sourcemaps
+sourcemaps_reader=http://127.0.0.1:9000/sourcemaps/%s/sourcemaps
 stage=default-foss
 version_number=1.4.0
 FS_DIR=/mnt/efs

@ -37,11 +37,19 @@ func main() {
|
|||
func(sessionID uint64, iter messages.Iterator, meta *types.Meta) {
|
||||
for iter.Next() {
|
||||
if iter.Type() == messages.MsgAssetCache {
|
||||
msg := iter.Message().Decode().(*messages.AssetCache)
|
||||
m := iter.Message().Decode()
|
||||
if m == nil {
|
||||
return
|
||||
}
|
||||
msg := m.(*messages.AssetCache)
|
||||
cacher.CacheURL(sessionID, msg.URL)
|
||||
totalAssets.Add(context.Background(), 1)
|
||||
} else if iter.Type() == messages.MsgErrorEvent {
|
||||
msg := iter.Message().Decode().(*messages.ErrorEvent)
|
||||
m := iter.Message().Decode()
|
||||
if m == nil {
|
||||
return
|
||||
}
|
||||
msg := m.(*messages.ErrorEvent)
|
||||
if msg.Source != "js_exception" {
|
||||
continue
|
||||
}
|
||||
|
|
@ -55,6 +63,7 @@ func main() {
|
|||
}
|
||||
}
|
||||
}
|
||||
iter.Close()
|
||||
},
|
||||
true,
|
||||
cfg.MessageSizeLimit,
|
||||
|
|
|
|||
|
|
@ -69,6 +69,9 @@ func main() {
|
|||
continue
|
||||
}
|
||||
msg := iter.Message().Decode()
|
||||
if msg == nil {
|
||||
return
|
||||
}
|
||||
|
||||
// Just save session data into db without additional checks
|
||||
if err := saver.InsertMessage(sessionID, msg); err != nil {
|
||||
|
|
@ -109,6 +112,7 @@ func main() {
|
|||
}
|
||||
})
|
||||
}
|
||||
iter.Close()
|
||||
}
|
||||
|
||||
// Init consumer
|
||||
|
|
@ -142,7 +146,7 @@ func main() {
|
|||
pgDur := time.Now().Sub(start).Milliseconds()
|
||||
|
||||
start = time.Now()
|
||||
if err := saver.CommitStats(); err != nil {
|
||||
if err := saver.CommitStats(consumer.HasFirstPartition()); err != nil {
|
||||
log.Printf("Error on stats commit: %v", err)
|
||||
}
|
||||
chDur := time.Now().Sub(start).Milliseconds()
|
||||
|
|
|
|||
|
|
@ -54,6 +54,7 @@ func main() {
|
|||
statsLogger.Collect(sessionID, meta)
|
||||
sessions.UpdateSession(sessionID, meta.Timestamp, iter.Message().Meta().Timestamp)
|
||||
}
|
||||
iter.Close()
|
||||
},
|
||||
false,
|
||||
cfg.MessageSizeLimit,
|
||||
|
|
|
|||
|
|
@ -53,10 +53,18 @@ func main() {
|
|||
cfg.TopicRawWeb,
|
||||
},
|
||||
func(sessionID uint64, iter messages.Iterator, meta *types.Meta) {
|
||||
var lastMessageID uint64
|
||||
for iter.Next() {
|
||||
statsLogger.Collect(sessionID, meta)
|
||||
builderMap.HandleMessage(sessionID, iter.Message().Decode(), iter.Message().Meta().Index)
|
||||
msg := iter.Message().Decode()
|
||||
if msg == nil {
|
||||
log.Printf("failed batch, sess: %d, lastIndex: %d", sessionID, lastMessageID)
|
||||
continue
|
||||
}
|
||||
lastMessageID = msg.Meta().Index
|
||||
builderMap.HandleMessage(sessionID, msg, iter.Message().Meta().Index)
|
||||
}
|
||||
iter.Close()
|
||||
},
|
||||
false,
|
||||
cfg.MessageSizeLimit,
|
||||
|
|
|
|||
|
|
@ -76,7 +76,11 @@ func main() {
|
|||
iter.Type() == MsgCSSInsertRuleURLBased ||
|
||||
iter.Type() == MsgAdoptedSSReplaceURLBased ||
|
||||
iter.Type() == MsgAdoptedSSInsertRuleURLBased {
|
||||
msg = assetMessageHandler.ParseAssets(sessionID, msg.Decode()) // TODO: filter type only once (use iterator inide or bring ParseAssets out here).
|
||||
m := msg.Decode()
|
||||
if m == nil {
|
||||
return
|
||||
}
|
||||
msg = assetMessageHandler.ParseAssets(sessionID, m) // TODO: filter type only once (use iterator inide or bring ParseAssets out here).
|
||||
}
|
||||
|
||||
// Filter message
|
||||
|
|
@ -103,6 +107,7 @@ func main() {
|
|||
messageSize.Record(context.Background(), float64(len(data)))
|
||||
savedMessages.Add(context.Background(), 1)
|
||||
}
|
||||
iter.Close()
|
||||
},
|
||||
false,
|
||||
cfg.MessageSizeLimit,
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ go 1.18
|
|||
require (
|
||||
cloud.google.com/go/logging v1.4.2
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.2.0
|
||||
github.com/aws/aws-sdk-go v1.35.23
|
||||
github.com/aws/aws-sdk-go v1.44.98
|
||||
github.com/btcsuite/btcutil v1.0.2
|
||||
github.com/elastic/go-elasticsearch/v7 v7.13.1
|
||||
github.com/go-redis/redis v6.15.9+incompatible
|
||||
|
|
@ -13,9 +13,9 @@ require (
|
|||
github.com/gorilla/mux v1.8.0
|
||||
github.com/jackc/pgconn v1.6.0
|
||||
github.com/jackc/pgerrcode v0.0.0-20201024163028-a0d42d470451
|
||||
github.com/jackc/pgtype v1.3.0
|
||||
github.com/jackc/pgx/v4 v4.6.0
|
||||
github.com/klauspost/pgzip v1.2.5
|
||||
github.com/lib/pq v1.2.0
|
||||
github.com/oschwald/maxminddb-golang v1.7.0
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/sethvargo/go-envconfig v0.7.0
|
||||
|
|
@ -49,7 +49,6 @@ require (
|
|||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||
github.com/jackc/pgproto3/v2 v2.0.2 // indirect
|
||||
github.com/jackc/pgservicefile v0.0.0-20200307190119-3430c5407db8 // indirect
|
||||
github.com/jackc/pgtype v1.3.0 // indirect
|
||||
github.com/jackc/puddle v1.2.2-0.20220404125616-4e959849469a // indirect
|
||||
github.com/jmespath/go-jmespath v0.4.0 // indirect
|
||||
github.com/klauspost/compress v1.15.7 // indirect
|
||||
|
|
|
|||
|
|
@ -73,8 +73,8 @@ github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRF
|
|||
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
|
||||
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
|
||||
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
|
||||
github.com/aws/aws-sdk-go v1.35.23 h1:SCP0d0XvyJTDmfnHEQPvBaYi3kea1VNUo7uQmkVgFts=
|
||||
github.com/aws/aws-sdk-go v1.35.23/go.mod h1:tlPOdRjfxPBpNIwqDj61rmsnA85v9jc0Ps9+muhnW+k=
|
||||
github.com/aws/aws-sdk-go v1.44.98 h1:fX+NxebSdO/9T6DTNOLhpC+Vv6RNkKRfsMg0a7o/yBo=
|
||||
github.com/aws/aws-sdk-go v1.44.98/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo=
|
||||
github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A=
|
||||
github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
|
||||
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
|
||||
|
|
|
|||
|
|
@@ -22,6 +22,6 @@ func (si *Saver) InsertStats(session *Session, msg Message) error {
 	return nil
 }

-func (si *Saver) CommitStats() error {
+func (si *Saver) CommitStats(optimize bool) error {
 	return nil
 }

@ -11,6 +11,7 @@ type Iterator interface {
|
|||
Next() bool // Return true if we have next message
|
||||
Type() int // Return type of the next message
|
||||
Message() Message // Return raw or decoded message
|
||||
Close()
|
||||
}
|
||||
|
||||
type iteratorImpl struct {
|
||||
|
|
@ -90,10 +91,14 @@ func (i *iteratorImpl) Next() bool {
|
|||
switch i.msgType {
|
||||
case MsgBatchMetadata:
|
||||
if i.index != 0 { // Might be several 0-0 BatchMeta in a row without an error though
|
||||
log.Printf("Batch Meta found at the end of the batch")
|
||||
log.Printf("Batch Metadata found at the end of the batch")
|
||||
return false
|
||||
}
|
||||
m := i.msg.Decode().(*BatchMetadata)
|
||||
msg := i.msg.Decode()
|
||||
if msg == nil {
|
||||
return false
|
||||
}
|
||||
m := msg.(*BatchMetadata)
|
||||
i.index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha)
|
||||
i.timestamp = m.Timestamp
|
||||
i.version = m.Version
|
||||
|
|
@ -108,7 +113,11 @@ func (i *iteratorImpl) Next() bool {
|
|||
log.Printf("Batch Meta found at the end of the batch")
|
||||
return false
|
||||
}
|
||||
m := i.msg.Decode().(*BatchMeta)
|
||||
msg := i.msg.Decode()
|
||||
if msg == nil {
|
||||
return false
|
||||
}
|
||||
m := msg.(*BatchMeta)
|
||||
i.index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha)
|
||||
i.timestamp = m.Timestamp
|
||||
isBatchMeta = true
|
||||
|
|
@ -118,24 +127,44 @@ func (i *iteratorImpl) Next() bool {
|
|||
log.Printf("Batch Meta found at the end of the batch")
|
||||
return false
|
||||
}
|
||||
m := i.msg.Decode().(*IOSBatchMeta)
|
||||
msg := i.msg.Decode()
|
||||
if msg == nil {
|
||||
return false
|
||||
}
|
||||
m := msg.(*IOSBatchMeta)
|
||||
i.index = m.FirstIndex
|
||||
i.timestamp = int64(m.Timestamp)
|
||||
isBatchMeta = true
|
||||
// continue readLoop
|
||||
case MsgTimestamp:
|
||||
m := i.msg.Decode().(*Timestamp)
|
||||
msg := i.msg.Decode()
|
||||
if msg == nil {
|
||||
return false
|
||||
}
|
||||
m := msg.(*Timestamp)
|
||||
i.timestamp = int64(m.Timestamp)
|
||||
// No skipping here for making it easy to encode back the same sequence of message
|
||||
// continue readLoop
|
||||
case MsgSessionStart:
|
||||
m := i.msg.Decode().(*SessionStart)
|
||||
msg := i.msg.Decode()
|
||||
if msg == nil {
|
||||
return false
|
||||
}
|
||||
m := msg.(*SessionStart)
|
||||
i.timestamp = int64(m.Timestamp)
|
||||
case MsgSessionEnd:
|
||||
m := i.msg.Decode().(*SessionEnd)
|
||||
msg := i.msg.Decode()
|
||||
if msg == nil {
|
||||
return false
|
||||
}
|
||||
m := msg.(*SessionEnd)
|
||||
i.timestamp = int64(m.Timestamp)
|
||||
case MsgSetPageLocation:
|
||||
m := i.msg.Decode().(*SetPageLocation)
|
||||
msg := i.msg.Decode()
|
||||
if msg == nil {
|
||||
return false
|
||||
}
|
||||
m := msg.(*SetPageLocation)
|
||||
i.url = m.URL
|
||||
}
|
||||
i.msg.Meta().Index = i.index
|
||||
|
|
@ -156,6 +185,13 @@ func (i *iteratorImpl) Message() Message {
|
|||
return i.msg
|
||||
}
|
||||
|
||||
func (i *iteratorImpl) Close() {
|
||||
_, err := i.data.Seek(0, io.SeekEnd)
|
||||
if err != nil {
|
||||
log.Printf("can't set seek pointer at the end: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func messageHasSize(msgType uint64) bool {
|
||||
return !(msgType == 80 || msgType == 81 || msgType == 82)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
package messages
|
||||
|
||||
func IsReplayerType(id int) bool {
|
||||
return 0 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 22 == id || 37 == id || 38 == id || 39 == id || 40 == id || 41 == id || 44 == id || 45 == id || 46 == id || 47 == id || 48 == id || 49 == id || 54 == id || 55 == id || 59 == id || 60 == id || 61 == id || 67 == id || 69 == id || 70 == id || 71 == id || 72 == id || 73 == id || 74 == id || 75 == id || 76 == id || 77 == id || 90 == id || 93 == id || 96 == id || 100 == id || 102 == id || 103 == id || 105 == id
|
||||
return 0 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 22 == id || 37 == id || 38 == id || 39 == id || 40 == id || 41 == id || 44 == id || 45 == id || 46 == id || 47 == id || 48 == id || 49 == id || 54 == id || 55 == id || 59 == id || 60 == id || 61 == id || 67 == id || 69 == id || 70 == id || 71 == id || 72 == id || 73 == id || 74 == id || 75 == id || 76 == id || 77 == id || 79 == id || 90 == id || 93 == id || 96 == id || 100 == id || 102 == id || 103 == id || 105 == id
|
||||
}
|
||||
|
||||
func IsIOSType(id int) bool {
|
||||
|
|
|
|||
|
|
@ -156,6 +156,8 @@ const (
|
|||
|
||||
MsgAdoptedSSRemoveOwner = 77
|
||||
|
||||
MsgZustand = 79
|
||||
|
||||
MsgIOSBatchMeta = 107
|
||||
|
||||
MsgIOSSessionStart = 90
|
||||
|
|
@ -3038,6 +3040,40 @@ func (msg *AdoptedSSRemoveOwner) TypeID() int {
|
|||
return 77
|
||||
}
|
||||
|
||||
type Zustand struct {
|
||||
message
|
||||
Mutation string
|
||||
State string
|
||||
}
|
||||
|
||||
func (msg *Zustand) Encode() []byte {
|
||||
buf := make([]byte, 21+len(msg.Mutation)+len(msg.State))
|
||||
buf[0] = 79
|
||||
p := 1
|
||||
p = WriteString(msg.Mutation, buf, p)
|
||||
p = WriteString(msg.State, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *Zustand) EncodeWithIndex() []byte {
|
||||
encoded := msg.Encode()
|
||||
if IsIOSType(msg.TypeID()) {
|
||||
return encoded
|
||||
}
|
||||
data := make([]byte, len(encoded)+8)
|
||||
copy(data[8:], encoded[:])
|
||||
binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index)
|
||||
return data
|
||||
}
|
||||
|
||||
func (msg *Zustand) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *Zustand) TypeID() int {
|
||||
return 79
|
||||
}
|
||||
|
||||
type IOSBatchMeta struct {
|
||||
message
|
||||
Timestamp uint64
|
||||
|
|
|
|||
|
|
@ -54,6 +54,7 @@ func (m *RawMessage) Decode() Message {
|
|||
msg, err := ReadMessage(m.tp, bytes.NewReader(m.data[1:]))
|
||||
if err != nil {
|
||||
log.Printf("decode err: %s", err)
|
||||
return nil
|
||||
}
|
||||
msg.Meta().SetMeta(m.meta)
|
||||
return msg
|
||||
|
|
|
|||
|
|
@ -1306,6 +1306,18 @@ func DecodeAdoptedSSRemoveOwner(reader io.Reader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeZustand(reader io.Reader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &Zustand{}
|
||||
if msg.Mutation, err = ReadString(reader); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.State, err = ReadString(reader); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeIOSBatchMeta(reader io.Reader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &IOSBatchMeta{}
|
||||
|
|
@ -1939,6 +1951,9 @@ func ReadMessage(t uint64, reader io.Reader) (Message, error) {
|
|||
case 77:
|
||||
return DecodeAdoptedSSRemoveOwner(reader)
|
||||
|
||||
case 79:
|
||||
return DecodeZustand(reader)
|
||||
|
||||
case 107:
|
||||
return DecodeIOSBatchMeta(reader)
|
||||
|
||||
|
|
|
|||
|
|
@@ -9,6 +9,7 @@ type Consumer interface {
 	Commit() error
 	CommitBack(gap int64) error
 	Close()
+	HasFirstPartition() bool
 }

 type Producer interface {

@@ -161,3 +161,7 @@ func (c *Consumer) CommitBack(gap int64) error {
 func (c *Consumer) Close() {
 	// noop
 }
+
+func (c *Consumer) HasFirstPartition() bool {
+	return false
+}

@ -4,6 +4,7 @@
|
|||
**/build.sh
|
||||
**/build_*.sh
|
||||
**/*deploy.sh
|
||||
Dockerfile*
|
||||
|
||||
app_crons.py
|
||||
app_alerts.py
|
||||
|
|
|
|||
ee/api/.gitignore (vendored, 14 changes)

@ -180,16 +180,16 @@ Pipfile
|
|||
.local/*
|
||||
|
||||
/chalicelib/core/alerts.py
|
||||
/chalicelib/core/alerts_processor.py
|
||||
#exp /chalicelib/core/alerts_processor.py
|
||||
/chalicelib/core/announcements.py
|
||||
/chalicelib/core/autocomplete.py
|
||||
/chalicelib/core/collaboration_slack.py
|
||||
/chalicelib/core/countries.py
|
||||
/chalicelib/core/errors.py
|
||||
#exp /chalicelib/core/errors.py
|
||||
/chalicelib/core/errors_favorite.py
|
||||
/chalicelib/core/events.py
|
||||
#exp /chalicelib/core/events.py
|
||||
/chalicelib/core/events_ios.py
|
||||
/chalicelib/core/funnels.py
|
||||
#exp /chalicelib/core/funnels.py
|
||||
/chalicelib/core/integration_base.py
|
||||
/chalicelib/core/integration_base_issue.py
|
||||
/chalicelib/core/integration_github.py
|
||||
|
|
@ -214,7 +214,7 @@ Pipfile
|
|||
/chalicelib/core/sessions_assignments.py
|
||||
/chalicelib/core/sessions_metas.py
|
||||
/chalicelib/core/sessions_mobs.py
|
||||
/chalicelib/core/significance.py
|
||||
#exp /chalicelib/core/significance.py
|
||||
/chalicelib/core/slack.py
|
||||
/chalicelib/core/socket_ios.py
|
||||
/chalicelib/core/sourcemaps.py
|
||||
|
|
@ -255,11 +255,11 @@ Pipfile
|
|||
/chalicelib/core/heatmaps.py
|
||||
/routers/subs/insights.py
|
||||
/schemas.py
|
||||
/chalicelib/core/custom_metrics.py
|
||||
#exp /chalicelib/core/custom_metrics.py
|
||||
/chalicelib/core/performance_event.py
|
||||
/chalicelib/core/saved_search.py
|
||||
/app_alerts.py
|
||||
/build_alerts.sh
|
||||
/routers/subs/metrics.py
|
||||
/routers/subs/v1_api.py
|
||||
/chalicelib/core/dashboards.py
|
||||
#exp /chalicelib/core/dashboards.py
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ ARG envarg
|
|||
ENV SOURCE_MAP_VERSION=0.7.4 \
|
||||
APP_NAME=chalice \
|
||||
LISTEN_PORT=8000 \
|
||||
MAPPING_WASM=/work/sourcemap-reader/mappings.wasm \
|
||||
ENTERPRISE_BUILD=${envarg}
|
||||
|
||||
ADD https://unpkg.com/source-map@${SOURCE_MAP_VERSION}/lib/mappings.wasm /mappings.wasm
|
||||
|
|
@ -18,7 +19,8 @@ RUN cd /work_tmp && npm install
|
|||
|
||||
WORKDIR /work
|
||||
COPY . .
|
||||
RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/. && chmod 644 /mappings.wasm
|
||||
RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/. \
|
||||
&& mv /mappings.wasm ${MAPPING_WASM} && chmod 644 ${MAPPING_WASM}
|
||||
|
||||
RUN adduser -u 1001 openreplay -D
|
||||
USER 1001
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@
|
|||
**/build.sh
|
||||
**/build_*.sh
|
||||
**/*deploy.sh
|
||||
Dockerfile*
|
||||
|
||||
app.py
|
||||
app_crons.py
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@
|
|||
**/build.sh
|
||||
**/build_*.sh
|
||||
**/*deploy.sh
|
||||
Dockerfile*
|
||||
|
||||
app.py
|
||||
app_alerts.py
|
||||
|
|
|
|||
|
|
@ -35,7 +35,9 @@ async def or_middleware(request: Request, call_next):
|
|||
now = int(time.time() * 1000)
|
||||
response: StreamingResponse = await call_next(request)
|
||||
if helper.TRACK_TIME:
|
||||
print(f"Execution time: {int(time.time() * 1000) - now} ms")
|
||||
now = int(time.time() * 1000) - now
|
||||
if now > 500:
|
||||
print(f"Execution time: {now} ms")
|
||||
except Exception as e:
|
||||
pg_client.close()
|
||||
raise e
|
||||
|
|
|
|||
|
|
@@ -1,28 +1,33 @@
 print("============= CRONS =============")
-import sys
 import asyncio
+import sys

 from routers.crons import core_dynamic_crons

+ACTIONS = {
+    "TELEMETRY": core_dynamic_crons.telemetry_cron,
+    "JOB": core_dynamic_crons.run_scheduled_jobs,
+    "REPORT": core_dynamic_crons.weekly_report
+}
+

 def default_action(action):
     async def _func():
         print(f"{action} not found in crons-definitions")
         print("possible actions:")
+        print(ACTIONS.keys())

     return _func


 async def process(action):
-    await {
-        "TELEMETRY": core_dynamic_crons.telemetry_cron,
-        "JOB": core_dynamic_crons.run_scheduled_jobs,
-        "REPORT": core_dynamic_crons.weekly_report2
-    }.get(action.upper(), default_action(action))()
+    await ACTIONS.get(action.upper(), default_action(action))()


 if __name__ == '__main__':
     if len(sys.argv) < 2 or len(sys.argv[1]) < 1:
-        print("please provide actions as argument")
+        print("please provide actions as argument\npossible actions:")
+        print(ACTIONS.keys())
     else:
         print(f"action: {sys.argv[1]}")
         asyncio.run(process(sys.argv[1]))

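The rewritten crons entrypoint keeps a single `ACTIONS` mapping and dispatches on the CLI argument. A stripped-down, runnable sketch of the same dispatcher, with a stand-in coroutine in place of the real cron jobs:

```python
# Hedged sketch of the ACTIONS dispatcher; telemetry_cron here is a placeholder.
import asyncio
import sys


async def telemetry_cron():
    print("telemetry tick")


ACTIONS = {"TELEMETRY": telemetry_cron}


def default_action(action):
    async def _func():
        print(f"{action} not found in crons-definitions")
        print("possible actions:", list(ACTIONS.keys()))

    return _func


async def process(action):
    await ACTIONS.get(action.upper(), default_action(action))()


if __name__ == "__main__":
    asyncio.run(process(sys.argv[1] if len(sys.argv) > 1 else ""))
```
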
@@ -42,3 +42,6 @@ if config("EXP_FUNNELS", cast=bool, default=False):
     from . import significance_exp as significance
 else:
     from . import significance as significance
+
+if config("EXP_RESOURCES", cast=bool, default=False):
+    print(">>> Using experimental resources for session-replay")

ee/api/chalicelib/core/alerts_processor.py (new file, 241 lines)

@ -0,0 +1,241 @@
|
|||
import decimal
|
||||
import logging
|
||||
|
||||
from decouple import config
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import alerts_listener
|
||||
from chalicelib.core import alerts
|
||||
from chalicelib.utils import pg_client
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
|
||||
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||
from chalicelib.core import sessions_legacy as sessions
|
||||
else:
|
||||
from chalicelib.core import sessions
|
||||
|
||||
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
|
||||
|
||||
LeftToDb = {
|
||||
schemas.AlertColumn.performance__dom_content_loaded__average: {
|
||||
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "COALESCE(AVG(NULLIF(dom_content_loaded_time ,0)),0)"},
|
||||
schemas.AlertColumn.performance__first_meaningful_paint__average: {
|
||||
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)"},
|
||||
schemas.AlertColumn.performance__page_load_time__average: {
|
||||
"table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(load_time ,0))"},
|
||||
schemas.AlertColumn.performance__dom_build_time__average: {
|
||||
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "AVG(NULLIF(dom_building_time,0))"},
|
||||
schemas.AlertColumn.performance__speed_index__average: {
|
||||
"table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(speed_index,0))"},
|
||||
schemas.AlertColumn.performance__page_response_time__average: {
|
||||
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "AVG(NULLIF(response_time,0))"},
|
||||
schemas.AlertColumn.performance__ttfb__average: {
|
||||
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "AVG(NULLIF(first_paint_time,0))"},
|
||||
schemas.AlertColumn.performance__time_to_render__average: {
|
||||
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "AVG(NULLIF(visually_complete,0))"},
|
||||
schemas.AlertColumn.performance__image_load_time__average: {
|
||||
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='img'"},
|
||||
schemas.AlertColumn.performance__request_load_time__average: {
|
||||
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='fetch'"},
|
||||
schemas.AlertColumn.resources__load_time__average: {
|
||||
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "AVG(NULLIF(resources.duration,0))"},
|
||||
schemas.AlertColumn.resources__missing__count: {
|
||||
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "COUNT(DISTINCT url_hostpath)", "condition": "success= FALSE AND type='img'"},
|
||||
schemas.AlertColumn.errors__4xx_5xx__count: {
|
||||
"table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "COUNT(session_id)",
|
||||
"condition": "status/100!=2"},
|
||||
schemas.AlertColumn.errors__4xx__count: {"table": "events.resources INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "COUNT(session_id)", "condition": "status/100=4"},
|
||||
schemas.AlertColumn.errors__5xx__count: {"table": "events.resources INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "COUNT(session_id)", "condition": "status/100=5"},
|
||||
schemas.AlertColumn.errors__javascript__impacted_sessions__count: {
|
||||
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
|
||||
"formula": "COUNT(DISTINCT session_id)", "condition": "success= FALSE AND type='script'"},
|
||||
schemas.AlertColumn.performance__crashes__count: {
|
||||
"table": "public.sessions",
|
||||
"formula": "COUNT(DISTINCT session_id)",
|
||||
"condition": "errors_count > 0 AND duration>0"},
|
||||
schemas.AlertColumn.errors__javascript__count: {
|
||||
"table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
|
||||
"formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'", "joinSessions": False},
|
||||
schemas.AlertColumn.errors__backend__count: {
|
||||
"table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
|
||||
"formula": "COUNT(DISTINCT session_id)", "condition": "source!='js_exception'", "joinSessions": False},
|
||||
}
|
||||
|
||||
# This is the frequency of execution for each threshold
|
||||
TimeInterval = {
|
||||
15: 3,
|
||||
30: 5,
|
||||
60: 10,
|
||||
120: 20,
|
||||
240: 30,
|
||||
1440: 60,
|
||||
}
|
||||
|
||||
|
||||
def can_check(a) -> bool:
|
||||
now = TimeUTC.now()
|
||||
|
||||
repetitionBase = a["options"]["currentPeriod"] \
|
||||
if a["detectionMethod"] == schemas.AlertDetectionMethod.change \
|
||||
and a["options"]["currentPeriod"] > a["options"]["previousPeriod"] \
|
||||
else a["options"]["previousPeriod"]
|
||||
|
||||
if TimeInterval.get(repetitionBase) is None:
|
||||
logging.error(f"repetitionBase: {repetitionBase} NOT FOUND")
|
||||
return False
|
||||
|
||||
return (a["options"]["renotifyInterval"] <= 0 or
|
||||
a["options"].get("lastNotification") is None or
|
||||
a["options"]["lastNotification"] <= 0 or
|
||||
((now - a["options"]["lastNotification"]) > a["options"]["renotifyInterval"] * 60 * 1000)) \
|
||||
and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000
|
||||
|
||||
|
||||
def Build(a):
|
||||
now = TimeUTC.now()
|
||||
params = {"project_id": a["projectId"], "now": now}
|
||||
full_args = {}
|
||||
j_s = True
|
||||
if a["seriesId"] is not None:
|
||||
a["filter"]["sort"] = "session_id"
|
||||
a["filter"]["order"] = schemas.SortOrderType.desc
|
||||
a["filter"]["startDate"] = -1
|
||||
a["filter"]["endDate"] = TimeUTC.now()
|
||||
full_args, query_part = sessions.search_query_parts(
|
||||
data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]), error_status=None, errors_only=False,
|
||||
issue=None, project_id=a["projectId"], user_id=None, favorite_only=False)
|
||||
subQ = f"""SELECT COUNT(session_id) AS value
|
||||
{query_part}"""
|
||||
else:
|
||||
colDef = LeftToDb[a["query"]["left"]]
|
||||
subQ = f"""SELECT {colDef["formula"]} AS value
|
||||
FROM {colDef["table"]}
|
||||
WHERE project_id = %(project_id)s
|
||||
{"AND " + colDef["condition"] if colDef.get("condition") is not None else ""}"""
|
||||
j_s = colDef.get("joinSessions", True)
|
||||
|
||||
q = f"""SELECT coalesce(value,0) AS value, coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid"""
|
||||
|
||||
if a["detectionMethod"] == schemas.AlertDetectionMethod.threshold:
|
||||
if a["seriesId"] is not None:
|
||||
q += f""" FROM ({subQ}) AS stat"""
|
||||
else:
|
||||
q += f""" FROM ({subQ} AND timestamp>=%(startDate)s AND timestamp<=%(now)s
|
||||
{"AND sessions.start_ts >= %(startDate)s" if j_s else ""}
|
||||
{"AND sessions.start_ts <= %(now)s" if j_s else ""}) AS stat"""
|
||||
params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000}
|
||||
else:
|
||||
if a["change"] == schemas.AlertDetectionType.change:
|
||||
if a["seriesId"] is not None:
|
||||
sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
|
||||
sub1 = f"SELECT (({subQ})-({sub2})) AS value"
|
||||
q += f" FROM ( {sub1} ) AS stat"
|
||||
params = {**params, **full_args,
|
||||
"startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
|
||||
"timestamp_sub2": TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000}
|
||||
else:
|
||||
sub1 = f"""{subQ} AND timestamp>=%(startDate)s
|
||||
AND datetime<=toDateTime(%(now)s/1000)
|
||||
{"AND sessions.start_ts >= %(startDate)s" if j_s else ""}
|
||||
{"AND sessions.start_ts <= %(now)s" if j_s else ""}"""
|
||||
params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
|
||||
sub2 = f"""{subQ} AND timestamp<%(startDate)s
|
||||
AND timestamp>=%(timestamp_sub2)s
|
||||
{"AND sessions.start_ts < %(startDate)s AND sessions.start_ts >= %(timestamp_sub2)s" if j_s else ""}"""
|
||||
params["timestamp_sub2"] = TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000
|
||||
sub1 = f"SELECT (( {sub1} )-( {sub2} )) AS value"
|
||||
q += f" FROM ( {sub1} ) AS stat"
|
||||
|
||||
else:
|
||||
if a["seriesId"] is not None:
|
||||
sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
|
||||
sub1 = f"SELECT (({subQ})/NULLIF(({sub2}),0)-1)*100 AS value"
|
||||
q += f" FROM ({sub1}) AS stat"
|
||||
params = {**params, **full_args,
|
||||
"startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
|
||||
"timestamp_sub2": TimeUTC.now() \
|
||||
- (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) \
|
||||
* 60 * 1000}
|
||||
else:
|
||||
sub1 = f"""{subQ} AND timestamp>=%(startDate)s AND timestamp<=%(now)s
|
||||
{"AND sessions.start_ts >= %(startDate)s" if j_s else ""}
|
||||
{"AND sessions.start_ts <= %(now)s" if j_s else ""}"""
|
||||
params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
|
||||
sub2 = f"""{subQ} AND timestamp<%(startDate)s
|
||||
AND timestamp>=%(timestamp_sub2)s
|
||||
{"AND sessions.start_ts < %(startDate)s AND sessions.start_ts >= %(timestamp_sub2)s" if j_s else ""}"""
|
||||
params["timestamp_sub2"] = TimeUTC.now() \
|
||||
- (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) * 60 * 1000
|
||||
sub1 = f"SELECT (({sub1})/NULLIF(({sub2}),0)-1)*100 AS value"
|
||||
q += f" FROM ({sub1}) AS stat"
|
||||
|
||||
return q, params
|
||||
|
||||
|
||||
def process():
|
||||
notifications = []
|
||||
all_alerts = alerts_listener.get_all_alerts()
|
||||
with pg_client.PostgresClient() as cur:
|
||||
for alert in all_alerts:
|
||||
if can_check(alert):
|
||||
logging.info(f"Querying alertId:{alert['alertId']} name: {alert['name']}")
|
||||
query, params = Build(alert)
|
||||
query = cur.mogrify(query, params)
|
||||
logging.debug(alert)
|
||||
logging.debug(query)
|
||||
try:
|
||||
cur.execute(query)
|
||||
result = cur.fetchone()
|
||||
if result["valid"]:
|
||||
logging.info("Valid alert, notifying users")
|
||||
notifications.append(generate_notification(alert, result))
|
||||
except Exception as e:
|
||||
logging.error(f"!!!Error while running alert query for alertId:{alert['alertId']}")
|
||||
logging.error(str(e))
|
||||
logging.error(query)
|
||||
if len(notifications) > 0:
|
||||
cur.execute(
|
||||
cur.mogrify(f"""UPDATE public.Alerts
|
||||
SET options = options||'{{"lastNotification":{TimeUTC.now()}}}'::jsonb
|
||||
WHERE alert_id IN %(ids)s;""", {"ids": tuple([n["alertId"] for n in notifications])}))
|
||||
if len(notifications) > 0:
|
||||
alerts.process_notifications(notifications)
|
||||
|
||||
|
||||
def generate_notification(alert, result):
|
||||
return {
|
||||
"alertId": alert["alertId"],
|
||||
"tenantId": alert["tenantId"],
|
||||
"title": alert["name"],
|
||||
"description": f"has been triggered, {alert['query']['left']} = {round(result['value'], 2)} ({alert['query']['operator']} {alert['query']['right']}).",
|
||||
"buttonText": "Check metrics for more details",
|
||||
"buttonUrl": f"/{alert['projectId']}/metrics",
|
||||
"imageUrl": None,
|
||||
"options": {"source": "ALERT", "sourceId": alert["alertId"],
|
||||
"sourceMeta": alert["detectionMethod"],
|
||||
"message": alert["options"]["message"], "projectId": alert["projectId"],
|
||||
"data": {"title": alert["name"],
|
||||
"limitValue": alert["query"]["right"],
|
||||
"actualValue": float(result["value"]) \
|
||||
if isinstance(result["value"], decimal.Decimal) \
|
||||
else result["value"],
|
||||
"operator": alert["query"]["operator"],
|
||||
"trigger": alert["query"]["left"],
|
||||
"alertId": alert["alertId"],
|
||||
"detectionMethod": alert["detectionMethod"],
|
||||
"currentPeriod": alert["options"]["currentPeriod"],
|
||||
"previousPeriod": alert["options"]["previousPeriod"],
|
||||
"createdAt": TimeUTC.now()}},
|
||||
}
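
The `TimeInterval` table and `can_check` above encode the evaluation cadence: an alert whose base period is 60 minutes is only queried every 10 minutes, and only during the first minute of each step, so it fires roughly once per interval. A small sketch of that cadence rule in isolation (renotify handling omitted; this is an illustration, not the project's helper):

```python
# Hedged sketch of the cadence rule from can_check; ignores renotifyInterval.
import time

TimeInterval = {15: 3, 30: 5, 60: 10, 120: 20, 240: 30, 1440: 60}  # period (min) -> check-every (min)


def due(created_at_ms, period_minutes, now_ms=None):
    now_ms = int(time.time() * 1000) if now_ms is None else now_ms
    step_ms = TimeInterval[period_minutes] * 60 * 1000
    # true only within the first minute of each step window since creation
    return ((now_ms - created_at_ms) % step_ms) < 60 * 1000
```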
ee/api/chalicelib/core/custom_metrics.py (new file, 551 lines)

@ -0,0 +1,551 @@
|
|||
import json
|
||||
from typing import Union
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import funnels, issues
|
||||
from chalicelib.utils import helper, pg_client
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
|
||||
from decouple import config
|
||||
|
||||
if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
|
||||
print(">>> Using experimental error search")
|
||||
from . import errors_exp as errors
|
||||
else:
|
||||
from . import errors as errors
|
||||
|
||||
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||
from chalicelib.core import sessions_legacy as sessions
|
||||
else:
|
||||
from chalicelib.core import sessions
|
||||
|
||||
PIE_CHART_GROUP = 5
|
||||
|
||||
|
||||
def __try_live(project_id, data: schemas.TryCustomMetricsPayloadSchema):
|
||||
results = []
|
||||
for i, s in enumerate(data.series):
|
||||
s.filter.startDate = data.startTimestamp
|
||||
s.filter.endDate = data.endTimestamp
|
||||
results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
|
||||
view_type=data.view_type, metric_type=data.metric_type,
|
||||
metric_of=data.metric_of, metric_value=data.metric_value))
|
||||
if data.view_type == schemas.MetricTimeseriesViewType.progress:
|
||||
r = {"count": results[-1]}
|
||||
diff = s.filter.endDate - s.filter.startDate
|
||||
s.filter.endDate = s.filter.startDate
|
||||
s.filter.startDate = s.filter.endDate - diff
|
||||
r["previousCount"] = sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
|
||||
view_type=data.view_type, metric_type=data.metric_type,
|
||||
metric_of=data.metric_of, metric_value=data.metric_value)
|
||||
r["countProgress"] = helper.__progress(old_val=r["previousCount"], new_val=r["count"])
|
||||
# r["countProgress"] = ((r["count"] - r["previousCount"]) / r["previousCount"]) * 100 \
|
||||
# if r["previousCount"] > 0 else 0
|
||||
r["seriesName"] = s.name if s.name else i + 1
|
||||
r["seriesId"] = s.series_id if s.series_id else None
|
||||
results[-1] = r
|
||||
elif data.view_type == schemas.MetricTableViewType.pie_chart:
|
||||
if len(results[i].get("values", [])) > PIE_CHART_GROUP:
|
||||
results[i]["values"] = results[i]["values"][:PIE_CHART_GROUP] \
|
||||
+ [{
|
||||
"name": "Others", "group": True,
|
||||
"sessionCount": sum(r["sessionCount"] for r in results[i]["values"][PIE_CHART_GROUP:])
|
||||
}]
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def __is_funnel_chart(data: schemas.TryCustomMetricsPayloadSchema):
|
||||
return data.metric_type == schemas.MetricType.funnel
|
||||
|
||||
|
||||
def __get_funnel_chart(project_id, data: schemas.TryCustomMetricsPayloadSchema):
|
||||
if len(data.series) == 0:
|
||||
return {
|
||||
"stages": [],
|
||||
"totalDropDueToIssues": 0
|
||||
}
|
||||
data.series[0].filter.startDate = data.startTimestamp
|
||||
data.series[0].filter.endDate = data.endTimestamp
|
||||
return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter)
|
||||
|
||||
|
||||
def __is_errors_list(data):
|
||||
return data.metric_type == schemas.MetricType.table \
|
||||
and data.metric_of == schemas.TableMetricOfType.errors
|
||||
|
||||
|
||||
def __get_errors_list(project_id, user_id, data):
|
||||
if len(data.series) == 0:
|
||||
return {
|
||||
"total": 0,
|
||||
"errors": []
|
||||
}
|
||||
data.series[0].filter.startDate = data.startTimestamp
|
||||
data.series[0].filter.endDate = data.endTimestamp
|
||||
data.series[0].filter.page = data.page
|
||||
data.series[0].filter.limit = data.limit
|
||||
return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id)
|
||||
|
||||
|
||||
def __is_sessions_list(data):
|
||||
return data.metric_type == schemas.MetricType.table \
|
||||
and data.metric_of == schemas.TableMetricOfType.sessions
|
||||
|
||||
|
||||
def __get_sessions_list(project_id, user_id, data):
|
||||
if len(data.series) == 0:
|
||||
print("empty series")
|
||||
return {
|
||||
"total": 0,
|
||||
"sessions": []
|
||||
}
|
||||
data.series[0].filter.startDate = data.startTimestamp
|
||||
data.series[0].filter.endDate = data.endTimestamp
|
||||
data.series[0].filter.page = data.page
|
||||
data.series[0].filter.limit = data.limit
|
||||
return sessions.search_sessions(data=data.series[0].filter, project_id=project_id, user_id=user_id)
|
||||
|
||||
|
||||
def merged_live(project_id, data: schemas.TryCustomMetricsPayloadSchema, user_id=None):
|
||||
if __is_funnel_chart(data):
|
||||
return __get_funnel_chart(project_id=project_id, data=data)
|
||||
elif __is_errors_list(data):
|
||||
return __get_errors_list(project_id=project_id, user_id=user_id, data=data)
|
||||
elif __is_sessions_list(data):
|
||||
return __get_sessions_list(project_id=project_id, user_id=user_id, data=data)
|
||||
|
||||
series_charts = __try_live(project_id=project_id, data=data)
|
||||
if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table:
|
||||
return series_charts
|
||||
results = [{}] * len(series_charts[0])
|
||||
for i in range(len(results)):
|
||||
for j, series_chart in enumerate(series_charts):
|
||||
results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"],
|
||||
data.series[j].name if data.series[j].name else j + 1: series_chart[i]["count"]}
|
||||
return results
|
||||
|
||||
|
||||
def __merge_metric_with_data(metric, data: Union[schemas.CustomMetricChartPayloadSchema,
|
||||
schemas.CustomMetricSessionsPayloadSchema]) \
|
||||
-> Union[schemas.CreateCustomMetricsSchema, None]:
|
||||
if data.series is not None and len(data.series) > 0:
|
||||
metric["series"] = data.series
|
||||
metric: schemas.CreateCustomMetricsSchema = schemas.CreateCustomMetricsSchema.parse_obj({**data.dict(), **metric})
|
||||
if len(data.filters) > 0 or len(data.events) > 0:
|
||||
for s in metric.series:
|
||||
if len(data.filters) > 0:
|
||||
s.filter.filters += data.filters
|
||||
if len(data.events) > 0:
|
||||
s.filter.events += data.events
|
||||
return metric
|
||||
|
||||
|
||||
def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema, metric=None):
|
||||
if metric is None:
|
||||
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
|
||||
if metric is None:
|
||||
return None
|
||||
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
|
||||
|
||||
return merged_live(project_id=project_id, data=metric, user_id=user_id)
|
||||
# if __is_funnel_chart(metric):
|
||||
# return __get_funnel_chart(project_id=project_id, data=metric)
|
||||
# elif __is_errors_list(metric):
|
||||
# return __get_errors_list(project_id=project_id, user_id=user_id, data=metric)
|
||||
#
|
||||
# series_charts = __try_live(project_id=project_id, data=metric)
|
||||
# if metric.view_type == schemas.MetricTimeseriesViewType.progress or metric.metric_type == schemas.MetricType.table:
|
||||
# return series_charts
|
||||
# results = [{}] * len(series_charts[0])
|
||||
# for i in range(len(results)):
|
||||
# for j, series_chart in enumerate(series_charts):
|
||||
# results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"],
|
||||
# metric.series[j].name: series_chart[i]["count"]}
|
||||
# return results
|
||||
|
||||
|
||||
def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
|
||||
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
|
||||
if metric is None:
|
||||
return None
|
||||
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
|
||||
if metric is None:
|
||||
return None
|
||||
results = []
|
||||
for s in metric.series:
|
||||
s.filter.startDate = data.startTimestamp
|
||||
s.filter.endDate = data.endTimestamp
|
||||
s.filter.limit = data.limit
|
||||
s.filter.page = data.page
|
||||
results.append({"seriesId": s.series_id, "seriesName": s.name,
|
||||
**sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
|
||||
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
|
||||
if metric is None:
|
||||
return None
|
||||
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
|
||||
if metric is None:
|
||||
return None
|
||||
for s in metric.series:
|
||||
s.filter.startDate = data.startTimestamp
|
||||
s.filter.endDate = data.endTimestamp
|
||||
s.filter.limit = data.limit
|
||||
s.filter.page = data.page
|
||||
return {"seriesId": s.series_id, "seriesName": s.name,
|
||||
**funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)}
|
||||
|
||||
|
||||
def get_errors_list(project_id, user_id, metric_id, data: schemas.CustomMetricSessionsPayloadSchema):
|
||||
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
|
||||
if metric is None:
|
||||
return None
|
||||
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
|
||||
if metric is None:
|
||||
return None
|
||||
for s in metric.series:
|
||||
s.filter.startDate = data.startTimestamp
|
||||
s.filter.endDate = data.endTimestamp
|
||||
s.filter.limit = data.limit
|
||||
s.filter.page = data.page
|
||||
return {"seriesId": s.series_id, "seriesName": s.name,
|
||||
**errors.search(data=s.filter, project_id=project_id, user_id=user_id)}
|
||||
|
||||
|
||||
def try_sessions(project_id, user_id, data: schemas.CustomMetricSessionsPayloadSchema):
|
||||
results = []
|
||||
if data.series is None:
|
||||
return results
|
||||
for s in data.series:
|
||||
s.filter.startDate = data.startTimestamp
|
||||
s.filter.endDate = data.endTimestamp
|
||||
s.filter.limit = data.limit
|
||||
s.filter.page = data.page
|
||||
results.append({"seriesId": None, "seriesName": s.name,
|
||||
**sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)})
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema, dashboard=False):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
_data = {}
|
||||
for i, s in enumerate(data.series):
|
||||
for k in s.dict().keys():
|
||||
_data[f"{k}_{i}"] = s.__getattribute__(k)
|
||||
_data[f"index_{i}"] = i
|
||||
_data[f"filter_{i}"] = s.filter.json()
|
||||
series_len = len(data.series)
|
||||
data.series = None
|
||||
params = {"user_id": user_id, "project_id": project_id,
|
||||
"default_config": json.dumps(data.config.dict()),
|
||||
**data.dict(), **_data}
|
||||
query = cur.mogrify(f"""\
|
||||
WITH m AS (INSERT INTO metrics (project_id, user_id, name, is_public,
|
||||
view_type, metric_type, metric_of, metric_value,
|
||||
metric_format, default_config)
|
||||
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s,
|
||||
%(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s,
|
||||
%(metric_format)s, %(default_config)s)
|
||||
RETURNING *)
|
||||
INSERT
|
||||
INTO metric_series(metric_id, index, name, filter)
|
||||
VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
|
||||
for i in range(series_len)])}
|
||||
RETURNING metric_id;""", params)
|
||||
|
||||
cur.execute(
|
||||
query
|
||||
)
|
||||
r = cur.fetchone()
|
||||
if dashboard:
|
||||
return r["metric_id"]
|
||||
return {"data": get(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)}
|
||||
|
||||
|
||||
def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSchema):
|
||||
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
|
||||
if metric is None:
|
||||
return None
|
||||
series_ids = [r["seriesId"] for r in metric["series"]]
|
||||
n_series = []
|
||||
d_series_ids = []
|
||||
u_series = []
|
||||
u_series_ids = []
|
||||
params = {"metric_id": metric_id, "is_public": data.is_public, "name": data.name,
|
||||
"user_id": user_id, "project_id": project_id, "view_type": data.view_type,
|
||||
"metric_type": data.metric_type, "metric_of": data.metric_of,
|
||||
"metric_value": data.metric_value, "metric_format": data.metric_format}
|
||||
for i, s in enumerate(data.series):
|
||||
prefix = "u_"
|
||||
if s.index is None:
|
||||
s.index = i
|
||||
if s.series_id is None or s.series_id not in series_ids:
|
||||
n_series.append({"i": i, "s": s})
|
||||
prefix = "n_"
|
||||
else:
|
||||
u_series.append({"i": i, "s": s})
|
||||
u_series_ids.append(s.series_id)
|
||||
ns = s.dict()
|
||||
for k in ns.keys():
|
||||
if k == "filter":
|
||||
ns[k] = json.dumps(ns[k])
|
||||
params[f"{prefix}{k}_{i}"] = ns[k]
|
||||
for i in series_ids:
|
||||
if i not in u_series_ids:
|
||||
d_series_ids.append(i)
|
||||
params["d_series_ids"] = tuple(d_series_ids)
|
||||
|
||||
with pg_client.PostgresClient() as cur:
|
||||
sub_queries = []
|
||||
if len(n_series) > 0:
|
||||
sub_queries.append(f"""\
|
||||
n AS (INSERT INTO metric_series (metric_id, index, name, filter)
|
||||
VALUES {",".join([f"(%(metric_id)s, %(n_index_{s['i']})s, %(n_name_{s['i']})s, %(n_filter_{s['i']})s::jsonb)"
|
||||
for s in n_series])}
|
||||
RETURNING 1)""")
|
||||
if len(u_series) > 0:
|
||||
sub_queries.append(f"""\
|
||||
u AS (UPDATE metric_series
|
||||
SET name=series.name,
|
||||
filter=series.filter,
|
||||
index=series.index
|
||||
FROM (VALUES {",".join([f"(%(u_series_id_{s['i']})s,%(u_index_{s['i']})s,%(u_name_{s['i']})s,%(u_filter_{s['i']})s::jsonb)"
|
||||
for s in u_series])}) AS series(series_id, index, name, filter)
|
||||
WHERE metric_series.metric_id =%(metric_id)s AND metric_series.series_id=series.series_id
|
||||
RETURNING 1)""")
|
||||
if len(d_series_ids) > 0:
|
||||
sub_queries.append("""\
|
||||
d AS (DELETE FROM metric_series WHERE metric_id =%(metric_id)s AND series_id IN %(d_series_ids)s
|
||||
RETURNING 1)""")
|
||||
query = cur.mogrify(f"""\
|
||||
{"WITH " if len(sub_queries) > 0 else ""}{",".join(sub_queries)}
|
||||
UPDATE metrics
|
||||
SET name = %(name)s, is_public= %(is_public)s,
|
||||
view_type= %(view_type)s, metric_type= %(metric_type)s,
|
||||
metric_of= %(metric_of)s, metric_value= %(metric_value)s,
|
||||
metric_format= %(metric_format)s,
|
||||
edited_at = timezone('utc'::text, now())
|
||||
WHERE metric_id = %(metric_id)s
|
||||
AND project_id = %(project_id)s
|
||||
AND (user_id = %(user_id)s OR is_public)
|
||||
RETURNING metric_id;""", params)
|
||||
cur.execute(query)
|
||||
return get(metric_id=metric_id, project_id=project_id, user_id=user_id)
|
||||
|
||||
|
||||
def get_all(project_id, user_id, include_series=False):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
sub_join = ""
|
||||
if include_series:
|
||||
sub_join = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
|
||||
FROM metric_series
|
||||
WHERE metric_series.metric_id = metrics.metric_id
|
||||
AND metric_series.deleted_at ISNULL
|
||||
) AS metric_series ON (TRUE)"""
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
f"""SELECT *
|
||||
FROM metrics
|
||||
{sub_join}
|
||||
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
|
||||
FROM (SELECT DISTINCT dashboard_id, name, is_public
|
||||
FROM dashboards INNER JOIN dashboard_widgets USING (dashboard_id)
|
||||
WHERE deleted_at ISNULL
|
||||
AND dashboard_widgets.metric_id = metrics.metric_id
|
||||
AND project_id = %(project_id)s
|
||||
AND ((dashboards.user_id = %(user_id)s OR is_public))) AS connected_dashboards
|
||||
) AS connected_dashboards ON (TRUE)
|
||||
LEFT JOIN LATERAL (SELECT email AS owner_email
|
||||
FROM users
|
||||
WHERE deleted_at ISNULL
|
||||
AND users.user_id = metrics.user_id
|
||||
) AS owner ON (TRUE)
|
||||
WHERE metrics.project_id = %(project_id)s
|
||||
AND metrics.deleted_at ISNULL
|
||||
AND (user_id = %(user_id)s OR metrics.is_public)
|
||||
ORDER BY metrics.edited_at DESC, metrics.created_at DESC;""",
|
||||
{"project_id": project_id, "user_id": user_id}
|
||||
)
|
||||
)
|
||||
rows = cur.fetchall()
|
||||
if include_series:
|
||||
for r in rows:
|
||||
# r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
|
||||
for s in r["series"]:
|
||||
s["filter"] = helper.old_search_payload_to_flat(s["filter"])
|
||||
else:
|
||||
for r in rows:
|
||||
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
|
||||
r["edited_at"] = TimeUTC.datetime_to_timestamp(r["edited_at"])
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
return rows
|
||||
|
||||
|
||||
def delete(project_id, metric_id, user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify("""\
|
||||
UPDATE public.metrics
|
||||
SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now())
|
||||
WHERE project_id = %(project_id)s
|
||||
AND metric_id = %(metric_id)s
|
||||
AND (user_id = %(user_id)s OR is_public);""",
|
||||
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id})
|
||||
)
|
||||
|
||||
return {"state": "success"}
|
||||
|
||||
|
||||
def get(metric_id, project_id, user_id, flatten=True):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
"""SELECT *
|
||||
FROM metrics
|
||||
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
|
||||
FROM metric_series
|
||||
WHERE metric_series.metric_id = metrics.metric_id
|
||||
AND metric_series.deleted_at ISNULL
|
||||
) AS metric_series ON (TRUE)
|
||||
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
|
||||
FROM (SELECT dashboard_id, name, is_public
|
||||
FROM dashboards
|
||||
WHERE deleted_at ISNULL
|
||||
AND project_id = %(project_id)s
|
||||
AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards
|
||||
) AS connected_dashboards ON (TRUE)
|
||||
LEFT JOIN LATERAL (SELECT email AS owner_email
|
||||
FROM users
|
||||
WHERE deleted_at ISNULL
|
||||
AND users.user_id = metrics.user_id
|
||||
) AS owner ON (TRUE)
|
||||
WHERE metrics.project_id = %(project_id)s
|
||||
AND metrics.deleted_at ISNULL
|
||||
AND (metrics.user_id = %(user_id)s OR metrics.is_public)
|
||||
AND metrics.metric_id = %(metric_id)s
|
||||
ORDER BY created_at;""",
|
||||
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
|
||||
)
|
||||
)
|
||||
row = cur.fetchone()
|
||||
if row is None:
|
||||
return None
|
||||
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
|
||||
row["edited_at"] = TimeUTC.datetime_to_timestamp(row["edited_at"])
|
||||
if flatten:
|
||||
for s in row["series"]:
|
||||
s["filter"] = helper.old_search_payload_to_flat(s["filter"])
|
||||
return helper.dict_to_camel_case(row)
|
||||
|
||||
|
||||
def get_with_template(metric_id, project_id, user_id, include_dashboard=True):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
sub_query = ""
|
||||
if include_dashboard:
|
||||
sub_query = """LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(connected_dashboards.* ORDER BY is_public,name),'[]'::jsonb) AS dashboards
|
||||
FROM (SELECT dashboard_id, name, is_public
|
||||
FROM dashboards
|
||||
WHERE deleted_at ISNULL
|
||||
AND project_id = %(project_id)s
|
||||
AND ((user_id = %(user_id)s OR is_public))) AS connected_dashboards
|
||||
) AS connected_dashboards ON (TRUE)"""
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
f"""SELECT *
|
||||
FROM metrics
|
||||
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
|
||||
FROM metric_series
|
||||
WHERE metric_series.metric_id = metrics.metric_id
|
||||
AND metric_series.deleted_at ISNULL
|
||||
) AS metric_series ON (TRUE)
|
||||
{sub_query}
|
||||
WHERE (metrics.project_id = %(project_id)s OR metrics.project_id ISNULL)
|
||||
AND metrics.deleted_at ISNULL
|
||||
AND (metrics.user_id = %(user_id)s OR metrics.is_public)
|
||||
AND metrics.metric_id = %(metric_id)s
|
||||
ORDER BY created_at;""",
|
||||
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
|
||||
)
|
||||
)
|
||||
row = cur.fetchone()
|
||||
return helper.dict_to_camel_case(row)
|
||||
|
||||
|
||||
def get_series_for_alert(project_id, user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
"""SELECT series_id AS value,
|
||||
metrics.name || '.' || (COALESCE(metric_series.name, 'series ' || index)) || '.count' AS name,
|
||||
'count' AS unit,
|
||||
FALSE AS predefined,
|
||||
metric_id,
|
||||
series_id
|
||||
FROM metric_series
|
||||
INNER JOIN metrics USING (metric_id)
|
||||
WHERE metrics.deleted_at ISNULL
|
||||
AND metrics.project_id = %(project_id)s
|
||||
AND metrics.metric_type = 'timeseries'
|
||||
AND (user_id = %(user_id)s OR is_public)
|
||||
ORDER BY name;""",
|
||||
{"project_id": project_id, "user_id": user_id}
|
||||
)
|
||||
)
|
||||
rows = cur.fetchall()
|
||||
return helper.list_to_camel_case(rows)
|
||||
|
||||
|
||||
def change_state(project_id, metric_id, user_id, status):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify("""\
|
||||
UPDATE public.metrics
|
||||
SET active = %(status)s
|
||||
WHERE metric_id = %(metric_id)s
|
||||
AND (user_id = %(user_id)s OR is_public);""",
|
||||
{"metric_id": metric_id, "status": status, "user_id": user_id})
|
||||
)
|
||||
return get(metric_id=metric_id, project_id=project_id, user_id=user_id)
|
||||
|
||||
|
||||
def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id,
|
||||
data: schemas.CustomMetricSessionsPayloadSchema
|
||||
# , range_value=None, start_date=None, end_date=None
|
||||
):
|
||||
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
|
||||
if metric is None:
|
||||
return None
|
||||
metric: schemas.CreateCustomMetricsSchema = __merge_metric_with_data(metric=metric, data=data)
|
||||
if metric is None:
|
||||
return None
|
||||
for s in metric.series:
|
||||
s.filter.startDate = data.startTimestamp
|
||||
s.filter.endDate = data.endTimestamp
|
||||
s.filter.limit = data.limit
|
||||
s.filter.page = data.page
|
||||
issues_list = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {})
|
||||
issues_list = issues_list.get("significant", []) + issues_list.get("insignificant", [])
|
||||
issue = None
|
||||
for i in issues_list:
|
||||
if i.get("issueId", "") == issue_id:
|
||||
issue = i
|
||||
break
|
||||
if issue is None:
|
||||
issue = issues.get(project_id=project_id, issue_id=issue_id)
|
||||
if issue is not None:
|
||||
issue = {**issue,
|
||||
"affectedSessions": 0,
|
||||
"affectedUsers": 0,
|
||||
"conversionImpact": 0,
|
||||
"lostConversions": 0,
|
||||
"unaffectedSessions": 0}
|
||||
return {"seriesId": s.series_id, "seriesName": s.name,
|
||||
"sessions": sessions.search_sessions(user_id=user_id, project_id=project_id,
|
||||
issue=issue, data=s.filter)
|
||||
if issue is not None else {"total": 0, "sessions": []},
|
||||
"issue": issue}
|
||||
333 ee/api/chalicelib/core/dashboards.py Normal file
@ -0,0 +1,333 @@

import json

import schemas
from chalicelib.core import custom_metrics
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC

from decouple import config

if config("EXP_METRICS", cast=bool, default=False):
    from . import metrics_exp as metrics
else:
    from . import metrics as metrics

# category name should be lower cased
CATEGORY_DESCRIPTION = {
    'web vitals': 'A set of metrics that assess app performance on criteria such as load time, load performance, and stability.',
    'custom': 'Previously created custom metrics by me and my team.',
    'errors': 'Keep a closer eye on errors and track their type, origin and domain.',
    'performance': 'Optimize your app’s performance by tracking slow domains, page response times, memory consumption, CPU usage and more.',
    'resources': 'Find out which resources are missing and those that may be slowing your web app.'
}


def get_templates(project_id, user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
pg_query = cur.mogrify(f"""SELECT category, jsonb_agg(metrics ORDER BY name) AS widgets
|
||||
FROM (SELECT * , default_config AS config
|
||||
FROM metrics LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index), '[]'::jsonb) AS series
|
||||
FROM metric_series
|
||||
WHERE metric_series.metric_id = metrics.metric_id
|
||||
AND metric_series.deleted_at ISNULL
|
||||
) AS metric_series ON (TRUE)
|
||||
WHERE deleted_at IS NULL
|
||||
AND (project_id ISNULL OR (project_id = %(project_id)s AND (is_public OR user_id= %(userId)s)))
|
||||
) AS metrics
|
||||
GROUP BY category
|
||||
ORDER BY ARRAY_POSITION(ARRAY ['custom','overview','errors','performance','resources'], category);""",
|
||||
{"project_id": project_id, "userId": user_id})
|
||||
cur.execute(pg_query)
|
||||
rows = cur.fetchall()
|
||||
for r in rows:
|
||||
r["description"] = CATEGORY_DESCRIPTION.get(r["category"].lower(), "")
|
||||
for w in r["widgets"]:
|
||||
w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
|
||||
w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
|
||||
for s in w["series"]:
|
||||
s["filter"] = helper.old_search_payload_to_flat(s["filter"])
|
||||
|
||||
return helper.list_to_camel_case(rows)
|
||||
|
||||
|
||||
def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned, description)
|
||||
VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s, %(description)s)
|
||||
RETURNING *"""
|
||||
params = {"userId": user_id, "projectId": project_id, **data.dict()}
|
||||
if data.metrics is not None and len(data.metrics) > 0:
|
||||
pg_query = f"""WITH dash AS ({pg_query})
|
||||
INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
|
||||
VALUES {",".join([f"((SELECT dashboard_id FROM dash),%(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)" for i in range(len(data.metrics))])}
|
||||
RETURNING (SELECT dashboard_id FROM dash)"""
|
||||
for i, m in enumerate(data.metrics):
|
||||
params[f"metric_id_{i}"] = m
|
||||
# params[f"config_{i}"] = schemas.AddWidgetToDashboardPayloadSchema.schema() \
|
||||
# .get("properties", {}).get("config", {}).get("default", {})
|
||||
# params[f"config_{i}"]["position"] = i
|
||||
# params[f"config_{i}"] = json.dumps(params[f"config_{i}"])
|
||||
params[f"config_{i}"] = json.dumps({"position": i})
|
||||
cur.execute(cur.mogrify(pg_query, params))
|
||||
row = cur.fetchone()
|
||||
if row is None:
|
||||
return {"errors": ["something went wrong while creating the dashboard"]}
|
||||
return {"data": get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=row["dashboard_id"])}
|
||||
|
||||
|
||||
def get_dashboards(project_id, user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
pg_query = f"""SELECT *
|
||||
FROM dashboards
|
||||
WHERE deleted_at ISNULL
|
||||
AND project_id = %(projectId)s
|
||||
AND (user_id = %(userId)s OR is_public);"""
|
||||
params = {"userId": user_id, "projectId": project_id}
|
||||
cur.execute(cur.mogrify(pg_query, params))
|
||||
rows = cur.fetchall()
|
||||
return helper.list_to_camel_case(rows)
|
||||
|
||||
|
||||
def get_dashboard(project_id, user_id, dashboard_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
pg_query = """SELECT dashboards.*, all_metric_widgets.widgets AS widgets
|
||||
FROM dashboards
|
||||
LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(raw_metrics), '[]') AS widgets
|
||||
FROM (SELECT dashboard_widgets.*, metrics.*, metric_series.series
|
||||
FROM metrics
|
||||
INNER JOIN dashboard_widgets USING (metric_id)
|
||||
LEFT JOIN LATERAL (SELECT COALESCE(JSONB_AGG(metric_series.* ORDER BY index),'[]') AS series
|
||||
FROM metric_series
|
||||
WHERE metric_series.metric_id = metrics.metric_id
|
||||
AND metric_series.deleted_at ISNULL
|
||||
) AS metric_series ON (TRUE)
|
||||
WHERE dashboard_widgets.dashboard_id = dashboards.dashboard_id
|
||||
AND metrics.deleted_at ISNULL
|
||||
AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL)) AS raw_metrics
|
||||
) AS all_metric_widgets ON (TRUE)
|
||||
WHERE dashboards.deleted_at ISNULL
|
||||
AND dashboards.project_id = %(projectId)s
|
||||
AND dashboard_id = %(dashboard_id)s
|
||||
AND (dashboards.user_id = %(userId)s OR is_public);"""
|
||||
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id}
|
||||
cur.execute(cur.mogrify(pg_query, params))
|
||||
row = cur.fetchone()
|
||||
if row is not None:
|
||||
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
|
||||
for w in row["widgets"]:
|
||||
w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
|
||||
w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
|
||||
for s in w["series"]:
|
||||
s["created_at"] = TimeUTC.datetime_to_timestamp(s["created_at"])
|
||||
return helper.dict_to_camel_case(row)
|
||||
|
||||
|
||||
def delete_dashboard(project_id, user_id, dashboard_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
pg_query = """UPDATE dashboards
|
||||
SET deleted_at = timezone('utc'::text, now())
|
||||
WHERE dashboards.project_id = %(projectId)s
|
||||
AND dashboard_id = %(dashboard_id)s
|
||||
AND (dashboards.user_id = %(userId)s OR is_public);"""
|
||||
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id}
|
||||
cur.execute(cur.mogrify(pg_query, params))
|
||||
return {"data": {"success": True}}
|
||||
|
||||
|
||||
def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashboardSchema):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
pg_query = """SELECT COALESCE(COUNT(*),0) AS count
|
||||
FROM dashboard_widgets
|
||||
WHERE dashboard_id = %(dashboard_id)s;"""
|
||||
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()}
|
||||
cur.execute(cur.mogrify(pg_query, params))
|
||||
row = cur.fetchone()
|
||||
offset = row["count"]
|
||||
pg_query = f"""UPDATE dashboards
|
||||
SET name = %(name)s,
|
||||
description= %(description)s
|
||||
{", is_public = %(is_public)s" if data.is_public is not None else ""}
|
||||
{", is_pinned = %(is_pinned)s" if data.is_pinned is not None else ""}
|
||||
WHERE dashboards.project_id = %(projectId)s
|
||||
AND dashboard_id = %(dashboard_id)s
|
||||
AND (dashboards.user_id = %(userId)s OR is_public)"""
|
||||
if data.metrics is not None and len(data.metrics) > 0:
|
||||
pg_query = f"""WITH dash AS ({pg_query})
|
||||
INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
|
||||
VALUES {",".join([f"(%(dashboard_id)s, %(metric_id_{i})s, %(userId)s, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id_{i})s)||%(config_{i})s)" for i in range(len(data.metrics))])};"""
|
||||
for i, m in enumerate(data.metrics):
|
||||
params[f"metric_id_{i}"] = m
|
||||
# params[f"config_{i}"] = schemas.AddWidgetToDashboardPayloadSchema.schema() \
|
||||
# .get("properties", {}).get("config", {}).get("default", {})
|
||||
# params[f"config_{i}"]["position"] = i
|
||||
# params[f"config_{i}"] = json.dumps(params[f"config_{i}"])
|
||||
params[f"config_{i}"] = json.dumps({"position": i + offset})
|
||||
|
||||
cur.execute(cur.mogrify(pg_query, params))
|
||||
|
||||
return get_dashboard(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
|
||||
|
||||
|
||||
def get_widget(project_id, user_id, dashboard_id, widget_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
pg_query = """SELECT metrics.*, metric_series.series
|
||||
FROM dashboard_widgets
|
||||
INNER JOIN dashboards USING (dashboard_id)
|
||||
INNER JOIN metrics USING (metric_id)
|
||||
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index), '[]'::jsonb) AS series
|
||||
FROM metric_series
|
||||
WHERE metric_series.metric_id = metrics.metric_id
|
||||
AND metric_series.deleted_at ISNULL
|
||||
) AS metric_series ON (TRUE)
|
||||
WHERE dashboard_id = %(dashboard_id)s
|
||||
AND widget_id = %(widget_id)s
|
||||
AND (dashboards.is_public OR dashboards.user_id = %(userId)s)
|
||||
AND dashboards.deleted_at IS NULL
|
||||
AND metrics.deleted_at ISNULL
|
||||
AND (metrics.project_id = %(projectId)s OR metrics.project_id ISNULL)
|
||||
AND (metrics.is_public OR metrics.user_id = %(userId)s);"""
|
||||
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id}
|
||||
cur.execute(cur.mogrify(pg_query, params))
|
||||
row = cur.fetchone()
|
||||
return helper.dict_to_camel_case(row)
|
||||
|
||||
|
||||
def add_widget(project_id, user_id, dashboard_id, data: schemas.AddWidgetToDashboardPayloadSchema):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
pg_query = """INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config)
|
||||
SELECT %(dashboard_id)s AS dashboard_id, %(metric_id)s AS metric_id,
|
||||
%(userId)s AS user_id, (SELECT default_config FROM metrics WHERE metric_id=%(metric_id)s)||%(config)s::jsonb AS config
|
||||
WHERE EXISTS(SELECT 1 FROM dashboards
|
||||
WHERE dashboards.deleted_at ISNULL AND dashboards.project_id = %(projectId)s
|
||||
AND dashboard_id = %(dashboard_id)s
|
||||
AND (dashboards.user_id = %(userId)s OR is_public))
|
||||
RETURNING *;"""
|
||||
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()}
|
||||
params["config"] = json.dumps(data.config)
|
||||
cur.execute(cur.mogrify(pg_query, params))
|
||||
row = cur.fetchone()
|
||||
return helper.dict_to_camel_case(row)
|
||||
|
||||
|
||||
def update_widget(project_id, user_id, dashboard_id, widget_id, data: schemas.UpdateWidgetPayloadSchema):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
pg_query = """UPDATE dashboard_widgets
|
||||
SET config= %(config)s
|
||||
WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s
|
||||
RETURNING *;"""
|
||||
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id,
|
||||
"widget_id": widget_id, **data.dict()}
|
||||
params["config"] = json.dumps(data.config)
|
||||
cur.execute(cur.mogrify(pg_query, params))
|
||||
row = cur.fetchone()
|
||||
return helper.dict_to_camel_case(row)
|
||||
|
||||
|
||||
def remove_widget(project_id, user_id, dashboard_id, widget_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
pg_query = """DELETE FROM dashboard_widgets
|
||||
WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s;"""
|
||||
params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, "widget_id": widget_id}
|
||||
cur.execute(cur.mogrify(pg_query, params))
|
||||
return {"data": {"success": True}}
|
||||
|
||||
|
||||
def pin_dashboard(project_id, user_id, dashboard_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
pg_query = """UPDATE dashboards
|
||||
SET is_pinned = FALSE
|
||||
WHERE project_id=%(project_id)s;
|
||||
UPDATE dashboards
|
||||
SET is_pinned = True
|
||||
WHERE dashboard_id=%(dashboard_id)s AND project_id=%(project_id)s AND deleted_at ISNULL
|
||||
RETURNING *;"""
|
||||
params = {"userId": user_id, "project_id": project_id, "dashboard_id": dashboard_id}
|
||||
cur.execute(cur.mogrify(pg_query, params))
|
||||
row = cur.fetchone()
|
||||
return helper.dict_to_camel_case(row)
|
||||
|
||||
|
||||
def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.CreateCustomMetricsSchema):
|
||||
metric_id = custom_metrics.create(project_id=project_id, user_id=user_id, data=data, dashboard=True)
|
||||
return add_widget(project_id=project_id, user_id=user_id, dashboard_id=dashboard_id,
|
||||
data=schemas.AddWidgetToDashboardPayloadSchema(metricId=metric_id))
|
||||
|
||||
|
||||
PREDEFINED = {schemas.TemplatePredefinedKeys.count_sessions: metrics.get_processed_sessions,
|
||||
schemas.TemplatePredefinedKeys.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
|
||||
schemas.TemplatePredefinedKeys.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
|
||||
schemas.TemplatePredefinedKeys.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
|
||||
schemas.TemplatePredefinedKeys.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
|
||||
schemas.TemplatePredefinedKeys.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
|
||||
schemas.TemplatePredefinedKeys.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
|
||||
schemas.TemplatePredefinedKeys.avg_session_duration: metrics.get_user_activity_avg_session_duration,
|
||||
schemas.TemplatePredefinedKeys.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
|
||||
schemas.TemplatePredefinedKeys.avg_pages_response_time: metrics.get_pages_response_time,
|
||||
schemas.TemplatePredefinedKeys.avg_response_time: metrics.get_top_metrics_avg_response_time,
|
||||
schemas.TemplatePredefinedKeys.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
|
||||
schemas.TemplatePredefinedKeys.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
|
||||
schemas.TemplatePredefinedKeys.avg_till_first_bit: metrics.get_top_metrics_avg_till_first_bit,
|
||||
schemas.TemplatePredefinedKeys.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
|
||||
schemas.TemplatePredefinedKeys.count_requests: metrics.get_top_metrics_count_requests,
|
||||
schemas.TemplatePredefinedKeys.avg_time_to_render: metrics.get_time_to_render,
|
||||
schemas.TemplatePredefinedKeys.avg_used_js_heap_size: metrics.get_memory_consumption,
|
||||
schemas.TemplatePredefinedKeys.avg_cpu: metrics.get_avg_cpu,
|
||||
schemas.TemplatePredefinedKeys.avg_fps: metrics.get_avg_fps,
|
||||
schemas.TemplatePredefinedKeys.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
|
||||
schemas.TemplatePredefinedKeys.domains_errors_4xx: metrics.get_domains_errors_4xx,
|
||||
schemas.TemplatePredefinedKeys.domains_errors_5xx: metrics.get_domains_errors_5xx,
|
||||
schemas.TemplatePredefinedKeys.errors_per_domains: metrics.get_errors_per_domains,
|
||||
schemas.TemplatePredefinedKeys.calls_errors: metrics.get_calls_errors,
|
||||
schemas.TemplatePredefinedKeys.errors_by_type: metrics.get_errors_per_type,
|
||||
schemas.TemplatePredefinedKeys.errors_by_origin: metrics.get_resources_by_party,
|
||||
schemas.TemplatePredefinedKeys.speed_index_by_location: metrics.get_speed_index_location,
|
||||
schemas.TemplatePredefinedKeys.slowest_domains: metrics.get_slowest_domains,
|
||||
schemas.TemplatePredefinedKeys.sessions_per_browser: metrics.get_sessions_per_browser,
|
||||
schemas.TemplatePredefinedKeys.time_to_render: metrics.get_time_to_render,
|
||||
schemas.TemplatePredefinedKeys.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
|
||||
schemas.TemplatePredefinedKeys.memory_consumption: metrics.get_memory_consumption,
|
||||
schemas.TemplatePredefinedKeys.cpu_load: metrics.get_avg_cpu,
|
||||
schemas.TemplatePredefinedKeys.frame_rate: metrics.get_avg_fps,
|
||||
schemas.TemplatePredefinedKeys.crashes: metrics.get_crashes,
|
||||
schemas.TemplatePredefinedKeys.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
|
||||
schemas.TemplatePredefinedKeys.pages_dom_buildtime: metrics.get_pages_dom_build_time,
|
||||
schemas.TemplatePredefinedKeys.pages_response_time: metrics.get_pages_response_time,
|
||||
schemas.TemplatePredefinedKeys.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
|
||||
schemas.TemplatePredefinedKeys.missing_resources: metrics.get_missing_resources_trend,
|
||||
schemas.TemplatePredefinedKeys.slowest_resources: metrics.get_slowest_resources,
|
||||
schemas.TemplatePredefinedKeys.resources_fetch_time: metrics.get_resources_loading_time,
|
||||
schemas.TemplatePredefinedKeys.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
|
||||
schemas.TemplatePredefinedKeys.resources_count_by_type: metrics.get_resources_count_by_type,
|
||||
}
|
||||
|
||||
|
||||
def get_predefined_metric(key: schemas.TemplatePredefinedKeys, project_id: int, data: dict):
|
||||
return PREDEFINED.get(key, lambda *args: None)(project_id=project_id, **data)
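# Usage sketch: PREDEFINED maps a template key to its metrics function, and the chart
# payload dict is spread into keyword arguments; unknown keys hit the no-op lambda and
# return None. The payload keys below are assumptions, shown only for illustration.
#   get_predefined_metric(key=schemas.TemplatePredefinedKeys.count_sessions, project_id=1,
#                         data={"startTimestamp": start_ts, "endTimestamp": end_ts})
#   # -> metrics.get_processed_sessions(project_id=1, startTimestamp=start_ts, endTimestamp=end_ts)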
|
||||
|
||||
|
||||
def make_chart_metrics(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema):
|
||||
raw_metric = custom_metrics.get_with_template(metric_id=metric_id, project_id=project_id, user_id=user_id,
|
||||
include_dashboard=False)
|
||||
if raw_metric is None:
|
||||
return None
|
||||
metric: schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate.parse_obj(raw_metric)
|
||||
if metric.is_template and metric.predefined_key is None:
|
||||
return None
|
||||
if metric.is_template:
|
||||
return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
|
||||
else:
|
||||
return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=metric_id, data=data,
|
||||
metric=raw_metric)
|
||||
|
||||
|
||||
def make_chart_widget(dashboard_id, project_id, user_id, widget_id, data: schemas.CustomMetricChartPayloadSchema):
|
||||
raw_metric = get_widget(widget_id=widget_id, project_id=project_id, user_id=user_id, dashboard_id=dashboard_id)
|
||||
if raw_metric is None:
|
||||
return None
|
||||
    metric: schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate.parse_obj(raw_metric)
|
||||
if metric.is_template:
|
||||
return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict())
|
||||
else:
|
||||
return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=raw_metric["metricId"],
|
||||
data=data, metric=raw_metric)
|
||||
786 ee/api/chalicelib/core/errors.py Normal file
@ -0,0 +1,786 @@

import json

import schemas
from chalicelib.core import sourcemaps
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size

from decouple import config

if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
    from chalicelib.core import sessions_legacy as sessions
else:
    from chalicelib.core import sessions


def get(error_id, family=False):
    if family:
        return get_batch([error_id])
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            "SELECT * FROM events.errors AS e INNER JOIN public.errors AS re USING(error_id) WHERE error_id = %(error_id)s;",
            {"error_id": error_id})
        cur.execute(query=query)
        result = cur.fetchone()
        if result is not None:
            result["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(result["stacktrace_parsed_at"])
        return helper.dict_to_camel_case(result)


def get_batch(error_ids):
|
||||
if len(error_ids) == 0:
|
||||
return []
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(
|
||||
"""
|
||||
WITH RECURSIVE error_family AS (
|
||||
SELECT *
|
||||
FROM public.errors
|
||||
WHERE error_id IN %(error_ids)s
|
||||
UNION
|
||||
SELECT child_errors.*
|
||||
FROM public.errors AS child_errors
|
||||
INNER JOIN error_family ON error_family.error_id = child_errors.parent_error_id OR error_family.parent_error_id = child_errors.error_id
|
||||
)
|
||||
SELECT *
|
||||
FROM error_family;""",
|
||||
{"error_ids": tuple(error_ids)})
|
||||
cur.execute(query=query)
|
||||
errors = cur.fetchall()
|
||||
for e in errors:
|
||||
e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
|
||||
return helper.list_to_camel_case(errors)
|
||||
|
||||
|
||||
def __flatten_sort_key_count_version(data, merge_nested=False):
|
||||
if data is None:
|
||||
return []
|
||||
return sorted(
|
||||
[
|
||||
{
|
||||
"name": f'{o["name"]}@{v["version"]}',
|
||||
"count": v["count"]
|
||||
} for o in data for v in o["partition"]
|
||||
],
|
||||
key=lambda o: o["count"], reverse=True) if merge_nested else \
|
||||
[
|
||||
{
|
||||
"name": o["name"],
|
||||
"count": o["count"],
|
||||
} for o in data
|
||||
]
|
||||
|
||||
|
||||
def __process_tags(row):
|
||||
return [
|
||||
{"name": "browser", "partitions": __flatten_sort_key_count_version(data=row.get("browsers_partition"))},
|
||||
{"name": "browser.ver",
|
||||
"partitions": __flatten_sort_key_count_version(data=row.pop("browsers_partition"), merge_nested=True)},
|
||||
{"name": "OS", "partitions": __flatten_sort_key_count_version(data=row.get("os_partition"))},
|
||||
{"name": "OS.ver",
|
||||
"partitions": __flatten_sort_key_count_version(data=row.pop("os_partition"), merge_nested=True)},
|
||||
{"name": "device.family", "partitions": __flatten_sort_key_count_version(data=row.get("device_partition"))},
|
||||
{"name": "device",
|
||||
"partitions": __flatten_sort_key_count_version(data=row.pop("device_partition"), merge_nested=True)},
|
||||
{"name": "country", "partitions": row.pop("country_partition")}
|
||||
]
|
||||
|
||||
|
||||
def get_details(project_id, error_id, user_id, **data):
|
||||
pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24")
|
||||
pg_sub_query24.append("error_id = %(error_id)s")
|
||||
pg_sub_query30 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size30")
|
||||
pg_sub_query30.append("error_id = %(error_id)s")
|
||||
pg_basic_query = __get_basic_constraints(time_constraint=False)
|
||||
pg_basic_query.append("error_id = %(error_id)s")
|
||||
with pg_client.PostgresClient() as cur:
|
||||
data["startDate24"] = TimeUTC.now(-1)
|
||||
data["endDate24"] = TimeUTC.now()
|
||||
data["startDate30"] = TimeUTC.now(-30)
|
||||
data["endDate30"] = TimeUTC.now()
|
||||
density24 = int(data.get("density24", 24))
|
||||
step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24, factor=1)
|
||||
density30 = int(data.get("density30", 30))
|
||||
step_size30 = __get_step_size(data["startDate30"], data["endDate30"], density30, factor=1)
|
||||
params = {
|
||||
"startDate24": data['startDate24'],
|
||||
"endDate24": data['endDate24'],
|
||||
"startDate30": data['startDate30'],
|
||||
"endDate30": data['endDate30'],
|
||||
"project_id": project_id,
|
||||
"userId": user_id,
|
||||
"step_size24": step_size24,
|
||||
"step_size30": step_size30,
|
||||
"error_id": error_id}
|
||||
|
||||
main_pg_query = f"""\
|
||||
SELECT error_id,
|
||||
name,
|
||||
message,
|
||||
users,
|
||||
sessions,
|
||||
last_occurrence,
|
||||
first_occurrence,
|
||||
last_session_id,
|
||||
browsers_partition,
|
||||
os_partition,
|
||||
device_partition,
|
||||
country_partition,
|
||||
chart24,
|
||||
chart30
|
||||
FROM (SELECT error_id,
|
||||
name,
|
||||
message,
|
||||
COUNT(DISTINCT user_uuid) AS users,
|
||||
COUNT(DISTINCT session_id) AS sessions
|
||||
FROM public.errors
|
||||
INNER JOIN events.errors AS s_errors USING (error_id)
|
||||
INNER JOIN public.sessions USING (session_id)
|
||||
WHERE error_id = %(error_id)s
|
||||
GROUP BY error_id, name, message) AS details
|
||||
INNER JOIN (SELECT error_id,
|
||||
MAX(timestamp) AS last_occurrence,
|
||||
MIN(timestamp) AS first_occurrence
|
||||
FROM events.errors
|
||||
WHERE error_id = %(error_id)s
|
||||
GROUP BY error_id) AS time_details USING (error_id)
|
||||
INNER JOIN (SELECT error_id,
|
||||
session_id AS last_session_id,
|
||||
user_os,
|
||||
user_os_version,
|
||||
user_browser,
|
||||
user_browser_version,
|
||||
user_device,
|
||||
user_device_type,
|
||||
user_uuid
|
||||
FROM events.errors INNER JOIN public.sessions USING (session_id)
|
||||
WHERE error_id = %(error_id)s
|
||||
ORDER BY errors.timestamp DESC
|
||||
LIMIT 1) AS last_session_details USING (error_id)
|
||||
INNER JOIN (SELECT jsonb_agg(browser_details) AS browsers_partition
|
||||
FROM (SELECT *
|
||||
FROM (SELECT user_browser AS name,
|
||||
COUNT(session_id) AS count
|
||||
FROM events.errors
|
||||
INNER JOIN sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_basic_query)}
|
||||
GROUP BY user_browser
|
||||
ORDER BY count DESC) AS count_per_browser_query
|
||||
INNER JOIN LATERAL (SELECT JSONB_AGG(version_details) AS partition
|
||||
FROM (SELECT user_browser_version AS version,
|
||||
COUNT(session_id) AS count
|
||||
FROM events.errors INNER JOIN public.sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_basic_query)}
|
||||
AND sessions.user_browser = count_per_browser_query.name
|
||||
GROUP BY user_browser_version
|
||||
ORDER BY count DESC) AS version_details
|
||||
) AS browser_version_details ON (TRUE)) AS browser_details) AS browser_details ON (TRUE)
|
||||
INNER JOIN (SELECT jsonb_agg(os_details) AS os_partition
|
||||
FROM (SELECT *
|
||||
FROM (SELECT user_os AS name,
|
||||
COUNT(session_id) AS count
|
||||
FROM events.errors INNER JOIN public.sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_basic_query)}
|
||||
GROUP BY user_os
|
||||
ORDER BY count DESC) AS count_per_os_details
|
||||
INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition
|
||||
FROM (SELECT COALESCE(user_os_version,'unknown') AS version, COUNT(session_id) AS count
|
||||
FROM events.errors INNER JOIN public.sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_basic_query)}
|
||||
AND sessions.user_os = count_per_os_details.name
|
||||
GROUP BY user_os_version
|
||||
ORDER BY count DESC) AS count_per_version_details
|
||||
GROUP BY count_per_os_details.name ) AS os_version_details
|
||||
ON (TRUE)) AS os_details) AS os_details ON (TRUE)
|
||||
INNER JOIN (SELECT jsonb_agg(device_details) AS device_partition
|
||||
FROM (SELECT *
|
||||
FROM (SELECT user_device_type AS name,
|
||||
COUNT(session_id) AS count
|
||||
FROM events.errors INNER JOIN public.sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_basic_query)}
|
||||
GROUP BY user_device_type
|
||||
ORDER BY count DESC) AS count_per_device_details
|
||||
INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_v_details) AS partition
|
||||
FROM (SELECT CASE
|
||||
WHEN user_device = '' OR user_device ISNULL
|
||||
THEN 'unknown'
|
||||
ELSE user_device END AS version,
|
||||
COUNT(session_id) AS count
|
||||
FROM events.errors INNER JOIN public.sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_basic_query)}
|
||||
AND sessions.user_device_type = count_per_device_details.name
|
||||
GROUP BY user_device
|
||||
ORDER BY count DESC) AS count_per_device_v_details
|
||||
GROUP BY count_per_device_details.name ) AS device_version_details
|
||||
ON (TRUE)) AS device_details) AS device_details ON (TRUE)
|
||||
INNER JOIN (SELECT jsonb_agg(count_per_country_details) AS country_partition
|
||||
FROM (SELECT user_country AS name,
|
||||
COUNT(session_id) AS count
|
||||
FROM events.errors INNER JOIN public.sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_basic_query)}
|
||||
GROUP BY user_country
|
||||
ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE)
|
||||
INNER JOIN (SELECT jsonb_agg(chart_details) AS chart24
|
||||
FROM (SELECT generated_timestamp AS timestamp,
|
||||
COUNT(session_id) AS count
|
||||
FROM generate_series(%(startDate24)s, %(endDate24)s, %(step_size24)s) AS generated_timestamp
|
||||
LEFT JOIN LATERAL (SELECT DISTINCT session_id
|
||||
FROM events.errors
|
||||
INNER JOIN public.sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_sub_query24)}
|
||||
) AS chart_details ON (TRUE)
|
||||
GROUP BY generated_timestamp
|
||||
ORDER BY generated_timestamp) AS chart_details) AS chart_details24 ON (TRUE)
|
||||
INNER JOIN (SELECT jsonb_agg(chart_details) AS chart30
|
||||
FROM (SELECT generated_timestamp AS timestamp,
|
||||
COUNT(session_id) AS count
|
||||
FROM generate_series(%(startDate30)s, %(endDate30)s, %(step_size30)s) AS generated_timestamp
|
||||
LEFT JOIN LATERAL (SELECT DISTINCT session_id
|
||||
FROM events.errors INNER JOIN public.sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_sub_query30)}) AS chart_details
|
||||
ON (TRUE)
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE);
|
||||
"""
|
||||
|
||||
# print("--------------------")
|
||||
# print(cur.mogrify(main_pg_query, params))
|
||||
# print("--------------------")
|
||||
cur.execute(cur.mogrify(main_pg_query, params))
|
||||
row = cur.fetchone()
|
||||
if row is None:
|
||||
return {"errors": ["error not found"]}
|
||||
row["tags"] = __process_tags(row)
|
||||
|
||||
query = cur.mogrify(
|
||||
f"""SELECT error_id, status, session_id, start_ts,
|
||||
parent_error_id,session_id, user_anonymous_id,
|
||||
user_id, user_uuid, user_browser, user_browser_version,
|
||||
user_os, user_os_version, user_device, payload,
|
||||
FALSE AS favorite,
|
||||
True AS viewed
|
||||
FROM public.errors AS pe
|
||||
INNER JOIN events.errors AS ee USING (error_id)
|
||||
INNER JOIN public.sessions USING (session_id)
|
||||
WHERE pe.project_id = %(project_id)s
|
||||
AND error_id = %(error_id)s
|
||||
ORDER BY start_ts DESC
|
||||
LIMIT 1;""",
|
||||
{"project_id": project_id, "error_id": error_id, "user_id": user_id})
|
||||
cur.execute(query=query)
|
||||
status = cur.fetchone()
|
||||
|
||||
if status is not None:
|
||||
row["stack"] = format_first_stack_frame(status).pop("stack")
|
||||
row["status"] = status.pop("status")
|
||||
row["parent_error_id"] = status.pop("parent_error_id")
|
||||
row["favorite"] = status.pop("favorite")
|
||||
row["viewed"] = status.pop("viewed")
|
||||
row["last_hydrated_session"] = status
|
||||
else:
|
||||
row["stack"] = []
|
||||
row["last_hydrated_session"] = None
|
||||
row["status"] = "untracked"
|
||||
row["parent_error_id"] = None
|
||||
row["favorite"] = False
|
||||
row["viewed"] = False
|
||||
return {"data": helper.dict_to_camel_case(row)}
|
||||
|
||||
|
||||
def get_details_chart(project_id, error_id, user_id, **data):
|
||||
pg_sub_query = __get_basic_constraints()
|
||||
pg_sub_query.append("error_id = %(error_id)s")
|
||||
pg_sub_query_chart = __get_basic_constraints(time_constraint=False, chart=True)
|
||||
pg_sub_query_chart.append("error_id = %(error_id)s")
|
||||
with pg_client.PostgresClient() as cur:
|
||||
if data.get("startDate") is None:
|
||||
data["startDate"] = TimeUTC.now(-7)
|
||||
else:
|
||||
data["startDate"] = int(data["startDate"])
|
||||
if data.get("endDate") is None:
|
||||
data["endDate"] = TimeUTC.now()
|
||||
else:
|
||||
data["endDate"] = int(data["endDate"])
|
||||
density = int(data.get("density", 7))
|
||||
step_size = __get_step_size(data["startDate"], data["endDate"], density, factor=1)
|
||||
params = {
|
||||
"startDate": data['startDate'],
|
||||
"endDate": data['endDate'],
|
||||
"project_id": project_id,
|
||||
"userId": user_id,
|
||||
"step_size": step_size,
|
||||
"error_id": error_id}
|
||||
|
||||
main_pg_query = f"""\
|
||||
SELECT %(error_id)s AS error_id,
|
||||
browsers_partition,
|
||||
os_partition,
|
||||
device_partition,
|
||||
country_partition,
|
||||
chart
|
||||
FROM (SELECT jsonb_agg(browser_details) AS browsers_partition
|
||||
FROM (SELECT *
|
||||
FROM (SELECT user_browser AS name,
|
||||
COUNT(session_id) AS count
|
||||
FROM events.errors INNER JOIN public.sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_sub_query)}
|
||||
GROUP BY user_browser
|
||||
ORDER BY count DESC) AS count_per_browser_query
|
||||
INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition
|
||||
FROM (SELECT user_browser_version AS version,
|
||||
COUNT(session_id) AS count
|
||||
FROM events.errors INNER JOIN public.sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_sub_query)}
|
||||
AND user_browser = count_per_browser_query.name
|
||||
GROUP BY user_browser_version
|
||||
ORDER BY count DESC) AS count_per_version_details) AS browser_version_details
|
||||
ON (TRUE)) AS browser_details) AS browser_details
|
||||
INNER JOIN (SELECT jsonb_agg(os_details) AS os_partition
|
||||
FROM (SELECT *
|
||||
FROM (SELECT user_os AS name,
|
||||
COUNT(session_id) AS count
|
||||
FROM events.errors INNER JOIN public.sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_sub_query)}
|
||||
GROUP BY user_os
|
||||
ORDER BY count DESC) AS count_per_os_details
|
||||
INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_query) AS partition
|
||||
FROM (SELECT COALESCE(user_os_version, 'unknown') AS version,
|
||||
COUNT(session_id) AS count
|
||||
FROM events.errors INNER JOIN public.sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_sub_query)}
|
||||
AND user_os = count_per_os_details.name
|
||||
GROUP BY user_os_version
|
||||
ORDER BY count DESC) AS count_per_version_query
|
||||
) AS os_version_query ON (TRUE)) AS os_details) AS os_details ON (TRUE)
|
||||
INNER JOIN (SELECT jsonb_agg(device_details) AS device_partition
|
||||
FROM (SELECT *
|
||||
FROM (SELECT user_device_type AS name,
|
||||
COUNT(session_id) AS count
|
||||
FROM events.errors INNER JOIN public.sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_sub_query)}
|
||||
GROUP BY user_device_type
|
||||
ORDER BY count DESC) AS count_per_device_details
|
||||
INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_details) AS partition
|
||||
FROM (SELECT CASE
|
||||
WHEN user_device = '' OR user_device ISNULL
|
||||
THEN 'unknown'
|
||||
ELSE user_device END AS version,
|
||||
COUNT(session_id) AS count
|
||||
FROM events.errors INNER JOIN public.sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_sub_query)}
|
||||
AND user_device_type = count_per_device_details.name
|
||||
GROUP BY user_device_type, user_device
|
||||
ORDER BY count DESC) AS count_per_device_details
|
||||
) AS device_version_details ON (TRUE)) AS device_details) AS device_details ON (TRUE)
|
||||
INNER JOIN (SELECT jsonb_agg(count_per_country_details) AS country_partition
|
||||
FROM (SELECT user_country AS name,
|
||||
COUNT(session_id) AS count
|
||||
FROM events.errors INNER JOIN public.sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_sub_query)}
|
||||
GROUP BY user_country
|
||||
ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE)
|
||||
INNER JOIN (SELECT jsonb_agg(chart_details) AS chart
|
||||
FROM (SELECT generated_timestamp AS timestamp,
|
||||
COUNT(session_id) AS count
|
||||
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
|
||||
LEFT JOIN LATERAL (SELECT DISTINCT session_id
|
||||
FROM events.errors
|
||||
INNER JOIN public.sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_sub_query_chart)}
|
||||
) AS chart_details ON (TRUE)
|
||||
GROUP BY generated_timestamp
|
||||
ORDER BY generated_timestamp) AS chart_details) AS chart_details ON (TRUE);"""
|
||||
|
||||
cur.execute(cur.mogrify(main_pg_query, params))
|
||||
row = cur.fetchone()
|
||||
if row is None:
|
||||
return {"errors": ["error not found"]}
|
||||
row["tags"] = __process_tags(row)
|
||||
return {"data": helper.dict_to_camel_case(row)}
|
||||
|
||||
|
||||
def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
                            endTime_arg_name="endDate", chart=False, step_size_name="step_size",
                            project_key="project_id"):
    if project_key is None:
        ch_sub_query = []
    else:
        ch_sub_query = [f"{project_key} =%(project_id)s"]
    if time_constraint:
        ch_sub_query += [f"timestamp >= %({startTime_arg_name})s",
                         f"timestamp < %({endTime_arg_name})s"]
    if chart:
        ch_sub_query += [f"timestamp >= generated_timestamp",
                         f"timestamp < generated_timestamp + %({step_size_name})s"]
    if platform == schemas.PlatformType.mobile:
        ch_sub_query.append("user_device_type = 'mobile'")
    elif platform == schemas.PlatformType.desktop:
        ch_sub_query.append("user_device_type = 'desktop'")
    return ch_sub_query
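# With the defaults, __get_basic_constraints() returns
#   ["project_id =%(project_id)s", "timestamp >= %(startDate)s", "timestamp < %(endDate)s"];
# chart callers pass time_constraint=False and chart=True instead, to bound each row to its
# generated_timestamp bucket.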
|
||||
|
||||
|
||||
def __get_sort_key(key):
|
||||
return {
|
||||
schemas.ErrorSort.occurrence: "max_datetime",
|
||||
schemas.ErrorSort.users_count: "users",
|
||||
schemas.ErrorSort.sessions_count: "sessions"
|
||||
}.get(key, 'max_datetime')
|
||||
|
||||
|
||||
def search(data: schemas.SearchErrorsSchema, project_id, user_id):
|
||||
empty_response = {
|
||||
'total': 0,
|
||||
'errors': []
|
||||
}
|
||||
|
||||
platform = None
|
||||
for f in data.filters:
|
||||
if f.type == schemas.FilterType.platform and len(f.value) > 0:
|
||||
platform = f.value[0]
|
||||
pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id")
|
||||
pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
|
||||
"pe.project_id=%(project_id)s"]
|
||||
# To ignore Script error
|
||||
pg_sub_query.append("pe.message!='Script error.'")
|
||||
pg_sub_query_chart = __get_basic_constraints(platform, time_constraint=False, chart=True, project_key=None)
|
||||
# pg_sub_query_chart.append("source ='js_exception'")
|
||||
pg_sub_query_chart.append("errors.error_id =details.error_id")
|
||||
statuses = []
|
||||
error_ids = None
|
||||
if data.startDate is None:
|
||||
data.startDate = TimeUTC.now(-30)
|
||||
if data.endDate is None:
|
||||
data.endDate = TimeUTC.now(1)
|
||||
if len(data.events) > 0 or len(data.filters) > 0:
|
||||
print("-- searching for sessions before errors")
|
||||
statuses = sessions.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True,
|
||||
error_status=data.status)
|
||||
if len(statuses) == 0:
|
||||
return empty_response
|
||||
error_ids = [e["errorId"] for e in statuses]
|
||||
with pg_client.PostgresClient() as cur:
|
||||
step_size = __get_step_size(data.startDate, data.endDate, data.density, factor=1)
|
||||
sort = __get_sort_key('datetime')
|
||||
if data.sort is not None:
|
||||
sort = __get_sort_key(data.sort)
|
||||
order = schemas.SortOrderType.desc
|
||||
if data.order is not None:
|
||||
order = data.order
|
||||
extra_join = ""
|
||||
|
||||
params = {
|
||||
"startDate": data.startDate,
|
||||
"endDate": data.endDate,
|
||||
"project_id": project_id,
|
||||
"userId": user_id,
|
||||
"step_size": step_size}
|
||||
if data.status != schemas.ErrorStatus.all:
|
||||
pg_sub_query.append("status = %(error_status)s")
|
||||
params["error_status"] = data.status
|
||||
if data.limit is not None and data.page is not None:
|
||||
params["errors_offset"] = (data.page - 1) * data.limit
|
||||
params["errors_limit"] = data.limit
|
||||
else:
|
||||
params["errors_offset"] = 0
|
||||
params["errors_limit"] = 200
|
||||
|
||||
if error_ids is not None:
|
||||
params["error_ids"] = tuple(error_ids)
|
||||
pg_sub_query.append("error_id IN %(error_ids)s")
|
||||
# if data.bookmarked:
|
||||
# pg_sub_query.append("ufe.user_id = %(userId)s")
|
||||
# extra_join += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
|
||||
if data.query is not None and len(data.query) > 0:
|
||||
pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
|
||||
params["error_query"] = helper.values_for_operator(value=data.query,
|
||||
op=schemas.SearchEventOperator._contains)
|
||||
|
||||
main_pg_query = f"""SELECT full_count,
|
||||
error_id,
|
||||
name,
|
||||
message,
|
||||
users,
|
||||
sessions,
|
||||
last_occurrence,
|
||||
first_occurrence,
|
||||
chart
|
||||
FROM (SELECT COUNT(details) OVER () AS full_count, details.*
|
||||
FROM (SELECT error_id,
|
||||
name,
|
||||
message,
|
||||
COUNT(DISTINCT COALESCE(user_id,user_uuid::text)) AS users,
|
||||
COUNT(DISTINCT session_id) AS sessions,
|
||||
MAX(timestamp) AS max_datetime,
|
||||
MIN(timestamp) AS min_datetime
|
||||
FROM events.errors
|
||||
INNER JOIN public.errors AS pe USING (error_id)
|
||||
INNER JOIN public.sessions USING (session_id)
|
||||
{extra_join}
|
||||
WHERE {" AND ".join(pg_sub_query)}
|
||||
GROUP BY error_id, name, message
|
||||
ORDER BY {sort} {order}) AS details
|
||||
LIMIT %(errors_limit)s OFFSET %(errors_offset)s
|
||||
) AS details
|
||||
INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence,
|
||||
MIN(timestamp) AS first_occurrence
|
||||
FROM events.errors
|
||||
WHERE errors.error_id = details.error_id) AS time_details ON (TRUE)
|
||||
INNER JOIN LATERAL (SELECT jsonb_agg(chart_details) AS chart
|
||||
FROM (SELECT generated_timestamp AS timestamp,
|
||||
COUNT(session_id) AS count
|
||||
FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
|
||||
LEFT JOIN LATERAL (SELECT DISTINCT session_id
|
||||
FROM events.errors
|
||||
WHERE {" AND ".join(pg_sub_query_chart)}
|
||||
) AS sessions ON (TRUE)
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp) AS chart_details) AS chart_details ON (TRUE);"""
|
||||
|
||||
# print("--------------------")
|
||||
# print(cur.mogrify(main_pg_query, params))
|
||||
# print("--------------------")
|
||||
|
||||
cur.execute(cur.mogrify(main_pg_query, params))
|
||||
rows = cur.fetchall()
|
||||
total = 0 if len(rows) == 0 else rows[0]["full_count"]
|
||||
|
||||
if total == 0:
|
||||
rows = []
|
||||
else:
|
||||
if len(statuses) == 0:
|
||||
query = cur.mogrify(
|
||||
"""SELECT error_id,
|
||||
COALESCE((SELECT TRUE
|
||||
FROM public.user_viewed_errors AS ve
|
||||
WHERE errors.error_id = ve.error_id
|
||||
AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed
|
||||
FROM public.errors
|
||||
WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
|
||||
{"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
|
||||
"user_id": user_id})
|
||||
cur.execute(query=query)
|
||||
statuses = helper.list_to_camel_case(cur.fetchall())
|
||||
statuses = {
|
||||
s["errorId"]: s for s in statuses
|
||||
}
|
||||
|
||||
for r in rows:
|
||||
r.pop("full_count")
|
||||
if r["error_id"] in statuses:
|
||||
r["viewed"] = statuses[r["error_id"]]["viewed"]
|
||||
else:
|
||||
r["viewed"] = False
|
||||
|
||||
return {
|
||||
'total': total,
|
||||
'errors': helper.list_to_camel_case(rows)
|
||||
}
|
||||
|
||||
|
||||
def __save_stacktrace(error_id, data):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(
|
||||
"""UPDATE public.errors
|
||||
SET stacktrace=%(data)s::jsonb, stacktrace_parsed_at=timezone('utc'::text, now())
|
||||
WHERE error_id = %(error_id)s;""",
|
||||
{"error_id": error_id, "data": json.dumps(data)})
|
||||
cur.execute(query=query)
|
||||
|
||||
|
||||
def get_trace(project_id, error_id):
|
||||
error = get(error_id=error_id, family=False)
|
||||
if error is None:
|
||||
return {"errors": ["error not found"]}
|
||||
if error.get("source", "") != "js_exception":
|
||||
return {"errors": ["this source of errors doesn't have a sourcemap"]}
|
||||
if error.get("payload") is None:
|
||||
return {"errors": ["null payload"]}
|
||||
if error.get("stacktrace") is not None:
|
||||
return {"sourcemapUploaded": True,
|
||||
"trace": error.get("stacktrace"),
|
||||
"preparsed": True}
|
||||
trace, all_exists = sourcemaps.get_traces_group(project_id=project_id, payload=error["payload"])
|
||||
if all_exists:
|
||||
__save_stacktrace(error_id=error_id, data=trace)
|
||||
return {"sourcemapUploaded": all_exists,
|
||||
"trace": trace,
|
||||
"preparsed": False}
|
||||
|
||||
|
||||
def get_sessions(start_date, end_date, project_id, user_id, error_id):
|
||||
extra_constraints = ["s.project_id = %(project_id)s",
|
||||
"s.start_ts >= %(startDate)s",
|
||||
"s.start_ts <= %(endDate)s",
|
||||
"e.error_id = %(error_id)s"]
|
||||
if start_date is None:
|
||||
start_date = TimeUTC.now(-7)
|
||||
if end_date is None:
|
||||
end_date = TimeUTC.now()
|
||||
|
||||
params = {
|
||||
"startDate": start_date,
|
||||
"endDate": end_date,
|
||||
"project_id": project_id,
|
||||
"userId": user_id,
|
||||
"error_id": error_id}
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(
|
||||
f"""SELECT s.project_id,
|
||||
s.session_id::text AS session_id,
|
||||
s.user_uuid,
|
||||
s.user_id,
|
||||
s.user_agent,
|
||||
s.user_os,
|
||||
s.user_browser,
|
||||
s.user_device,
|
||||
s.user_country,
|
||||
s.start_ts,
|
||||
s.duration,
|
||||
s.events_count,
|
||||
s.pages_count,
|
||||
s.errors_count,
|
||||
s.issue_types,
|
||||
COALESCE((SELECT TRUE
|
||||
FROM public.user_favorite_sessions AS fs
|
||||
WHERE s.session_id = fs.session_id
|
||||
AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS favorite,
|
||||
COALESCE((SELECT TRUE
|
||||
FROM public.user_viewed_sessions AS fs
|
||||
WHERE s.session_id = fs.session_id
|
||||
AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
|
||||
FROM public.sessions AS s INNER JOIN events.errors AS e USING (session_id)
|
||||
WHERE {" AND ".join(extra_constraints)}
|
||||
ORDER BY s.start_ts DESC;""",
|
||||
params)
|
||||
cur.execute(query=query)
|
||||
sessions_list = []
|
||||
total = cur.rowcount
|
||||
row = cur.fetchone()
|
||||
while row is not None and len(sessions_list) < 100:
|
||||
sessions_list.append(row)
|
||||
row = cur.fetchone()
|
||||
|
||||
return {
|
||||
'total': total,
|
||||
'sessions': helper.list_to_camel_case(sessions_list)
|
||||
}
|
||||
|
||||
|
||||
ACTION_STATE = {
|
||||
"unsolve": 'unresolved',
|
||||
"solve": 'resolved',
|
||||
"ignore": 'ignored'
|
||||
}
|
||||
|
||||
|
||||
def change_state(project_id, user_id, error_id, action):
|
||||
errors = get(error_id, family=True)
|
||||
print(len(errors))
|
||||
status = ACTION_STATE.get(action)
|
||||
if errors is None or len(errors) == 0:
|
||||
return {"errors": ["error not found"]}
|
||||
if errors[0]["status"] == status:
|
||||
return {"errors": [f"error is already {status}"]}
|
||||
|
||||
if errors[0]["status"] == ACTION_STATE["solve"] and status == ACTION_STATE["ignore"]:
|
||||
return {"errors": [f"state transition not permitted {errors[0]['status']} -> {status}"]}
|
||||
|
||||
params = {
|
||||
"userId": user_id,
|
||||
"error_ids": tuple([e["errorId"] for e in errors]),
|
||||
"status": status}
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(
|
||||
"""UPDATE public.errors
|
||||
SET status = %(status)s
|
||||
WHERE error_id IN %(error_ids)s
|
||||
RETURNING status""",
|
||||
params)
|
||||
cur.execute(query=query)
|
||||
row = cur.fetchone()
|
||||
if row is not None:
|
||||
for e in errors:
|
||||
e["status"] = row["status"]
|
||||
return {"data": errors}
|
||||
|
||||
|
||||
MAX_RANK = 2
|
||||
|
||||
|
||||
def __status_rank(status):
|
||||
return {
|
||||
'unresolved': MAX_RANK - 2,
|
||||
'ignored': MAX_RANK - 1,
|
||||
'resolved': MAX_RANK
|
||||
}.get(status)
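A minimal sketch (illustrative only, not part of this commit) of how merge() below uses this ranking: the merged group keeps the highest-ranked, i.e. most final, status.
# Illustrative sketch, not part of the diff: picking the winning status the way merge() does.
_RANKS = {"unresolved": 0, "ignored": 1, "resolved": 2}  # mirrors __status_rank with MAX_RANK = 2

def pick_merged_status(statuses):
    # the merged error keeps the "most final" status seen across the group
    return max(statuses, key=_RANKS.get)

assert pick_merged_status(["unresolved", "ignored"]) == "ignored"
assert pick_merged_status(["ignored", "resolved", "unresolved"]) == "resolved"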
|
||||
|
||||
|
||||
def merge(error_ids):
|
||||
error_ids = list(set(error_ids))
|
||||
errors = get_batch(error_ids)
|
||||
if len(error_ids) <= 1 or len(error_ids) > len(errors):
|
||||
return {"errors": ["invalid list of ids"]}
|
||||
error_ids = [e["errorId"] for e in errors]
|
||||
parent_error_id = error_ids[0]
|
||||
status = "unresolved"
|
||||
for e in errors:
|
||||
if __status_rank(status) < __status_rank(e["status"]):
|
||||
status = e["status"]
|
||||
if __status_rank(status) == MAX_RANK:
|
||||
break
|
||||
params = {
|
||||
"error_ids": tuple(error_ids),
|
||||
"parent_error_id": parent_error_id,
|
||||
"status": status
|
||||
}
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(
|
||||
"""UPDATE public.errors
|
||||
SET parent_error_id = %(parent_error_id)s, status = %(status)s
|
||||
WHERE error_id IN %(error_ids)s OR parent_error_id IN %(error_ids)s;""",
|
||||
params)
|
||||
cur.execute(query=query)
|
||||
# row = cur.fetchone()
|
||||
|
||||
return {"data": "success"}
|
||||
|
||||
|
||||
def format_first_stack_frame(error):
|
||||
error["stack"] = sourcemaps.format_payload(error.pop("payload"), truncate_to_first=True)
|
||||
for s in error["stack"]:
|
||||
for c in s.get("context", []):
|
||||
for sci, sc in enumerate(c):
|
||||
if isinstance(sc, str) and len(sc) > 1000:
|
||||
c[sci] = sc[:1000]
|
||||
# convert bytes to string:
|
||||
if isinstance(s["filename"], bytes):
|
||||
s["filename"] = s["filename"].decode("utf-8")
|
||||
return error
|
||||
|
||||
|
||||
def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(
|
||||
"""WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(user_id)s)
|
||||
SELECT COUNT(timed_errors.*) AS unresolved_and_unviewed
|
||||
FROM (SELECT root_error.error_id
|
||||
FROM events.errors
|
||||
INNER JOIN public.errors AS root_error USING (error_id)
|
||||
LEFT JOIN user_viewed USING (error_id)
|
||||
WHERE project_id = %(project_id)s
|
||||
AND timestamp >= %(startTimestamp)s
|
||||
AND timestamp <= %(endTimestamp)s
|
||||
AND source = 'js_exception'
|
||||
AND root_error.status = 'unresolved'
|
||||
AND user_viewed.error_id ISNULL
|
||||
LIMIT 1
|
||||
) AS timed_errors;""",
|
||||
{"project_id": project_id, "user_id": user_id, "startTimestamp": startTimestamp,
|
||||
"endTimestamp": endTimestamp})
|
||||
cur.execute(query=query)
|
||||
row = cur.fetchone()
|
||||
|
||||
return {
|
||||
"data": helper.dict_to_camel_case(row)
|
||||
}
|
||||
399
ee/api/chalicelib/core/events.py
Normal file
|
|
@ -0,0 +1,399 @@
|
|||
import schemas
|
||||
from chalicelib.core import issues
|
||||
from chalicelib.core import metadata
|
||||
from chalicelib.core import sessions_metas
|
||||
|
||||
from chalicelib.utils import pg_client, helper
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
from chalicelib.utils.event_filter_definition import SupportedFilter, Event
|
||||
|
||||
from decouple import config
|
||||
|
||||
if config("EXP_AUTOCOMPLETE", cast=bool, default=False):
|
||||
from . import autocomplete_exp as autocomplete
|
||||
else:
|
||||
from . import autocomplete as autocomplete
|
||||
|
||||
|
||||
def get_customs_by_sessionId2_pg(session_id, project_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(cur.mogrify("""\
|
||||
SELECT
|
||||
c.*,
|
||||
'CUSTOM' AS type
|
||||
FROM events_common.customs AS c
|
||||
WHERE
|
||||
c.session_id = %(session_id)s
|
||||
ORDER BY c.timestamp;""",
|
||||
{"project_id": project_id, "session_id": session_id})
|
||||
)
|
||||
rows = cur.fetchall()
|
||||
return helper.dict_to_camel_case(rows)
|
||||
|
||||
|
||||
def __merge_cells(rows, start, count, replacement):
|
||||
rows[start] = replacement
|
||||
rows = rows[:start + 1] + rows[start + count:]
|
||||
return rows
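A tiny worked example (illustrative only, not part of this commit) of what __merge_cells does, assuming it is called from inside this module:
# Illustrative only: collapse `count` consecutive rows starting at `start` into one replacement row.
rows = [{"id": 1}, {"id": 2}, {"id": 3}, {"id": 4}]
merged = __merge_cells(rows, start=1, count=2,
                       replacement={"id": 2, "type": "CLICKRAGE", "count": 2})
assert merged == [{"id": 1}, {"id": 2, "type": "CLICKRAGE", "count": 2}, {"id": 4}]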
|
||||
|
||||
|
||||
def __get_grouped_clickrage(rows, session_id, project_id):
|
||||
click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage", project_id=project_id)
|
||||
if len(click_rage_issues) == 0:
|
||||
return rows
|
||||
|
||||
for c in click_rage_issues:
|
||||
merge_count = c.get("payload")
|
||||
if merge_count is not None:
|
||||
merge_count = merge_count.get("count", 3)
|
||||
else:
|
||||
merge_count = 3
|
||||
for i in range(len(rows)):
|
||||
if rows[i]["timestamp"] == c["timestamp"]:
|
||||
rows = __merge_cells(rows=rows,
|
||||
start=i,
|
||||
count=merge_count,
|
||||
replacement={**rows[i], "type": "CLICKRAGE", "count": merge_count})
|
||||
break
|
||||
return rows
|
||||
|
||||
|
||||
def get_by_sessionId2_pg(session_id, project_id, group_clickrage=False):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(cur.mogrify("""\
|
||||
SELECT
|
||||
c.*,
|
||||
'CLICK' AS type
|
||||
FROM events.clicks AS c
|
||||
WHERE
|
||||
c.session_id = %(session_id)s
|
||||
ORDER BY c.timestamp;""",
|
||||
{"project_id": project_id, "session_id": session_id})
|
||||
)
|
||||
rows = cur.fetchall()
|
||||
if group_clickrage:
|
||||
rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
|
||||
|
||||
cur.execute(cur.mogrify("""
|
||||
SELECT
|
||||
i.*,
|
||||
'INPUT' AS type
|
||||
FROM events.inputs AS i
|
||||
WHERE
|
||||
i.session_id = %(session_id)s
|
||||
ORDER BY i.timestamp;""",
|
||||
{"project_id": project_id, "session_id": session_id})
|
||||
)
|
||||
rows += cur.fetchall()
|
||||
cur.execute(cur.mogrify("""\
|
||||
SELECT
|
||||
l.*,
|
||||
l.path AS value,
|
||||
l.path AS url,
|
||||
'LOCATION' AS type
|
||||
FROM events.pages AS l
|
||||
WHERE
|
||||
l.session_id = %(session_id)s
|
||||
ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
|
||||
rows += cur.fetchall()
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
rows = sorted(rows, key=lambda k: (k["timestamp"], k["messageId"]))
|
||||
return rows
|
||||
|
||||
|
||||
def __pg_errors_query(source=None, value_length=None):
|
||||
if value_length is None or value_length > 2:
|
||||
return f"""((SELECT DISTINCT ON(lg.message)
|
||||
lg.message AS value,
|
||||
source,
|
||||
'{event_type.ERROR.ui_type}' AS type
|
||||
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.message ILIKE %(svalue)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
{"AND source = %(source)s" if source is not None else ""}
|
||||
LIMIT 5)
|
||||
UNION DISTINCT
|
||||
(SELECT DISTINCT ON(lg.name)
|
||||
lg.name AS value,
|
||||
source,
|
||||
'{event_type.ERROR.ui_type}' AS type
|
||||
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.name ILIKE %(svalue)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
{"AND source = %(source)s" if source is not None else ""}
|
||||
LIMIT 5)
|
||||
UNION DISTINCT
|
||||
(SELECT DISTINCT ON(lg.message)
|
||||
lg.message AS value,
|
||||
source,
|
||||
'{event_type.ERROR.ui_type}' AS type
|
||||
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.message ILIKE %(value)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
{"AND source = %(source)s" if source is not None else ""}
|
||||
LIMIT 5)
|
||||
UNION DISTINCT
|
||||
(SELECT DISTINCT ON(lg.name)
|
||||
lg.name AS value,
|
||||
source,
|
||||
'{event_type.ERROR.ui_type}' AS type
|
||||
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.name ILIKE %(value)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
{"AND source = %(source)s" if source is not None else ""}
|
||||
LIMIT 5));"""
|
||||
return f"""((SELECT DISTINCT ON(lg.message)
|
||||
lg.message AS value,
|
||||
source,
|
||||
'{event_type.ERROR.ui_type}' AS type
|
||||
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.message ILIKE %(svalue)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
{"AND source = %(source)s" if source is not None else ""}
|
||||
LIMIT 5)
|
||||
UNION DISTINCT
|
||||
(SELECT DISTINCT ON(lg.name)
|
||||
lg.name AS value,
|
||||
source,
|
||||
'{event_type.ERROR.ui_type}' AS type
|
||||
FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.name ILIKE %(svalue)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
{"AND source = %(source)s" if source is not None else ""}
|
||||
LIMIT 5));"""
|
||||
|
||||
|
||||
def __search_pg_errors(project_id, value, key=None, source=None):
|
||||
now = TimeUTC.now()
|
||||
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(__pg_errors_query(source,
|
||||
value_length=len(value)),
|
||||
{"project_id": project_id, "value": helper.string_to_sql_like(value),
|
||||
"svalue": helper.string_to_sql_like("^" + value),
|
||||
"source": source}))
|
||||
results = helper.list_to_camel_case(cur.fetchall())
|
||||
print(f"{TimeUTC.now() - now} : errors")
|
||||
return results
|
||||
|
||||
|
||||
def __search_pg_errors_ios(project_id, value, key=None, source=None):
|
||||
now = TimeUTC.now()
|
||||
if len(value) > 2:
|
||||
query = f"""(SELECT DISTINCT ON(lg.reason)
|
||||
lg.reason AS value,
|
||||
'{event_type.ERROR_IOS.ui_type}' AS type
|
||||
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
AND lg.reason ILIKE %(svalue)s
|
||||
LIMIT 5)
|
||||
UNION ALL
|
||||
(SELECT DISTINCT ON(lg.name)
|
||||
lg.name AS value,
|
||||
'{event_type.ERROR_IOS.ui_type}' AS type
|
||||
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
AND lg.name ILIKE %(svalue)s
|
||||
LIMIT 5)
|
||||
UNION ALL
|
||||
(SELECT DISTINCT ON(lg.reason)
|
||||
lg.reason AS value,
|
||||
'{event_type.ERROR_IOS.ui_type}' AS type
|
||||
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
AND lg.reason ILIKE %(value)s
|
||||
LIMIT 5)
|
||||
UNION ALL
|
||||
(SELECT DISTINCT ON(lg.name)
|
||||
lg.name AS value,
|
||||
'{event_type.ERROR_IOS.ui_type}' AS type
|
||||
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
AND lg.name ILIKE %(value)s
|
||||
LIMIT 5);"""
|
||||
else:
|
||||
query = f"""(SELECT DISTINCT ON(lg.reason)
|
||||
lg.reason AS value,
|
||||
'{event_type.ERROR_IOS.ui_type}' AS type
|
||||
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
AND lg.reason ILIKE %(svalue)s
|
||||
LIMIT 5)
|
||||
UNION ALL
|
||||
(SELECT DISTINCT ON(lg.name)
|
||||
lg.name AS value,
|
||||
'{event_type.ERROR_IOS.ui_type}' AS type
|
||||
FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
|
||||
WHERE
|
||||
s.project_id = %(project_id)s
|
||||
AND lg.project_id = %(project_id)s
|
||||
AND lg.name ILIKE %(svalue)s
|
||||
LIMIT 5);"""
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(cur.mogrify(query, {"project_id": project_id, "value": helper.string_to_sql_like(value),
|
||||
"svalue": helper.string_to_sql_like("^" + value)}))
|
||||
results = helper.list_to_camel_case(cur.fetchall())
|
||||
print(f"{TimeUTC.now() - now} : errors")
|
||||
return results
|
||||
|
||||
|
||||
def __search_pg_metadata(project_id, value, key=None, source=None):
|
||||
meta_keys = metadata.get(project_id=project_id)
|
||||
meta_keys = {m["key"]: m["index"] for m in meta_keys}
|
||||
if len(meta_keys) == 0 or key is not None and key not in meta_keys.keys():
|
||||
return []
|
||||
sub_from = []
|
||||
if key is not None:
|
||||
meta_keys = {key: meta_keys[key]}
|
||||
|
||||
for k in meta_keys.keys():
|
||||
colname = metadata.index_to_colname(meta_keys[k])
|
||||
if len(value) > 2:
|
||||
sub_from.append(f"""((SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
|
||||
FROM public.sessions
|
||||
WHERE project_id = %(project_id)s
|
||||
AND {colname} ILIKE %(svalue)s LIMIT 5)
|
||||
UNION
|
||||
(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
|
||||
FROM public.sessions
|
||||
WHERE project_id = %(project_id)s
|
||||
AND {colname} ILIKE %(value)s LIMIT 5))
|
||||
""")
|
||||
else:
|
||||
sub_from.append(f"""(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key
|
||||
FROM public.sessions
|
||||
WHERE project_id = %(project_id)s
|
||||
AND {colname} ILIKE %(svalue)s LIMIT 5)""")
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(cur.mogrify(f"""\
|
||||
SELECT key, value, 'METADATA' AS TYPE
|
||||
FROM({" UNION ALL ".join(sub_from)}) AS all_metas
|
||||
LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value),
|
||||
"svalue": helper.string_to_sql_like("^" + value)}))
|
||||
results = helper.list_to_camel_case(cur.fetchall())
|
||||
return results
|
||||
|
||||
|
||||
class event_type:
|
||||
CLICK = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label")
|
||||
INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label")
|
||||
LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", column="path")
|
||||
CUSTOM = Event(ui_type=schemas.EventType.custom, table="events_common.customs", column="name")
|
||||
REQUEST = Event(ui_type=schemas.EventType.request, table="events_common.requests", column="path")
|
||||
GRAPHQL = Event(ui_type=schemas.EventType.graphql, table="events.graphql", column="name")
|
||||
STATEACTION = Event(ui_type=schemas.EventType.state_action, table="events.state_actions", column="name")
|
||||
ERROR = Event(ui_type=schemas.EventType.error, table="events.errors",
|
||||
column=None) # column=None because errors are searched by name or message
|
||||
METADATA = Event(ui_type=schemas.FilterType.metadata, table="public.sessions", column=None)
|
||||
# IOS
|
||||
CLICK_IOS = Event(ui_type=schemas.EventType.click_ios, table="events_ios.clicks", column="label")
|
||||
INPUT_IOS = Event(ui_type=schemas.EventType.input_ios, table="events_ios.inputs", column="label")
|
||||
VIEW_IOS = Event(ui_type=schemas.EventType.view_ios, table="events_ios.views", column="name")
|
||||
CUSTOM_IOS = Event(ui_type=schemas.EventType.custom_ios, table="events_common.customs", column="name")
|
||||
REQUEST_IOS = Event(ui_type=schemas.EventType.request_ios, table="events_common.requests", column="url")
|
||||
ERROR_IOS = Event(ui_type=schemas.EventType.error_ios, table="events_ios.crashes",
|
||||
column=None) # column=None because errors are searched by name or message
|
||||
|
||||
|
||||
SUPPORTED_TYPES = {
|
||||
event_type.CLICK.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK),
|
||||
query=autocomplete.__generic_query(typename=event_type.CLICK.ui_type)),
|
||||
event_type.INPUT.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT),
|
||||
query=autocomplete.__generic_query(typename=event_type.INPUT.ui_type)),
|
||||
event_type.LOCATION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.LOCATION),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=event_type.LOCATION.ui_type)),
|
||||
event_type.CUSTOM.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM),
|
||||
query=autocomplete.__generic_query(typename=event_type.CUSTOM.ui_type)),
|
||||
event_type.REQUEST.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=event_type.REQUEST.ui_type)),
|
||||
event_type.GRAPHQL.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.GRAPHQL),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=event_type.GRAPHQL.ui_type)),
|
||||
event_type.STATEACTION.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.STATEACTION),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=event_type.STATEACTION.ui_type)),
|
||||
event_type.ERROR.ui_type: SupportedFilter(get=__search_pg_errors,
|
||||
query=None),
|
||||
event_type.METADATA.ui_type: SupportedFilter(get=__search_pg_metadata,
|
||||
query=None),
|
||||
# IOS
|
||||
event_type.CLICK_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CLICK_IOS),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=event_type.CLICK_IOS.ui_type)),
|
||||
event_type.INPUT_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.INPUT_IOS),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=event_type.INPUT_IOS.ui_type)),
|
||||
event_type.VIEW_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.VIEW_IOS),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=event_type.VIEW_IOS.ui_type)),
|
||||
event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.CUSTOM_IOS),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=event_type.CUSTOM_IOS.ui_type)),
|
||||
event_type.REQUEST_IOS.ui_type: SupportedFilter(get=autocomplete.__generic_autocomplete(event_type.REQUEST_IOS),
|
||||
query=autocomplete.__generic_query(
|
||||
typename=event_type.REQUEST_IOS.ui_type)),
|
||||
event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors_ios,
|
||||
query=None),
|
||||
}
|
||||
|
||||
|
||||
def get_errors_by_session_id(session_id, project_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(cur.mogrify(f"""\
|
||||
SELECT er.*,ur.*, er.timestamp - s.start_ts AS time
|
||||
FROM {event_type.ERROR.table} AS er INNER JOIN public.errors AS ur USING (error_id) INNER JOIN public.sessions AS s USING (session_id)
|
||||
WHERE er.session_id = %(session_id)s AND s.project_id=%(project_id)s
|
||||
ORDER BY timestamp;""", {"session_id": session_id, "project_id": project_id}))
|
||||
errors = cur.fetchall()
|
||||
for e in errors:
|
||||
e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
|
||||
return helper.list_to_camel_case(errors)
|
||||
|
||||
|
||||
def search(text, event_type, project_id, source, key):
|
||||
if not event_type:
|
||||
return {"data": autocomplete.__get_autocomplete_table(text, project_id)}
|
||||
|
||||
if event_type in SUPPORTED_TYPES.keys():
|
||||
rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
|
||||
# for IOS events autocomplete
|
||||
# if event_type + "_IOS" in SUPPORTED_TYPES.keys():
|
||||
# rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
|
||||
# source=source)
|
||||
elif event_type + "_IOS" in SUPPORTED_TYPES.keys():
|
||||
rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
|
||||
source=source)
|
||||
elif event_type in sessions_metas.SUPPORTED_TYPES.keys():
|
||||
return sessions_metas.search(text, event_type, project_id)
|
||||
elif event_type.endswith("_IOS") \
|
||||
and event_type[:-len("_IOS")] in sessions_metas.SUPPORTED_TYPES.keys():
|
||||
return sessions_metas.search(text, event_type, project_id)
|
||||
else:
|
||||
return {"errors": ["unsupported event"]}
|
||||
|
||||
return {"data": rows}
|
||||
380
ee/api/chalicelib/core/funnels.py
Normal file
|
|
@ -0,0 +1,380 @@
|
|||
import json
|
||||
from typing import List
|
||||
|
||||
import chalicelib.utils.helper
|
||||
import schemas
|
||||
from chalicelib.core import significance
|
||||
from chalicelib.utils import dev
|
||||
from chalicelib.utils import helper, pg_client
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
|
||||
from decouple import config
|
||||
|
||||
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||
from chalicelib.core import sessions_legacy as sessions
|
||||
else:
|
||||
from chalicelib.core import sessions
|
||||
|
||||
REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]
|
||||
|
||||
ALLOW_UPDATE_FOR = ["name", "filter"]
|
||||
|
||||
|
||||
def filter_stages(stages: List[schemas._SessionSearchEventSchema]):
|
||||
ALLOW_TYPES = [schemas.EventType.click, schemas.EventType.input,
|
||||
schemas.EventType.location, schemas.EventType.custom,
|
||||
schemas.EventType.click_ios, schemas.EventType.input_ios,
|
||||
schemas.EventType.view_ios, schemas.EventType.custom_ios, ]
|
||||
return [s for s in stages if s.type in ALLOW_TYPES and s.value is not None]
|
||||
|
||||
|
||||
def __parse_events(f_events: List[dict]):
|
||||
return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events]
|
||||
|
||||
|
||||
def __unparse_events(f_events: List[schemas._SessionSearchEventSchema]):
|
||||
return [e.dict() for e in f_events]
|
||||
|
||||
|
||||
def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]):
|
||||
if f_events is None:
|
||||
return
|
||||
events = []
|
||||
for e in f_events:
|
||||
if e.operator is None:
|
||||
e.operator = schemas.SearchEventOperator._is
|
||||
|
||||
if not isinstance(e.value, list):
|
||||
e.value = [e.value]
|
||||
is_any = sessions._isAny_opreator(e.operator)
|
||||
if not is_any and isinstance(e.value, list) and len(e.value) == 0:
|
||||
continue
|
||||
events.append(e)
|
||||
return events
|
||||
|
||||
|
||||
def __transform_old_funnels(events):
|
||||
for e in events:
|
||||
if not isinstance(e.get("value"), list):
|
||||
e["value"] = [e["value"]]
|
||||
return events
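A tiny illustration (not part of this commit) of __transform_old_funnels: older saved funnels stored a scalar stage value, and this wraps it in a list, presumably so they parse with the newer list-based event schema.
# Illustrative only: legacy scalar values are wrapped in lists, existing lists are left alone.
old_events = [{"type": "CLICK", "value": "Sign up"},
              {"type": "LOCATION", "value": ["/checkout"]}]
assert __transform_old_funnels(old_events) == [{"type": "CLICK", "value": ["Sign up"]},
                                               {"type": "LOCATION", "value": ["/checkout"]}]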
|
||||
|
||||
|
||||
def create(project_id, user_id, name, filter: schemas.FunnelSearchPayloadSchema, is_public):
|
||||
helper.delete_keys_from_dict(filter, REMOVE_KEYS)
|
||||
filter.events = filter_stages(stages=filter.events)
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify("""\
|
||||
INSERT INTO public.funnels (project_id, user_id, name, filter,is_public)
|
||||
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(filter)s::jsonb,%(is_public)s)
|
||||
RETURNING *;""",
|
||||
{"user_id": user_id, "project_id": project_id, "name": name,
|
||||
"filter": json.dumps(filter.dict()),
|
||||
"is_public": is_public})
|
||||
|
||||
cur.execute(
|
||||
query
|
||||
)
|
||||
r = cur.fetchone()
|
||||
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
|
||||
r = helper.dict_to_camel_case(r)
|
||||
r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
|
||||
return {"data": r}
|
||||
|
||||
|
||||
def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=None):
|
||||
s_query = []
|
||||
if filter is not None:
|
||||
helper.delete_keys_from_dict(filter, REMOVE_KEYS)
|
||||
s_query.append("filter = %(filter)s::jsonb")
|
||||
if name is not None and len(name) > 0:
|
||||
s_query.append("name = %(name)s")
|
||||
if is_public is not None:
|
||||
s_query.append("is_public = %(is_public)s")
|
||||
if len(s_query) == 0:
|
||||
return {"errors": ["Nothing to update"]}
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""\
|
||||
UPDATE public.funnels
|
||||
SET {" , ".join(s_query)}
|
||||
WHERE funnel_id=%(funnel_id)s
|
||||
AND project_id = %(project_id)s
|
||||
AND (user_id = %(user_id)s OR is_public)
|
||||
RETURNING *;""", {"user_id": user_id, "funnel_id": funnel_id, "name": name,
|
||||
"filter": json.dumps(filter) if filter is not None else None, "is_public": is_public,
|
||||
"project_id": project_id})
|
||||
# print("--------------------")
|
||||
# print(query)
|
||||
# print("--------------------")
|
||||
cur.execute(
|
||||
query
|
||||
)
|
||||
r = cur.fetchone()
|
||||
if r is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
|
||||
r = helper.dict_to_camel_case(r)
|
||||
r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
|
||||
r["filter"] = helper.old_search_payload_to_flat(r["filter"])
|
||||
return {"data": r}
|
||||
|
||||
|
||||
def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date=None, details=False):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
f"""\
|
||||
SELECT funnel_id, project_id, user_id, name, created_at, deleted_at, is_public
|
||||
{",filter" if details else ""}
|
||||
FROM public.funnels
|
||||
WHERE project_id = %(project_id)s
|
||||
AND funnels.deleted_at IS NULL
|
||||
AND (funnels.user_id = %(user_id)s OR funnels.is_public);""",
|
||||
{"project_id": project_id, "user_id": user_id}
|
||||
)
|
||||
)
|
||||
|
||||
rows = cur.fetchall()
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
for row in rows:
|
||||
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
|
||||
if details:
|
||||
row["filter"]["events"] = filter_stages(__parse_events(row["filter"]["events"]))
|
||||
if row.get("filter") is not None and row["filter"].get("events") is not None:
|
||||
row["filter"]["events"] = __transform_old_funnels(__unparse_events(row["filter"]["events"]))
|
||||
|
||||
get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date,
|
||||
end_date=end_date)
|
||||
counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(row["filter"]),
|
||||
project_id=project_id, user_id=None, count_only=True)
|
||||
row["sessionsCount"] = counts["countSessions"]
|
||||
row["usersCount"] = counts["countUsers"]
|
||||
filter_clone = dict(row["filter"])
|
||||
overview = significance.get_overview(filter_d=row["filter"], project_id=project_id)
|
||||
row["stages"] = overview["stages"]
|
||||
row.pop("filter")
|
||||
row["stagesCount"] = len(row["stages"])
|
||||
# TODO: ask david to count it alone
|
||||
row["criticalIssuesCount"] = overview["criticalIssuesCount"]
|
||||
row["missedConversions"] = 0 if len(row["stages"]) < 2 \
|
||||
else row["stages"][0]["sessionsCount"] - row["stages"][-1]["sessionsCount"]
|
||||
row["filter"] = helper.old_search_payload_to_flat(filter_clone)
|
||||
return rows
|
||||
|
||||
|
||||
def get_possible_issue_types(project_id):
|
||||
return [{"type": t, "title": chalicelib.utils.helper.get_issue_title(t)} for t in
|
||||
['click_rage', 'dead_click', 'excessive_scrolling',
|
||||
'bad_request', 'missing_resource', 'memory', 'cpu',
|
||||
'slow_resource', 'slow_page_load', 'crash', 'custom_event_error',
|
||||
'js_error']]
|
||||
|
||||
|
||||
def get_start_end_time(filter_d, range_value, start_date, end_date):
|
||||
if start_date is not None and end_date is not None:
|
||||
filter_d["startDate"], filter_d["endDate"] = start_date, end_date
|
||||
elif range_value is not None and len(range_value) > 0:
|
||||
filter_d["rangeValue"] = range_value
|
||||
filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(range_value)
|
||||
else:
|
||||
filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(filter_d["rangeValue"])
|
||||
|
||||
|
||||
def delete(project_id, funnel_id, user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify("""\
|
||||
UPDATE public.funnels
|
||||
SET deleted_at = timezone('utc'::text, now())
|
||||
WHERE project_id = %(project_id)s
|
||||
AND funnel_id = %(funnel_id)s
|
||||
AND (user_id = %(user_id)s OR is_public);""",
|
||||
{"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id})
|
||||
)
|
||||
|
||||
return {"data": {"state": "success"}}
|
||||
|
||||
|
||||
def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=None, end_date=None):
|
||||
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
|
||||
return sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(f["filter"]),
|
||||
project_id=project_id,
|
||||
user_id=user_id)
|
||||
|
||||
|
||||
def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema):
|
||||
data.events = filter_stages(data.events)
|
||||
data.events = __fix_stages(data.events)
|
||||
if len(data.events) == 0:
|
||||
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=data.range_value,
|
||||
start_date=data.startDate, end_date=data.endDate)
|
||||
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
|
||||
return sessions.search_sessions(data=data, project_id=project_id,
|
||||
user_id=user_id)
|
||||
|
||||
|
||||
def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
|
||||
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
|
||||
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id)
|
||||
insights = helper.list_to_camel_case(insights)
|
||||
if len(insights) > 0:
|
||||
# fix: cap an inflated drop count at the first stage's session count
|
||||
if total_drop_due_to_issues > insights[0]["sessionsCount"]:
|
||||
total_drop_due_to_issues = insights[0]["sessionsCount"]
|
||||
# end fix
|
||||
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
|
||||
return {"data": {"stages": insights,
|
||||
"totalDropDueToIssues": total_drop_due_to_issues}}
|
||||
|
||||
|
||||
def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema):
|
||||
data.events = filter_stages(__parse_events(data.events))
|
||||
if len(data.events) == 0:
|
||||
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
|
||||
start_date=data.startDate,
|
||||
end_date=data.endDate)
|
||||
data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"])
|
||||
data.events = __fix_stages(data.events)
|
||||
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
|
||||
insights = helper.list_to_camel_case(insights)
|
||||
if len(insights) > 0:
|
||||
# fix: cap an inflated drop count at the first stage's session count
|
||||
if total_drop_due_to_issues > insights[0]["sessionsCount"]:
|
||||
total_drop_due_to_issues = insights[0]["sessionsCount"]
|
||||
# end fix
|
||||
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
|
||||
return {"data": {"stages": insights,
|
||||
"totalDropDueToIssues": total_drop_due_to_issues}}
|
||||
|
||||
|
||||
# def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema):
|
||||
def get_top_insights_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema):
|
||||
data.events = filter_stages(__parse_events(data.events))
|
||||
data.events = __fix_stages(data.events)
|
||||
if len(data.events) == 0:
|
||||
return {"stages": [], "totalDropDueToIssues": 0}
|
||||
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id)
|
||||
insights = helper.list_to_camel_case(insights)
|
||||
if len(insights) > 0:
|
||||
# TODO: check if this is correct
|
||||
if total_drop_due_to_issues > insights[0]["sessionsCount"]:
|
||||
if len(insights) == 0:
|
||||
total_drop_due_to_issues = 0
|
||||
else:
|
||||
total_drop_due_to_issues = insights[0]["sessionsCount"] - insights[-1]["sessionsCount"]
|
||||
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
|
||||
return {"stages": insights,
|
||||
"totalDropDueToIssues": total_drop_due_to_issues}
|
||||
|
||||
|
||||
def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None):
|
||||
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
|
||||
return {"data": {
|
||||
"issues": helper.dict_to_camel_case(significance.get_issues_list(filter_d=f["filter"], project_id=project_id))
|
||||
}}
|
||||
|
||||
|
||||
def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema):
|
||||
data.events = filter_stages(data.events)
|
||||
data.events = __fix_stages(data.events)
|
||||
if len(data.events) == 0:
|
||||
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue,
|
||||
start_date=data.startDate,
|
||||
end_date=data.endDate)
|
||||
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
|
||||
if len(data.events) < 2:
|
||||
return {"issues": []}
|
||||
return {
|
||||
"issues": helper.dict_to_camel_case(
|
||||
significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
|
||||
last_stage=len(data.events)))}
|
||||
|
||||
|
||||
# def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema):
|
||||
def get_issues_on_the_fly_widget(project_id, data: schemas.CustomMetricSeriesFilterSchema):
|
||||
data.events = filter_stages(data.events)
|
||||
data.events = __fix_stages(data.events)
|
||||
if len(data.events) < 2:
|
||||
return {"issues": []}
|
||||
|
||||
return {
|
||||
"issues": helper.dict_to_camel_case(
|
||||
significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1,
|
||||
last_stage=len(data.events)))}
|
||||
|
||||
|
||||
def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
"""\
|
||||
SELECT
|
||||
*
|
||||
FROM public.funnels
|
||||
WHERE project_id = %(project_id)s
|
||||
AND deleted_at IS NULL
|
||||
AND funnel_id = %(funnel_id)s
|
||||
AND (user_id = %(user_id)s OR is_public);""",
|
||||
{"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id}
|
||||
)
|
||||
)
|
||||
|
||||
f = helper.dict_to_camel_case(cur.fetchone())
|
||||
if f is None:
|
||||
return None
|
||||
if f.get("filter") is not None and f["filter"].get("events") is not None:
|
||||
f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"])
|
||||
f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
|
||||
f["filter"]["events"] = __parse_events(f["filter"]["events"])
|
||||
f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
|
||||
if fix_stages:
|
||||
f["filter"]["events"] = __fix_stages(f["filter"]["events"])
|
||||
f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]]
|
||||
if flatten:
|
||||
f["filter"] = helper.old_search_payload_to_flat(f["filter"])
|
||||
return f
|
||||
|
||||
|
||||
def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.FunnelSearchPayloadSchema, range_value=None,
|
||||
start_date=None, end_date=None):
|
||||
if len(data.events) == 0:
|
||||
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
data.startDate = data.startDate if data.startDate is not None else start_date
|
||||
data.endDate = data.endDate if data.endDate is not None else end_date
|
||||
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=data.startDate,
|
||||
end_date=data.endDate)
|
||||
data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"])
|
||||
|
||||
issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data) \
|
||||
.get("issues", {})
|
||||
issues = issues.get("significant", []) + issues.get("insignificant", [])
|
||||
issue = None
|
||||
for i in issues:
|
||||
if i.get("issueId", "") == issue_id:
|
||||
issue = i
|
||||
break
|
||||
return {"sessions": sessions.search_sessions(user_id=user_id, project_id=project_id, issue=issue,
|
||||
data=data) if issue is not None else {"total": 0, "sessions": []},
|
||||
# "stages": helper.list_to_camel_case(insights),
|
||||
# "totalDropDueToIssues": total_drop_due_to_issues,
|
||||
"issue": issue}
|
||||
|
|
@ -793,7 +793,7 @@ def get_missing_resources_trend(project_id, startTimestamp=TimeUTC.now(delta_day
|
|||
GROUP BY url_hostpath
|
||||
ORDER BY doc_count DESC
|
||||
LIMIT 10;"""
|
||||
params = {"project_id": project_id, "startTimestamp": startTimestamp,
|
||||
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
|
||||
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
|
||||
rows = ch.execute(query=ch_query, params=params)
|
||||
|
||||
|
|
|
|||
|
|
@ -103,7 +103,7 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
|
|||
r.pop("first_recorded_session_at")
|
||||
r.pop("first_recorded")
|
||||
|
||||
if recording_state:
|
||||
if recording_state and len(rows) > 0:
|
||||
project_ids = [f'({r["project_id"]})' for r in rows]
|
||||
query = cur.mogrify(f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last
|
||||
FROM (VALUES {",".join(project_ids)}) AS projects(project_id)
|
||||
|
|
|
|||
|
|
@ -4,8 +4,7 @@ from chalicelib.utils import email_helper, captcha, helper
|
|||
|
||||
|
||||
def reset(data: schemas.ForgetPasswordPayloadSchema):
|
||||
print("====================== reset password ===============")
|
||||
print(data)
|
||||
print(f"====================== reset password {data.email}")
|
||||
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
|
||||
print("error: Invalid captcha.")
|
||||
return {"errors": ["Invalid captcha."]}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from chalicelib.utils import helper
|
||||
from chalicelib.utils import helper, exp_ch_helper
|
||||
from chalicelib.utils import ch_client
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
from decouple import config
|
||||
|
|
@ -9,14 +9,26 @@ def get_by_session_id(session_id, project_id, start_ts, duration):
|
|||
if duration is None or (type(duration) != 'int' and type(duration) != 'float') or duration < 0:
|
||||
duration = 0
|
||||
delta = config("events_ts_delta", cast=int, default=60 * 60) * 1000
|
||||
ch_query = """\
|
||||
SELECT
|
||||
datetime,url,type,duration,ttfb,header_size,encoded_body_size,decoded_body_size,success,coalesce(status,if(success, 200, status)) AS status
|
||||
FROM resources
|
||||
WHERE session_id = toUInt64(%(session_id)s)
|
||||
AND project_id=%(project_id)s
|
||||
AND datetime >= toDateTime(%(res_start_ts)s / 1000)
|
||||
AND datetime <= toDateTime(%(res_end_ts)s / 1000);"""
|
||||
if config("EXP_RESOURCES", cast=bool, default=False):
|
||||
ch_query = f"""SELECT
|
||||
datetime,url,type,duration,ttfb,header_size,
|
||||
encoded_body_size,decoded_body_size,success,
|
||||
if(success, 200, 400) AS status
|
||||
FROM {exp_ch_helper.get_main_resources_table(start_ts)}
|
||||
WHERE session_id = toUInt64(%(session_id)s)
|
||||
AND project_id = toUInt16(%(project_id)s)
|
||||
AND datetime >= toDateTime(%(res_start_ts)s / 1000)
|
||||
AND datetime <= toDateTime(%(res_end_ts)s / 1000);"""
|
||||
else:
|
||||
ch_query = """SELECT
|
||||
datetime,url,type,duration,ttfb,header_size,
|
||||
encoded_body_size,decoded_body_size,success,
|
||||
coalesce(status,if(success, 200, status)) AS status
|
||||
FROM resources
|
||||
WHERE session_id = toUInt64(%(session_id)s)
|
||||
AND project_id = toUInt64(%(project_id)s)
|
||||
AND datetime >= toDateTime(%(res_start_ts)s / 1000)
|
||||
AND datetime <= toDateTime(%(res_end_ts)s / 1000);"""
|
||||
params = {"session_id": session_id, "project_id": project_id, "start_ts": start_ts, "duration": duration,
|
||||
"res_start_ts": start_ts - delta, "res_end_ts": start_ts + duration + delta, }
|
||||
rows = ch.execute(query=ch_query, params=params)
|
||||
|
|
|
|||
644
ee/api/chalicelib/core/significance.py
Normal file
|
|
@ -0,0 +1,644 @@
|
|||
__author__ = "AZNAUROV David"
|
||||
__maintainer__ = "KRAIEM Taha Yassine"
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import events, metadata
|
||||
from chalicelib.utils import dev
|
||||
|
||||
from decouple import config
|
||||
|
||||
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
|
||||
from chalicelib.core import sessions_legacy as sessions
|
||||
else:
|
||||
from chalicelib.core import sessions
|
||||
|
||||
"""
|
||||
todo: remove LIMIT from the query
|
||||
"""
|
||||
|
||||
from typing import List
|
||||
import math
|
||||
import warnings
|
||||
from collections import defaultdict
|
||||
|
||||
from psycopg2.extras import RealDictRow
|
||||
from chalicelib.utils import pg_client, helper
|
||||
|
||||
SIGNIFICANCE_THRSH = 0.4
|
||||
|
||||
T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.365, 8: 2.306, 9: 2.262, 10: 2.228,
|
||||
11: 2.201, 12: 2.179, 13: 2.160, 14: 2.145, 15: 2.13, 16: 2.120, 17: 2.110, 18: 2.101, 19: 2.093, 20: 2.086,
|
||||
21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042}
|
||||
|
||||
|
||||
def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
|
||||
"""
|
||||
Build and run the funnel stages query, returning the matching sessions with per-stage timestamps and the issues seen between the first and last stage.
|
||||
:param filter_d: dict containing events, filters, startDate, endDate and optional issueTypes
|
||||
:return:
|
||||
"""
|
||||
stages: [dict] = filter_d.get("events", [])
|
||||
filters: [dict] = filter_d.get("filters", [])
|
||||
filter_issues = filter_d.get("issueTypes")
|
||||
if filter_issues is None or len(filter_issues) == 0:
|
||||
filter_issues = []
|
||||
stage_constraints = ["main.timestamp <= %(endTimestamp)s"]
|
||||
first_stage_extra_constraints = ["s.project_id=%(project_id)s", "s.start_ts >= %(startTimestamp)s",
|
||||
"s.start_ts <= %(endTimestamp)s"]
|
||||
filter_extra_from = []
|
||||
n_stages_query = []
|
||||
values = {}
|
||||
if len(filters) > 0:
|
||||
meta_keys = None
|
||||
for i, f in enumerate(filters):
|
||||
if not isinstance(f["value"], list):
|
||||
f["value"] = [f["value"]]
|
||||
if len(f["value"]) == 0 or f["value"] is None:
|
||||
continue
|
||||
f["value"] = helper.values_for_operator(value=f["value"], op=f["operator"])
|
||||
# filter_args = _multiple_values(f["value"])
|
||||
op = sessions.__get_sql_operator(f["operator"])
|
||||
|
||||
filter_type = f["type"]
|
||||
# values[f_k] = sessions.__get_sql_value_multiple(f["value"])
|
||||
f_k = f"f_value{i}"
|
||||
values = {**values,
|
||||
**sessions._multiple_values(helper.values_for_operator(value=f["value"], op=f["operator"]),
|
||||
value_key=f_k)}
|
||||
if filter_type == schemas.FilterType.user_browser:
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sessions._multiple_conditions(f's.user_browser {op} %({f_k})s', f["value"], value_key=f_k))
|
||||
|
||||
elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]:
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sessions._multiple_conditions(f's.user_os {op} %({f_k})s', f["value"], value_key=f_k))
|
||||
|
||||
elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]:
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sessions._multiple_conditions(f's.user_device {op} %({f_k})s', f["value"], value_key=f_k))
|
||||
|
||||
elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]:
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sessions._multiple_conditions(f's.user_country {op} %({f_k})s', f["value"], value_key=f_k))
|
||||
elif filter_type == schemas.FilterType.duration:
|
||||
if len(f["value"]) > 0 and f["value"][0] is not None:
|
||||
first_stage_extra_constraints.append(f's.duration >= %(minDuration)s')
|
||||
values["minDuration"] = f["value"][0]
|
||||
if len(f["value"]) > 1 and f["value"][1] is not None and int(f["value"][1]) > 0:
|
||||
first_stage_extra_constraints.append('s.duration <= %(maxDuration)s')
|
||||
values["maxDuration"] = f["value"][1]
|
||||
elif filter_type == schemas.FilterType.referrer:
|
||||
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
|
||||
filter_extra_from = [f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"]
|
||||
# op = sessions.__get_sql_operator_multiple(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sessions._multiple_conditions(f"p.base_referrer {op} %({f_k})s", f["value"], value_key=f_k))
|
||||
elif filter_type == events.event_type.METADATA.ui_type:
|
||||
if meta_keys is None:
|
||||
meta_keys = metadata.get(project_id=project_id)
|
||||
meta_keys = {m["key"]: m["index"] for m in meta_keys}
|
||||
# op = sessions.__get_sql_operator(f["operator"])
|
||||
if f.get("key") in meta_keys.keys():
|
||||
first_stage_extra_constraints.append(
|
||||
sessions._multiple_conditions(
|
||||
f's.{metadata.index_to_colname(meta_keys[f["key"]])} {op} %({f_k})s', f["value"],
|
||||
value_key=f_k))
|
||||
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
|
||||
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
|
||||
# op = sessions.__get_sql_operator(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sessions._multiple_conditions(f's.user_id {op} %({f_k})s', f["value"], value_key=f_k))
|
||||
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
|
||||
elif filter_type in [schemas.FilterType.user_anonymous_id,
|
||||
schemas.FilterType.user_anonymous_id_ios]:
|
||||
# op = sessions.__get_sql_operator(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sessions._multiple_conditions(f's.user_anonymous_id {op} %({f_k})s', f["value"], value_key=f_k))
|
||||
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
|
||||
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
|
||||
# op = sessions.__get_sql_operator(f["operator"])
|
||||
first_stage_extra_constraints.append(
|
||||
sessions._multiple_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k))
|
||||
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
|
||||
i = -1
|
||||
for s in stages:
|
||||
|
||||
if s.get("operator") is None:
|
||||
s["operator"] = "is"
|
||||
|
||||
if not isinstance(s["value"], list):
|
||||
s["value"] = [s["value"]]
|
||||
is_any = sessions._isAny_opreator(s["operator"])
|
||||
if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0:
|
||||
continue
|
||||
i += 1
|
||||
if i == 0:
|
||||
extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
|
||||
else:
|
||||
extra_from = []
|
||||
op = sessions.__get_sql_operator(s["operator"])
|
||||
event_type = s["type"].upper()
|
||||
if event_type == events.event_type.CLICK.ui_type:
|
||||
next_table = events.event_type.CLICK.table
|
||||
next_col_name = events.event_type.CLICK.column
|
||||
elif event_type == events.event_type.INPUT.ui_type:
|
||||
next_table = events.event_type.INPUT.table
|
||||
next_col_name = events.event_type.INPUT.column
|
||||
elif event_type == events.event_type.LOCATION.ui_type:
|
||||
next_table = events.event_type.LOCATION.table
|
||||
next_col_name = events.event_type.LOCATION.column
|
||||
elif event_type == events.event_type.CUSTOM.ui_type:
|
||||
next_table = events.event_type.CUSTOM.table
|
||||
next_col_name = events.event_type.CUSTOM.column
|
||||
# IOS --------------
|
||||
elif event_type == events.event_type.CLICK_IOS.ui_type:
|
||||
next_table = events.event_type.CLICK_IOS.table
|
||||
next_col_name = events.event_type.CLICK_IOS.column
|
||||
elif event_type == events.event_type.INPUT_IOS.ui_type:
|
||||
next_table = events.event_type.INPUT_IOS.table
|
||||
next_col_name = events.event_type.INPUT_IOS.column
|
||||
elif event_type == events.event_type.VIEW_IOS.ui_type:
|
||||
next_table = events.event_type.VIEW_IOS.table
|
||||
next_col_name = events.event_type.VIEW_IOS.column
|
||||
elif event_type == events.event_type.CUSTOM_IOS.ui_type:
|
||||
next_table = events.event_type.CUSTOM_IOS.table
|
||||
next_col_name = events.event_type.CUSTOM_IOS.column
|
||||
else:
|
||||
print("=================UNDEFINED")
|
||||
continue
|
||||
|
||||
values = {**values, **sessions._multiple_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
|
||||
value_key=f"value{i + 1}")}
|
||||
if sessions.__is_negation_operator(op) and i > 0:
|
||||
op = sessions.__reverse_sql_operator(op)
|
||||
main_condition = "left_not.session_id ISNULL"
|
||||
extra_from.append(f"""LEFT JOIN LATERAL (SELECT session_id
|
||||
FROM {next_table} AS s_main
|
||||
WHERE s_main.{next_col_name} {op} %(value{i + 1})s
|
||||
AND s_main.timestamp >= T{i}.stage{i}_timestamp
|
||||
AND s_main.session_id = T1.session_id) AS left_not ON (TRUE)""")
|
||||
else:
|
||||
if is_any:
|
||||
main_condition = "TRUE"
|
||||
else:
|
||||
main_condition = sessions._multiple_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
|
||||
values=s["value"], value_key=f"value{i + 1}")
|
||||
n_stages_query.append(f"""
|
||||
(SELECT main.session_id,
|
||||
{"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp,
|
||||
'{event_type}' AS type,
|
||||
'{s["operator"]}' AS operator
|
||||
FROM {next_table} AS main {" ".join(extra_from)}
|
||||
WHERE main.timestamp >= {f"T{i}.stage{i}_timestamp" if i > 0 else "%(startTimestamp)s"}
|
||||
{f"AND main.session_id=T1.session_id" if i > 0 else ""}
|
||||
AND {main_condition}
|
||||
{(" AND " + " AND ".join(stage_constraints)) if len(stage_constraints) > 0 else ""}
|
||||
{(" AND " + " AND ".join(first_stage_extra_constraints)) if len(first_stage_extra_constraints) > 0 and i == 0 else ""}
|
||||
GROUP BY main.session_id)
|
||||
AS T{i + 1} {"USING (session_id)" if i > 0 else ""}
|
||||
""")
|
||||
if len(n_stages_query) == 0:
|
||||
return []
|
||||
n_stages_query = " LEFT JOIN LATERAL ".join(n_stages_query)
|
||||
n_stages_query += ") AS stages_t"
|
||||
|
||||
n_stages_query = f"""
|
||||
SELECT stages_and_issues_t.*,sessions.session_id, sessions.user_uuid FROM (
|
||||
SELECT * FROM (
|
||||
SELECT * FROM
|
||||
{n_stages_query}
|
||||
LEFT JOIN LATERAL
|
||||
(
|
||||
SELECT * FROM
|
||||
(SELECT ISE.session_id,
|
||||
ISS.type as issue_type,
|
||||
ISE.timestamp AS issue_timestamp,
|
||||
ISS.context_string as issue_context,
|
||||
ISS.issue_id as issue_id
|
||||
FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id)
|
||||
WHERE ISE.timestamp >= stages_t.stage1_timestamp
|
||||
AND ISE.timestamp <= stages_t.stage{i + 1}_timestamp
|
||||
AND ISS.project_id=%(project_id)s
|
||||
{"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}) AS base_t
|
||||
) AS issues_t
|
||||
USING (session_id)) AS stages_and_issues_t
|
||||
inner join sessions USING(session_id);
|
||||
"""
|
||||
|
||||
# LIMIT 10000
|
||||
params = {"project_id": project_id, "startTimestamp": filter_d["startDate"], "endTimestamp": filter_d["endDate"],
|
||||
"issueTypes": tuple(filter_issues), **values}
|
||||
with pg_client.PostgresClient() as cur:
|
||||
# print("---------------------------------------------------")
|
||||
# print(cur.mogrify(n_stages_query, params))
|
||||
# print("---------------------------------------------------")
|
||||
cur.execute(cur.mogrify(n_stages_query, params))
|
||||
rows = cur.fetchall()
|
||||
return rows
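
The parameter dictionary bound into the query above has a simple shape; a hypothetical example (the keys mirror the params built in this function, the values are illustrative only, and the exact per-stage key names come from sessions._multiple_values(value_key=f"value{i + 1}")):

# hypothetical bind parameters for a two-stage funnel query
params = {
    "project_id": 42,
    "startTimestamp": 1660000000000,   # ms epoch, from filter_d["startDate"]
    "endTimestamp": 1660086400000,     # ms epoch, from filter_d["endDate"]
    "issueTypes": ("js_exception", "bad_request"),
    "value1": "Sign Up",               # stage 1 value (e.g. a click label)
    "value2": "/checkout",             # stage 2 value (e.g. a location path)
}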
|
||||
|
||||
|
||||
def pearson_corr(x: list, y: list):
    n = len(x)
    if n != len(y):
        raise ValueError(f'x and y must have the same length. Got {len(x)} and {len(y)} instead')

    if n < 2:
        warnings.warn(f'x and y must have length at least 2. Got {n} instead')
        return None, None, False

    # If an input is constant, the correlation coefficient is not defined.
    if all(t == x[0] for t in x) or all(t == y[0] for t in y):
        warnings.warn("An input array is constant; the correlation coefficient is not defined.")
        return None, None, False

    if n == 2:
        # Keep the (r, confidence, is_significant) contract expected by all callers.
        sign = math.copysign(1, x[1] - x[0]) * math.copysign(1, y[1] - y[0])
        return sign, 1.0, sign > 0

    xmean = sum(x) / len(x)
    ymean = sum(y) / len(y)

    xm = [el - xmean for el in x]
    ym = [el - ymean for el in y]

    normxm = math.sqrt(sum(xm[i] * xm[i] for i in range(len(xm))))
    normym = math.sqrt(sum(ym[i] * ym[i] for i in range(len(ym))))

    threshold = 1e-8
    if normxm < threshold * abs(xmean) or normym < threshold * abs(ymean):
        # If all the values in x (likewise y) are very close to the mean,
        # the loss of precision that occurs in the subtraction xm = x - xmean
        # might result in large errors in r.
        warnings.warn("An input array is nearly constant; the computed correlation coefficient may be inaccurate.")

    r = sum(
        i[0] * i[1] for i in zip([xm[i] / normxm for i in range(len(xm))], [ym[i] / normym for i in range(len(ym))]))

    # Presumably, if abs(r) > 1, then it is only some small artifact of floating point arithmetic.
    # However, if r < 0, we don't care, as our problem is to find only positive correlations.
    r = max(min(r, 1.0), 0.0)

    # approximated confidence
    if n < 31:
        t_c = T_VALUES[n]
    elif n < 50:
        t_c = 2.02
    else:
        t_c = 2
    if r >= 0.999:
        confidence = 1
    else:
        confidence = r * math.sqrt(n - 2) / math.sqrt(1 - r ** 2)

    if confidence > SIGNIFICANCE_THRSH:
        return r, confidence, True
    else:
        return r, confidence, False
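
A minimal sketch of how callers unpack this helper; the confidence value is the approximate t-statistic r * sqrt(n - 2) / sqrt(1 - r**2) compared against SIGNIFICANCE_THRSH (the sample vectors below are hypothetical):

# 1 = session reached the last stage / issue occurred in the sub-funnel (hypothetical data)
transitions = [1, 0, 1, 1, 0, 1, 0, 0]
issue_flags = [1, 0, 1, 1, 0, 0, 0, 0]
r, confidence, is_significant = pearson_corr(transitions, issue_flags)
if is_significant:
    print(f"conversion_impact={round(r * 100)}, t={confidence:.2f}")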
|
||||
|
||||
|
||||
def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_with_context, first_stage, last_stage):
|
||||
"""
|
||||
Returns two lists with binary values 0/1:
|
||||
|
||||
transitions ::: if transited from the first stage to the last - 1
|
||||
else - 0
|
||||
errors ::: a dictionary where the keys are all unique issues (currently context-wise)
|
||||
the values are lists
|
||||
if an issue happened between the first stage to the last - 1
|
||||
else - 0
|
||||
|
||||
For a small task of calculating a total drop due to issues,
|
||||
we need to disregard the issue type when creating the `errors`-like array.
|
||||
The `all_errors` array can be obtained by logical OR statement applied to all errors by issue
|
||||
The `transitions` array stays the same
|
||||
"""
|
||||
transitions = []
|
||||
n_sess_affected = 0
|
||||
errors = {}
|
||||
for issue in all_issues_with_context:
|
||||
split = issue.split('__^__')
|
||||
errors[issue] = {
|
||||
"errors": [],
|
||||
"issue_type": split[0],
|
||||
"context": split[1]}
|
||||
|
||||
for row in rows:
|
||||
t = 0
|
||||
first_ts = row[f'stage{first_stage}_timestamp']
|
||||
last_ts = row[f'stage{last_stage}_timestamp']
|
||||
if first_ts is None:
|
||||
continue
|
||||
elif first_ts is not None and last_ts is not None:
|
||||
t = 1
|
||||
transitions.append(t)
|
||||
|
||||
ic_present = False
|
||||
for issue_type_with_context in errors:
|
||||
ic = 0
|
||||
issue_type = errors[issue_type_with_context]["issue_type"]
|
||||
context = errors[issue_type_with_context]["context"]
|
||||
if row['issue_type'] is not None:
|
||||
if last_ts is None or (first_ts < row['issue_timestamp'] < last_ts):
|
||||
context_in_row = row['issue_context'] if row['issue_context'] is not None else ''
|
||||
if issue_type == row['issue_type'] and context == context_in_row:
|
||||
ic = 1
|
||||
ic_present = True
|
||||
errors[issue_type_with_context]["errors"].append(ic)
|
||||
|
||||
if ic_present and t:
|
||||
n_sess_affected += 1
|
||||
|
||||
# def tuple_or(t: tuple):
|
||||
# x = 0
|
||||
# for el in t:
|
||||
# x |= el
|
||||
# return x
|
||||
def tuple_or(t: tuple):
|
||||
for el in t:
|
||||
if el > 0:
|
||||
return 1
|
||||
return 0
|
||||
|
||||
errors = {key: errors[key]["errors"] for key in errors}
|
||||
all_errors = [tuple_or(t) for t in zip(*errors.values())]
|
||||
|
||||
return transitions, errors, all_errors, n_sess_affected
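
For intuition, the element-wise OR described in the docstring behaves as follows (hypothetical per-issue arrays; the expression is equivalent to the nested tuple_or helper):

# three sessions, two issues (1 = issue seen between the first and last stage)
errors = {"js_exception__^__TypeError": [1, 0, 0],
          "bad_request__^__/api/cart": [0, 0, 1]}
all_errors = [1 if any(t) else 0 for t in zip(*errors.values())]  # -> [1, 0, 1]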
|
||||
|
||||
|
||||
def get_affected_users_for_all_issues(rows, first_stage, last_stage):
|
||||
"""
|
||||
|
||||
:param rows:
|
||||
:param first_stage:
|
||||
:param last_stage:
|
||||
:return:
|
||||
"""
|
||||
affected_users = defaultdict(lambda: set())
|
||||
affected_sessions = defaultdict(lambda: set())
|
||||
contexts = defaultdict(lambda: None)
|
||||
n_affected_users_dict = defaultdict(lambda: None)
|
||||
n_affected_sessions_dict = defaultdict(lambda: None)
|
||||
all_issues_with_context = set()
|
||||
n_issues_dict = defaultdict(lambda: 0)
|
||||
issues_by_session = defaultdict(lambda: 0)
|
||||
|
||||
for row in rows:
|
||||
|
||||
# check that the session has reached the first stage of subfunnel:
|
||||
if row[f'stage{first_stage}_timestamp'] is None:
|
||||
continue
|
||||
|
||||
iss = row['issue_type']
|
||||
iss_ts = row['issue_timestamp']
|
||||
|
||||
# check that the issue exists and belongs to subfunnel:
|
||||
if iss is not None and (row[f'stage{last_stage}_timestamp'] is None or
|
||||
(row[f'stage{first_stage}_timestamp'] < iss_ts < row[f'stage{last_stage}_timestamp'])):
|
||||
context_string = row['issue_context'] if row['issue_context'] is not None else ''
|
||||
issue_with_context = iss + '__^__' + context_string
|
||||
contexts[issue_with_context] = {"context": context_string, "id": row["issue_id"]}
|
||||
all_issues_with_context.add(issue_with_context)
|
||||
n_issues_dict[issue_with_context] += 1
|
||||
if row['user_uuid'] is not None:
|
||||
affected_users[issue_with_context].add(row['user_uuid'])
|
||||
|
||||
affected_sessions[issue_with_context].add(row['session_id'])
|
||||
issues_by_session[row[f'session_id']] += 1
|
||||
|
||||
if len(affected_users) > 0:
|
||||
n_affected_users_dict.update({
|
||||
iss: len(affected_users[iss]) for iss in affected_users
|
||||
})
|
||||
if len(affected_sessions) > 0:
|
||||
n_affected_sessions_dict.update({
|
||||
iss: len(affected_sessions[iss]) for iss in affected_sessions
|
||||
})
|
||||
return all_issues_with_context, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict, contexts
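
The dictionary keys built above concatenate the issue type and its context with the '__^__' separator, so a key can later be split back into its two parts; a hypothetical example:

# key as produced above and split again in get_transitions_and_issues_of_each_type()
issue_with_context = "js_exception" + "__^__" + "TypeError: x is undefined"
issue_type, context = issue_with_context.split("__^__")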
|
||||
|
||||
|
||||
def count_sessions(rows, n_stages):
|
||||
session_counts = {i: set() for i in range(1, n_stages + 1)}
|
||||
for ind, row in enumerate(rows):
|
||||
for i in range(1, n_stages + 1):
|
||||
if row[f"stage{i}_timestamp"] is not None:
|
||||
session_counts[i].add(row[f"session_id"])
|
||||
session_counts = {i: len(session_counts[i]) for i in session_counts}
|
||||
return session_counts
|
||||
|
||||
|
||||
def count_users(rows, n_stages):
|
||||
users_in_stages = defaultdict(lambda: set())
|
||||
|
||||
for ind, row in enumerate(rows):
|
||||
for i in range(1, n_stages + 1):
|
||||
if row[f"stage{i}_timestamp"] is not None:
|
||||
users_in_stages[i].add(row["user_uuid"])
|
||||
|
||||
users_count = {i: len(users_in_stages[i]) for i in range(1, n_stages + 1)}
|
||||
|
||||
return users_count
|
||||
|
||||
|
||||
def get_stages(stages, rows):
|
||||
n_stages = len(stages)
|
||||
session_counts = count_sessions(rows, n_stages)
|
||||
users_counts = count_users(rows, n_stages)
|
||||
|
||||
stages_list = []
|
||||
for i, stage in enumerate(stages):
|
||||
|
||||
drop = None
|
||||
if i != 0:
|
||||
if session_counts[i] == 0:
|
||||
drop = 0
|
||||
elif session_counts[i] > 0:
|
||||
drop = int(100 * (session_counts[i] - session_counts[i + 1]) / session_counts[i])
|
||||
|
||||
stages_list.append(
|
||||
{"value": stage["value"],
|
||||
"type": stage["type"],
|
||||
"operator": stage["operator"],
|
||||
"sessionsCount": session_counts[i + 1],
|
||||
"drop_pct": drop,
|
||||
"usersCount": users_counts[i + 1],
|
||||
"dropDueToIssues": 0
|
||||
}
|
||||
)
|
||||
return stages_list
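
The drop_pct reported for each stage is the relative loss from the previous stage; a quick worked example with hypothetical counts:

session_counts = {1: 200, 2: 150}  # hypothetical: 200 sessions reached stage 1, 150 reached stage 2
drop = int(100 * (session_counts[1] - session_counts[2]) / session_counts[1])  # 25 (%)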
|
||||
|
||||
|
||||
def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False):
|
||||
"""
|
||||
|
||||
:param stages:
|
||||
:param rows:
|
||||
:param first_stage: If it's a part of the initial funnel, provide a number of the first stage (starting from 1)
|
||||
:param last_stage: If it's a part of the initial funnel, provide a number of the last stage (starting from 1)
|
||||
:return:
|
||||
"""
|
||||
|
||||
n_stages = len(stages)
|
||||
|
||||
if first_stage is None:
|
||||
first_stage = 1
|
||||
if last_stage is None:
|
||||
last_stage = n_stages
|
||||
if last_stage > n_stages:
|
||||
print("The number of the last stage provided is greater than the number of stages. Using n_stages instead")
|
||||
last_stage = n_stages
|
||||
|
||||
n_critical_issues = 0
|
||||
issues_dict = dict({"significant": [],
|
||||
"insignificant": []})
|
||||
session_counts = count_sessions(rows, n_stages)
|
||||
drop = session_counts[first_stage] - session_counts[last_stage]
|
||||
|
||||
all_issues_with_context, n_issues_dict, affected_users_dict, affected_sessions, contexts = get_affected_users_for_all_issues(
|
||||
rows, first_stage, last_stage)
|
||||
transitions, errors, all_errors, n_sess_affected = get_transitions_and_issues_of_each_type(rows,
|
||||
all_issues_with_context,
|
||||
first_stage, last_stage)
|
||||
|
||||
# print("len(transitions) =", len(transitions))
|
||||
|
||||
if any(all_errors):
|
||||
total_drop_corr, conf, is_sign = pearson_corr(transitions, all_errors)
|
||||
if total_drop_corr is not None and drop is not None:
|
||||
total_drop_due_to_issues = int(total_drop_corr * n_sess_affected)
|
||||
else:
|
||||
total_drop_due_to_issues = 0
|
||||
else:
|
||||
total_drop_due_to_issues = 0
|
||||
|
||||
if drop_only:
|
||||
return total_drop_due_to_issues
|
||||
for issue in all_issues_with_context:
|
||||
|
||||
if not any(errors[issue]):
|
||||
continue
|
||||
r, confidence, is_sign = pearson_corr(transitions, errors[issue])
|
||||
|
||||
if r is not None and drop is not None and is_sign:
|
||||
lost_conversions = int(r * affected_sessions[issue])
|
||||
else:
|
||||
lost_conversions = None
|
||||
if r is None:
|
||||
r = 0
|
||||
split = issue.split('__^__')
|
||||
issues_dict['significant' if is_sign else 'insignificant'].append({
|
||||
"type": split[0],
|
||||
"title": helper.get_issue_title(split[0]),
|
||||
"affected_sessions": affected_sessions[issue],
|
||||
"unaffected_sessions": session_counts[1] - affected_sessions[issue],
|
||||
"lost_conversions": lost_conversions,
|
||||
"affected_users": affected_users_dict[issue],
|
||||
"conversion_impact": round(r * 100),
|
||||
"context_string": contexts[issue]["context"],
|
||||
"issue_id": contexts[issue]["id"]
|
||||
})
|
||||
|
||||
if is_sign:
|
||||
n_critical_issues += n_issues_dict[issue]
|
||||
|
||||
return n_critical_issues, issues_dict, total_drop_due_to_issues
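
A sketch of how this is meant to be called for a sub-funnel; first_stage and last_stage are 1-based as the docstring notes (the arguments here are hypothetical):

# issues limited to the part of the funnel between stages 2 and 4
n_critical, issues_by_significance, drop_due_to_issues = get_issues(stages, rows,
                                                                    first_stage=2, last_stage=4)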
|
||||
|
||||
|
||||
def get_top_insights(filter_d, project_id):
|
||||
output = []
|
||||
stages = filter_d.get("events", [])
|
||||
# TODO: handle 1 stage alone
|
||||
if len(stages) == 0:
|
||||
print("no stages found")
|
||||
return output, 0
|
||||
elif len(stages) == 1:
|
||||
# TODO: count sessions, and users for single stage
|
||||
output = [{
|
||||
"type": stages[0]["type"],
|
||||
"value": stages[0]["value"],
|
||||
"dropPercentage": None,
|
||||
"operator": stages[0]["operator"],
|
||||
"sessionsCount": 0,
|
||||
"dropPct": 0,
|
||||
"usersCount": 0,
|
||||
"dropDueToIssues": 0
|
||||
|
||||
}]
|
||||
counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d),
|
||||
project_id=project_id,
|
||||
user_id=None, count_only=True)
|
||||
output[0]["sessionsCount"] = counts["countSessions"]
|
||||
output[0]["usersCount"] = counts["countUsers"]
|
||||
return output, 0
|
||||
# The result of the multi-stage query
|
||||
rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
|
||||
if len(rows) == 0:
|
||||
return get_stages(stages, []), 0
|
||||
# Obtain the first part of the output
|
||||
stages_list = get_stages(stages, rows)
|
||||
# Obtain the second part of the output
|
||||
total_drop_due_to_issues = get_issues(stages, rows, first_stage=filter_d.get("firstStage"),
|
||||
last_stage=filter_d.get("lastStage"), drop_only=True)
|
||||
return stages_list, total_drop_due_to_issues
|
||||
|
||||
|
||||
def get_issues_list(filter_d, project_id, first_stage=None, last_stage=None):
|
||||
output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []})
|
||||
stages = filter_d.get("events", [])
|
||||
# The result of the multi-stage query
|
||||
rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
|
||||
# print(json.dumps(rows[0],indent=4))
|
||||
# return
|
||||
if len(rows) == 0:
|
||||
return output
|
||||
# Obtain the second part of the output
|
||||
n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows, first_stage=first_stage,
|
||||
last_stage=last_stage)
|
||||
output['total_drop_due_to_issues'] = total_drop_due_to_issues
|
||||
# output['critical_issues_count'] = n_critical_issues
|
||||
output = {**output, **issues_dict}
|
||||
return output
|
||||
|
||||
|
||||
def get_overview(filter_d, project_id, first_stage=None, last_stage=None):
|
||||
output = dict()
|
||||
stages = filter_d["events"]
|
||||
# TODO: handle 1 stage alone
|
||||
if len(stages) == 0:
|
||||
return {"stages": [],
|
||||
"criticalIssuesCount": 0}
|
||||
elif len(stages) == 1:
|
||||
# TODO: count sessions, and users for single stage
|
||||
output["stages"] = [{
|
||||
"type": stages[0]["type"],
|
||||
"value": stages[0]["value"],
|
||||
"sessionsCount": None,
|
||||
"dropPercentage": None,
|
||||
"usersCount": None
|
||||
}]
|
||||
return output
|
||||
# The result of the multi-stage query
|
||||
rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
|
||||
if len(rows) == 0:
|
||||
# PS: not sure what to return if rows are empty
|
||||
output["stages"] = [{
|
||||
"type": stages[0]["type"],
|
||||
"value": stages[0]["value"],
|
||||
"sessionsCount": None,
|
||||
"dropPercentage": None,
|
||||
"usersCount": None
|
||||
}]
|
||||
output['criticalIssuesCount'] = 0
|
||||
return output
|
||||
# Obtain the first part of the output
|
||||
stages_list = get_stages(stages, rows)
|
||||
|
||||
# Obtain the second part of the output
|
||||
n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows, first_stage=first_stage,
|
||||
last_stage=last_stage)
|
||||
|
||||
output['stages'] = stages_list
|
||||
output['criticalIssuesCount'] = n_critical_issues
|
||||
return output
|
||||
|
|
@ -201,6 +201,6 @@ def get_available_actions(tenant_id):
|
|||
|
||||
|
||||
cron_jobs = [
|
||||
{"func": process_traces_queue, "trigger": "interval", "seconds": config("traces_period", cast=int, default=60),
|
||||
{"func": process_traces_queue, "trigger": "interval", "seconds": config("TRACE_PERIOD", cast=int, default=60),
|
||||
"misfire_grace_time": 20}
|
||||
]
|
||||
|
|
|
|||
|
|
@ -1,17 +1,17 @@
|
|||
#!/bin/bash
|
||||
|
||||
rm -rf ./chalicelib/core/alerts.py
|
||||
rm -rf ./chalicelib/core/alerts_processor.py
|
||||
#exp rm -rf ./chalicelib/core/alerts_processor.py
|
||||
rm -rf ./chalicelib/core/announcements.py
|
||||
rm -rf ./chalicelib/core/autocomplete.py
|
||||
rm -rf ./chalicelib/core/collaboration_slack.py
|
||||
rm -rf ./chalicelib/core/countries.py
|
||||
rm -rf ./chalicelib/core/errors.py
|
||||
#exp rm -rf ./chalicelib/core/errors.py
|
||||
rm -rf ./chalicelib/core/errors_favorite.py
|
||||
rm -rf ./chalicelib/core/events.py
|
||||
#exp rm -rf ./chalicelib/core/events.py
|
||||
rm -rf ./chalicelib/core/events_ios.py
|
||||
rm -rf ./chalicelib/core/dashboards.py
|
||||
rm -rf ./chalicelib/core/funnels.py
|
||||
#exp rm -rf ./chalicelib/core/dashboards.py
|
||||
#exp rm -rf ./chalicelib/core/funnels.py
|
||||
rm -rf ./chalicelib/core/integration_base.py
|
||||
rm -rf ./chalicelib/core/integration_base_issue.py
|
||||
rm -rf ./chalicelib/core/integration_github.py
|
||||
|
|
@ -36,7 +36,7 @@ rm -rf ./chalicelib/core/sessions.py
|
|||
rm -rf ./chalicelib/core/sessions_assignments.py
|
||||
rm -rf ./chalicelib/core/sessions_metas.py
|
||||
rm -rf ./chalicelib/core/sessions_mobs.py
|
||||
rm -rf ./chalicelib/core/significance.py
|
||||
#exp rm -rf ./chalicelib/core/significance.py
|
||||
rm -rf ./chalicelib/core/slack.py
|
||||
rm -rf ./chalicelib/core/socket_ios.py
|
||||
rm -rf ./chalicelib/core/sourcemaps.py
|
||||
|
|
@ -78,7 +78,7 @@ rm -rf ./routers/subs/insights.py
|
|||
rm -rf ./schemas.py
|
||||
rm -rf ./routers/subs/v1_api.py
|
||||
rm -rf ./routers/subs/metrics.py
|
||||
rm -rf ./chalicelib/core/custom_metrics.py
|
||||
#exp rm -rf ./chalicelib/core/custom_metrics.py
|
||||
rm -rf ./chalicelib/core/performance_event.py
|
||||
rm -rf ./chalicelib/core/saved_search.py
|
||||
rm -rf ./app_alerts.py
|
||||
|
|
|
|||
|
|
@ -2,6 +2,6 @@
|
|||
sh env_vars.sh
|
||||
source /tmp/.env.override
|
||||
cd sourcemap-reader
|
||||
nohup npm start &> /tmp/sourcemap-reader.log &
|
||||
nohup npm start &
|
||||
cd ..
|
||||
uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload --proxy-headers
|
||||
|
|
|
|||
|
|
@ -37,9 +37,9 @@ jwt_algorithm=HS512
|
|||
jwt_exp_delta_seconds=2592000
|
||||
jwt_issuer=openreplay-default-ee
|
||||
jwt_secret="SET A RANDOM STRING HERE"
|
||||
ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001
|
||||
assist=/assist/%s/sockets-live
|
||||
assistList=/assist/%s/sockets-list
|
||||
ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s
|
||||
assist=/sockets-live
|
||||
assistList=/sockets-list
|
||||
pg_dbname=postgres
|
||||
pg_host=postgresql.db.svc.cluster.local
|
||||
pg_password=asayerPostgres
|
||||
|
|
@ -56,14 +56,16 @@ sentryURL=
|
|||
sessions_bucket=mobs
|
||||
sessions_region=us-east-1
|
||||
sourcemaps_bucket=sourcemaps
|
||||
sourcemaps_reader=http://127.0.0.1:9000/sourcemaps
|
||||
sourcemaps_reader=http://127.0.0.1:9000/sourcemaps/%s/sourcemaps
|
||||
stage=default-ee
|
||||
version_number=1.0.0
|
||||
FS_DIR=/mnt/efs
|
||||
EXP_SESSIONS_SEARCH=false
|
||||
EXP_AUTOCOMPLETE=false
|
||||
EXP_ERRORS_SEARCH=false
|
||||
EXP_METRICS=false
|
||||
EXP_METRICS=true
|
||||
EXP_7D_MV=false
|
||||
EXP_ALERTS=false
|
||||
EXP_FUNNELS=false
|
||||
EXP_FUNNELS=false
|
||||
EXP_RESOURCES=true
|
||||
TRACE_PERIOD=300
|
||||
|
|
@ -6,9 +6,8 @@ from starlette.responses import RedirectResponse
|
|||
|
||||
import schemas
|
||||
import schemas_ee
|
||||
from chalicelib.core import integrations_manager
|
||||
from chalicelib.core import sessions
|
||||
from chalicelib.core import tenants, users, metadata, projects, license
|
||||
from chalicelib.core import tenants, users, projects, license
|
||||
from chalicelib.core import webhook
|
||||
from chalicelib.core.collaboration_slack import Slack
|
||||
from chalicelib.utils import SAML2_helper
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
from chalicelib.core import telemetry, unlock
|
||||
from chalicelib.core import weekly_report, jobs
|
||||
from chalicelib.core import jobs
|
||||
from chalicelib.core import weekly_report as weekly_report_script
|
||||
from decouple import config
|
||||
|
||||
|
||||
|
|
@ -7,15 +8,14 @@ async def run_scheduled_jobs() -> None:
|
|||
jobs.execute_jobs()
|
||||
|
||||
|
||||
async def weekly_report2() -> None:
|
||||
weekly_report.cron()
|
||||
async def weekly_report() -> None:
|
||||
weekly_report_script.cron()
|
||||
|
||||
|
||||
async def telemetry_cron() -> None:
|
||||
telemetry.compute()
|
||||
|
||||
|
||||
# @app.schedule(Cron('0/60', '*', '*', '*', '?', '*'))
|
||||
def unlock_cron() -> None:
|
||||
print("validating license")
|
||||
unlock.check()
|
||||
|
|
@ -28,7 +28,7 @@ cron_jobs = [
|
|||
|
||||
SINGLE_CRONS = [{"func": telemetry_cron, "trigger": "cron", "day_of_week": "*"},
|
||||
{"func": run_scheduled_jobs, "trigger": "interval", "seconds": 60, "misfire_grace_time": 20},
|
||||
{"func": weekly_report2, "trigger": "cron", "day_of_week": "mon", "hour": 5,
|
||||
{"func": weekly_report, "trigger": "cron", "day_of_week": "mon", "hour": 5,
|
||||
"misfire_grace_time": 60 * 60}]
|
||||
|
||||
if config("LOCAL_CRONS", default=False, cast=bool):
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
from chalicelib.core import roles, traces
|
||||
from chalicelib.core import unlock
|
||||
from chalicelib.utils import assist_helper
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
|
||||
unlock.check()
|
||||
|
||||
|
|
|
|||
|
|
@ -2,23 +2,18 @@ package datasaver
|
|||
|
||||
import (
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"openreplay/backend/pkg/db/clickhouse"
|
||||
"openreplay/backend/pkg/db/types"
|
||||
"openreplay/backend/pkg/env"
|
||||
"openreplay/backend/pkg/messages"
|
||||
)
|
||||
|
||||
var finalizeTicker <-chan time.Time
|
||||
|
||||
func (si *Saver) InitStats() {
|
||||
si.ch = clickhouse.NewConnector(env.String("CLICKHOUSE_STRING"))
|
||||
if err := si.ch.Prepare(); err != nil {
|
||||
log.Fatalf("Clickhouse prepare error: %v\n", err)
|
||||
}
|
||||
si.pg.Conn.SetClickHouse(si.ch)
|
||||
finalizeTicker = time.Tick(20 * time.Minute)
|
||||
}
|
||||
|
||||
func (si *Saver) InsertStats(session *types.Session, msg messages.Message) error {
|
||||
|
|
@ -43,13 +38,6 @@ func (si *Saver) InsertStats(session *types.Session, msg messages.Message) error
|
|||
return nil
|
||||
}
|
||||
|
||||
func (si *Saver) CommitStats() error {
|
||||
select {
|
||||
case <-finalizeTicker:
|
||||
if err := si.ch.FinaliseSessionsTable(); err != nil {
|
||||
log.Printf("Stats: FinaliseSessionsTable returned an error. %v", err)
|
||||
}
|
||||
default:
|
||||
}
|
||||
func (si *Saver) CommitStats(optimize bool) error {
|
||||
return si.ch.Commit()
|
||||
}
|
||||
|
|
|
|||
|
|
@ -69,7 +69,6 @@ var CONTAINER_TYPE_MAP = map[uint64]string{0: "window", 1: "iframe", 2: "embed",
|
|||
type Connector interface {
|
||||
Prepare() error
|
||||
Commit() error
|
||||
FinaliseSessionsTable() error
|
||||
InsertWebSession(session *types.Session) error
|
||||
InsertWebResourceEvent(session *types.Session, msg *messages.ResourceEvent) error
|
||||
InsertWebPageEvent(session *types.Session, msg *messages.PageEvent) error
|
||||
|
|
@ -157,13 +156,6 @@ func (c *connectorImpl) Commit() error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (c *connectorImpl) FinaliseSessionsTable() error {
|
||||
if err := c.conn.Exec(context.Background(), "OPTIMIZE TABLE sessions FINAL"); err != nil {
|
||||
return fmt.Errorf("can't finalise sessions table: %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *connectorImpl) checkError(name string, err error) {
|
||||
if err != clickhouse.ErrBatchAlreadySent {
|
||||
log.Printf("can't create %s batch after failed append operation: %s", name, err)
|
||||
|
|
|
|||
|
|
@ -194,3 +194,16 @@ func (consumer *Consumer) Close() {
|
|||
log.Printf("Kafka consumer close error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func (consumer *Consumer) HasFirstPartition() bool {
|
||||
assigned, err := consumer.c.Assignment()
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
for _, p := range assigned {
|
||||
if p.Partition == 1 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
|
|
|||
|
|
@ -63,13 +63,6 @@ class SessionStart(Message):
|
|||
self.user_id = user_id
|
||||
|
||||
|
||||
class SessionDisconnect(Message):
|
||||
__id__ = 2
|
||||
|
||||
def __init__(self, timestamp):
|
||||
self.timestamp = timestamp
|
||||
|
||||
|
||||
class SessionEnd(Message):
|
||||
__id__ = 3
|
||||
|
||||
|
|
@ -106,7 +99,6 @@ class CreateDocument(Message):
|
|||
__id__ = 7
|
||||
|
||||
def __init__(self, ):
|
||||
pass
|
||||
|
||||
|
||||
|
||||
|
|
@ -752,6 +744,14 @@ class AdoptedSSRemoveOwner(Message):
|
|||
self.id = id
|
||||
|
||||
|
||||
class Zustand(Message):
|
||||
__id__ = 79
|
||||
|
||||
def __init__(self, mutation, state):
|
||||
self.mutation = mutation
|
||||
self.state = state
|
||||
|
||||
|
||||
class IOSBatchMeta(Message):
|
||||
__id__ = 107
|
||||
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
from msgcodec.codec import Codec
|
||||
from msgcodec.messages import *
|
||||
from typing import List
|
||||
import io
|
||||
|
||||
class MessageCodec(Codec):
|
||||
|
|
@ -42,7 +43,7 @@ class MessageCodec(Codec):
|
|||
raise UnicodeDecodeError(f"Error while decoding message key (SessionID) from {b}\n{e}")
|
||||
return decoded
|
||||
|
||||
def decode_detailed(self, b: bytes):
|
||||
def decode_detailed(self, b: bytes) -> List[Message]:
|
||||
reader = io.BytesIO(b)
|
||||
messages_list = list()
|
||||
messages_list.append(self.handler(reader, 0))
|
||||
|
|
@ -61,7 +62,7 @@ class MessageCodec(Codec):
|
|||
break
|
||||
return messages_list
|
||||
|
||||
def handler(self, reader: io.BytesIO, mode=0):
|
||||
def handler(self, reader: io.BytesIO, mode=0) -> Message:
|
||||
message_id = self.read_message_id(reader)
|
||||
if mode == 1:
|
||||
# We skip the three bytes representing the length of message. It can be used to skip unwanted messages
|
||||
|
|
@ -71,9 +72,10 @@ class MessageCodec(Codec):
|
|||
# Old format with no bytes for message length
|
||||
return self.read_head_message(reader, message_id)
|
||||
else:
|
||||
raise IOError()
|
||||
raise IOError()
|
||||
|
||||
def read_head_message(self, reader: io.BytesIO, message_id) -> Message:
|
||||
|
||||
def read_head_message(self, reader: io.BytesIO, message_id: int):
|
||||
if message_id == 80:
|
||||
return BatchMeta(
|
||||
page_no=self.read_uint(reader),
|
||||
|
|
@ -121,11 +123,6 @@ class MessageCodec(Codec):
|
|||
user_id=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 2:
|
||||
return SessionDisconnect(
|
||||
timestamp=self.read_uint(reader)
|
||||
)
|
||||
|
||||
if message_id == 3:
|
||||
return SessionEnd(
|
||||
timestamp=self.read_uint(reader)
|
||||
|
|
@ -665,6 +662,12 @@ class MessageCodec(Codec):
|
|||
id=self.read_uint(reader)
|
||||
)
|
||||
|
||||
if message_id == 79:
|
||||
return Zustand(
|
||||
mutation=self.read_string(reader),
|
||||
state=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 107:
|
||||
return IOSBatchMeta(
|
||||
timestamp=self.read_uint(reader),
|
||||
|
|
|
|||
38  ee/scripts/helm/db/init_dbs/postgresql/1.8.1/1.8.1.sql  (new file)
|
|
@ -0,0 +1,38 @@
|
|||
BEGIN;
|
||||
CREATE OR REPLACE FUNCTION openreplay_version()
|
||||
RETURNS text AS
|
||||
$$
|
||||
SELECT 'v1.8.1-ee'
|
||||
$$ LANGUAGE sql IMMUTABLE;
|
||||
|
||||
|
||||
INSERT INTO metrics (name, category, default_config, is_predefined, is_template, is_public, predefined_key, metric_type,
|
||||
view_type)
|
||||
VALUES ('Fetch Calls with Errors', 'errors', '{
|
||||
"col": 4,
|
||||
"row": 2,
|
||||
"position": 0
|
||||
}', true, true, true, 'calls_errors', 'predefined', 'table')
|
||||
ON CONFLICT (predefined_key) DO UPDATE
|
||||
SET name=excluded.name,
|
||||
category=excluded.category,
|
||||
default_config=excluded.default_config,
|
||||
is_predefined=excluded.is_predefined,
|
||||
is_template=excluded.is_template,
|
||||
is_public=excluded.is_public,
|
||||
metric_type=excluded.metric_type,
|
||||
view_type=excluded.view_type;
|
||||
|
||||
ALTER TABLE IF EXISTS oauth_authentication
|
||||
DROP CONSTRAINT IF EXISTS oauth_authentication_user_id_provider_provider_user_id_key;
|
||||
|
||||
DROP INDEX IF EXISTS oauth_authentication_user_id_provider_provider_user_id_key;
|
||||
|
||||
ALTER TABLE IF EXISTS oauth_authentication
|
||||
DROP CONSTRAINT IF EXISTS oauth_authentication_user_id_provider_key;
|
||||
|
||||
DROP INDEX IF EXISTS oauth_authentication_user_id_provider_key;
|
||||
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS oauth_authentication_unique_user_id_provider_idx ON oauth_authentication (user_id, provider);
|
||||
|
||||
COMMIT;
|
||||
|
|
@ -7,7 +7,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto;
|
|||
CREATE OR REPLACE FUNCTION openreplay_version()
|
||||
RETURNS text AS
|
||||
$$
|
||||
SELECT 'v1.8.0-ee'
|
||||
SELECT 'v1.8.1-ee'
|
||||
$$ LANGUAGE sql IMMUTABLE;
|
||||
|
||||
|
||||
|
|
@ -221,10 +221,9 @@ $$
|
|||
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
|
||||
provider oauth_provider NOT NULL,
|
||||
provider_user_id text NOT NULL,
|
||||
token text NOT NULL,
|
||||
UNIQUE (user_id, provider)
|
||||
token text NOT NULL
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS oauth_authentication_unique_user_id_provider_idx ON oauth_authentication(user_id,provider);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS projects
|
||||
(
|
||||
|
|
@ -1344,7 +1343,7 @@ VALUES ('Captured sessions', 'web vitals', '{
|
|||
"position": 0
|
||||
}', true, true, true, 'errors_per_domains', 'predefined', 'table'),
|
||||
('Fetch Calls with Errors', 'errors', '{
|
||||
"col": 2,
|
||||
"col": 4,
|
||||
"row": 2,
|
||||
"position": 0
|
||||
}', true, true, true, 'calls_errors', 'predefined', 'table'),
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
const dumps = require('./utils/HeapSnapshot');
|
||||
const {request_logger} = require('./utils/helper');
|
||||
const express = require('express');
|
||||
const assert = require('assert').strict;
|
||||
|
||||
let socket;
|
||||
if (process.env.redis === "true") {
|
||||
socket = require("./servers/websocket-cluster");
|
||||
|
|
@ -8,24 +10,27 @@ if (process.env.redis === "true") {
|
|||
socket = require("./servers/websocket");
|
||||
}
|
||||
|
||||
const HOST = '0.0.0.0';
|
||||
const HOST = process.env.LISTEN_HOST || '0.0.0.0';
|
||||
const PORT = process.env.LISTEN_PORT || 9001;
|
||||
assert.ok(process.env.ASSIST_KEY, 'The "ASSIST_KEY" environment variable is required');
|
||||
const P_KEY = process.env.ASSIST_KEY;
|
||||
const PREFIX = process.env.PREFIX || process.env.prefix || `/assist`
|
||||
|
||||
let debug = process.env.debug === "1" || false;
|
||||
const PREFIX = process.env.prefix || `/assist`
|
||||
let debug = process.env.debug === "1";
|
||||
const heapdump = process.env.heapdump === "1";
|
||||
|
||||
if (process.env.uws !== "true") {
|
||||
let wsapp = express();
|
||||
wsapp.use(express.json());
|
||||
wsapp.use(express.urlencoded({extended: true}));
|
||||
wsapp.use(request_logger("[wsapp]"));
|
||||
wsapp.get([PREFIX, `${PREFIX}/`], (req, res) => {
|
||||
wsapp.get(['/', PREFIX, `${PREFIX}/`, `${PREFIX}/${P_KEY}`, `${PREFIX}/${P_KEY}/`], (req, res) => {
|
||||
res.statusCode = 200;
|
||||
res.end("ok!");
|
||||
}
|
||||
);
|
||||
wsapp.use(`/heapdump/${process.env.S3_KEY}`, dumps.router);
|
||||
wsapp.use(`${PREFIX}/${process.env.S3_KEY}`, socket.wsRouter);
|
||||
heapdump && wsapp.use(`${PREFIX}/${P_KEY}/heapdump`, dumps.router);
|
||||
wsapp.use(`${PREFIX}/${P_KEY}`, socket.wsRouter);
|
||||
wsapp.enable('trust proxy');
|
||||
const wsserver = wsapp.listen(PORT, HOST, () => {
|
||||
console.log(`WS App listening on http://${HOST}:${PORT}`);
|
||||
|
|
@ -44,9 +49,11 @@ if (process.env.uws !== "true") {
|
|||
const healthFn = (res, req) => {
|
||||
res.writeStatus('200 OK').end('ok!');
|
||||
}
|
||||
uapp.get('/', healthFn);
|
||||
uapp.get(PREFIX, healthFn);
|
||||
uapp.get(`${PREFIX}/`, healthFn);
|
||||
uapp.get(`${PREFIX}/${process.env.S3_KEY}`, healthFn);
|
||||
uapp.get(`${PREFIX}/${P_KEY}`, healthFn);
|
||||
uapp.get(`${PREFIX}/${P_KEY}/`, healthFn);
|
||||
|
||||
|
||||
/* Either onAborted or simply finished request */
|
||||
|
|
@ -73,19 +80,19 @@ if (process.env.uws !== "true") {
|
|||
return fn(req, res);
|
||||
}
|
||||
}
|
||||
uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList));
|
||||
uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList));
|
||||
uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete));
|
||||
uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject));
|
||||
uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject));
|
||||
uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsListByProject));
|
||||
uapp.get(`${PREFIX}/${P_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList));
|
||||
uapp.post(`${PREFIX}/${P_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList));
|
||||
uapp.get(`${PREFIX}/${P_KEY}/sockets-list/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete));
|
||||
uapp.get(`${PREFIX}/${P_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject));
|
||||
uapp.post(`${PREFIX}/${P_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject));
|
||||
uapp.get(`${PREFIX}/${P_KEY}/sockets-list/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsListByProject));
|
||||
|
||||
uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive));
|
||||
uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive));
|
||||
uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete));
|
||||
uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject));
|
||||
uapp.post(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject));
|
||||
uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsLiveByProject));
|
||||
uapp.get(`${PREFIX}/${P_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive));
|
||||
uapp.post(`${PREFIX}/${P_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive));
|
||||
uapp.get(`${PREFIX}/${P_KEY}/sockets-live/:projectKey/autocomplete`, uWrapper(socket.handlers.autocomplete));
|
||||
uapp.get(`${PREFIX}/${P_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject));
|
||||
uapp.post(`${PREFIX}/${P_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject));
|
||||
uapp.get(`${PREFIX}/${P_KEY}/sockets-live/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsLiveByProject));
|
||||
|
||||
|
||||
socket.start(uapp);
|
||||
|
|
|
|||
|
|
@ -28,7 +28,7 @@ const pubClient = createClient({url: REDIS_URL});
|
|||
const subClient = pubClient.duplicate();
|
||||
console.log(`Using Redis: ${REDIS_URL}`);
|
||||
let io;
|
||||
const debug = process.env.debug === "1" || false;
|
||||
const debug = process.env.debug === "1";
|
||||
|
||||
const createSocketIOServer = function (server, prefix) {
|
||||
if (process.env.uws !== "true") {
|
||||
|
|
@ -283,6 +283,7 @@ module.exports = {
|
|||
start: (server, prefix) => {
|
||||
createSocketIOServer(server, prefix);
|
||||
io.on('connection', async (socket) => {
|
||||
socket.on(EVENTS_DEFINITION.listen.ERROR, err => errorHandler(EVENTS_DEFINITION.listen.ERROR, err));
|
||||
debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
|
||||
socket._connectedAt = new Date();
|
||||
socket.peerId = socket.handshake.query.peerId;
|
||||
|
|
@ -351,7 +352,6 @@ module.exports = {
|
|||
|
||||
socket.on(EVENTS_DEFINITION.listen.CONNECT_ERROR, err => errorHandler(EVENTS_DEFINITION.listen.CONNECT_ERROR, err));
|
||||
socket.on(EVENTS_DEFINITION.listen.CONNECT_FAILED, err => errorHandler(EVENTS_DEFINITION.listen.CONNECT_FAILED, err));
|
||||
socket.on(EVENTS_DEFINITION.listen.ERROR, err => errorHandler(EVENTS_DEFINITION.listen.ERROR, err));
|
||||
|
||||
socket.onAny(async (eventName, ...args) => {
|
||||
if (Object.values(EVENTS_DEFINITION.listen).indexOf(eventName) >= 0) {
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ const {
|
|||
const wsRouter = express.Router();
|
||||
|
||||
let io;
|
||||
const debug = process.env.debug === "1" || false;
|
||||
const debug = process.env.debug === "1";
|
||||
|
||||
const createSocketIOServer = function (server, prefix) {
|
||||
if (process.env.uws !== "true") {
|
||||
|
|
@ -261,6 +261,7 @@ module.exports = {
|
|||
start: (server, prefix) => {
|
||||
createSocketIOServer(server, prefix);
|
||||
io.on('connection', async (socket) => {
|
||||
socket.on(EVENTS_DEFINITION.listen.ERROR, err => errorHandler(EVENTS_DEFINITION.listen.ERROR, err));
|
||||
debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
|
||||
socket._connectedAt = new Date();
|
||||
socket.peerId = socket.handshake.query.peerId;
|
||||
|
|
@ -327,7 +328,6 @@ module.exports = {
|
|||
|
||||
socket.on(EVENTS_DEFINITION.listen.CONNECT_ERROR, err => errorHandler(EVENTS_DEFINITION.listen.CONNECT_ERROR, err));
|
||||
socket.on(EVENTS_DEFINITION.listen.CONNECT_FAILED, err => errorHandler(EVENTS_DEFINITION.listen.CONNECT_FAILED, err));
|
||||
socket.on(EVENTS_DEFINITION.listen.ERROR, err => errorHandler(EVENTS_DEFINITION.listen.ERROR, err));
|
||||
|
||||
socket.onAny(async (eventName, ...args) => {
|
||||
if (Object.values(EVENTS_DEFINITION.listen).indexOf(eventName) >= 0) {
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const helper = require('./helper');
|
||||
let debug = process.env.debug === "1" || false;
|
||||
let debug = process.env.debug === "1";
|
||||
const getBodyFromUWSResponse = async function (res) {
|
||||
return new Promise(((resolve, reject) => {
|
||||
let buffer;
|
||||
|
|
|
|||
|
|
@ -22,5 +22,5 @@ MINIO_ACCESS_KEY = ''
|
|||
MINIO_SECRET_KEY = ''
|
||||
|
||||
# APP and TRACKER VERSIONS
|
||||
VERSION = '1.8.0'
|
||||
TRACKER_VERSION = '3.6.0'
|
||||
VERSION = '1.8.1'
|
||||
TRACKER_VERSION = '4.1.0'
|
||||
|
|
|
|||
|
|
@ -126,7 +126,7 @@ class Router extends React.Component {
|
|||
}
|
||||
|
||||
fetchInitialData = async () => {
|
||||
await this.props.fetchUserInfo(),
|
||||
await this.props.fetchUserInfo()
|
||||
await this.props.fetchSiteList()
|
||||
const { mstore } = this.props;
|
||||
mstore.initClient();
|
||||
|
|
|
|||
|
|
@ -12,10 +12,14 @@ export default store => next => (action) => {
|
|||
const client = new APIClient();
|
||||
|
||||
return call(client)
|
||||
.then(response => {
|
||||
.then(async response => {
|
||||
if (response.status === 403) {
|
||||
next({ type: DELETE });
|
||||
}
|
||||
if (!response.ok) {
|
||||
const text = await response.text()
|
||||
return Promise.reject(text);
|
||||
}
|
||||
return response.json()
|
||||
})
|
||||
.then(json => json || {}) // TEMP TODO on server: no empty responces
|
||||
|
|
@ -31,7 +35,7 @@ export default store => next => (action) => {
|
|||
})
|
||||
.catch((e) => {
|
||||
logger.error("Error during API request. ", e)
|
||||
return next({ type: FAILURE, errors: [ "Connection error", String(e) ] });
|
||||
return next({ type: FAILURE, errors: JSON.parse(e).errors || [] });
|
||||
});
|
||||
};
|
||||
|
||||
|
|
|
|||
1  frontend/app/assets/integrations/pinia.svg  (new file; SVG asset, 5.2 KiB; diff suppressed)
|
|
@ -26,7 +26,7 @@ function ChatWindow({ userId, incomeStream, localStream, endCall, isPrestart }:
|
|||
>
|
||||
<div className="handle flex items-center p-2 cursor-move select-none border-b">
|
||||
<div className={stl.headerTitle}>
|
||||
<b>Talking to </b> {userId ? userId : 'Anonymous User'}
|
||||
<b>Call with </b> {userId ? userId : 'Anonymous User'}
|
||||
<br />
|
||||
{incomeStream && incomeStream.length > 2 ? ' (+ other agents in the call)' : ''}
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -0,0 +1,53 @@
|
|||
import React from 'react';
|
||||
import { INDEXES } from 'App/constants/zindex';
|
||||
import { connect } from 'react-redux';
|
||||
import { Button, Loader, Icon } from 'UI';
|
||||
import { initiateCallEnd, releaseRemoteControl } from 'Player';
|
||||
|
||||
interface Props {
|
||||
userDisplayName: string;
|
||||
type: WindowType;
|
||||
}
|
||||
|
||||
export enum WindowType {
|
||||
Call,
|
||||
Control,
|
||||
}
|
||||
|
||||
const WIN_VARIANTS = {
|
||||
[WindowType.Call]: {
|
||||
text: 'to accept the call',
|
||||
icon: 'call' as const,
|
||||
action: initiateCallEnd,
|
||||
},
|
||||
[WindowType.Control]: {
|
||||
text: 'to accept remote control request',
|
||||
icon: 'remote-control' as const,
|
||||
action: releaseRemoteControl,
|
||||
},
|
||||
};
|
||||
|
||||
function RequestingWindow({ userDisplayName, type }: Props) {
|
||||
return (
|
||||
<div
|
||||
className="w-full h-full absolute top-0 left-0 flex items-center justify-center"
|
||||
style={{ background: 'rgba(0,0,0, 0.30)', zIndex: INDEXES.PLAYER_REQUEST_WINDOW }}
|
||||
>
|
||||
<div className="rounded bg-white pt-4 pb-2 px-8 flex flex-col text-lg items-center max-w-lg text-center">
|
||||
<Icon size={40} color="teal" name={WIN_VARIANTS[type].icon} className="mb-4" />
|
||||
<div>
|
||||
Waiting for <span className="font-semibold">{userDisplayName}</span>
|
||||
</div>
|
||||
<span>{WIN_VARIANTS[type].text}</span>
|
||||
<Loader size={30} style={{ minHeight: 60 }} />
|
||||
<Button variant="text-primary" onClick={WIN_VARIANTS[type].action}>
|
||||
Cancel
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default connect((state) => ({
|
||||
userDisplayName: state.getIn(['sessions', 'current', 'userDisplayName']),
|
||||
}))(RequestingWindow);
|
||||
1  frontend/app/components/Assist/RequestingWindow/index.ts  (new file)
|
|
@ -0,0 +1 @@
|
|||
export { default, WindowType } from './RequestingWindow'
|
||||
|
|
@ -5,200 +5,252 @@ import cn from 'classnames';
|
|||
import { toggleChatWindow } from 'Duck/sessions';
|
||||
import { connectPlayer } from 'Player/store';
|
||||
import ChatWindow from '../../ChatWindow';
|
||||
import { callPeer, setCallArgs, requestReleaseRemoteControl, toggleAnnotation } from 'Player';
|
||||
import { CallingState, ConnectionStatus, RemoteControlStatus } from 'Player/MessageDistributor/managers/AssistManager';
|
||||
import {
|
||||
callPeer,
|
||||
setCallArgs,
|
||||
requestReleaseRemoteControl,
|
||||
toggleAnnotation,
|
||||
toggleUserName,
|
||||
} from 'Player';
|
||||
import {
|
||||
CallingState,
|
||||
ConnectionStatus,
|
||||
RemoteControlStatus,
|
||||
} from 'Player/MessageDistributor/managers/AssistManager';
|
||||
import RequestLocalStream from 'Player/MessageDistributor/managers/LocalStream';
|
||||
import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream';
|
||||
|
||||
import { Tooltip } from 'react-tippy';
|
||||
import { toast } from 'react-toastify';
|
||||
import { confirm } from 'UI';
|
||||
import stl from './AassistActions.module.css';
|
||||
|
||||
function onReject() {
|
||||
toast.info(`Call was rejected.`);
|
||||
toast.info(`Call was rejected.`);
|
||||
}
|
||||
|
||||
function onError(e: any) {
|
||||
console.log(e)
|
||||
toast.error(typeof e === 'string' ? e : e.message);
|
||||
console.log(e);
|
||||
toast.error(typeof e === 'string' ? e : e.message);
|
||||
}
|
||||
|
||||
interface Props {
|
||||
userId: string;
|
||||
calling: CallingState;
|
||||
annotating: boolean;
|
||||
peerConnectionStatus: ConnectionStatus;
|
||||
remoteControlStatus: RemoteControlStatus;
|
||||
hasPermission: boolean;
|
||||
isEnterprise: boolean;
|
||||
isCallActive: boolean;
|
||||
agentIds: string[];
|
||||
livePlay: boolean;
|
||||
userId: string;
|
||||
calling: CallingState;
|
||||
annotating: boolean;
|
||||
peerConnectionStatus: ConnectionStatus;
|
||||
remoteControlStatus: RemoteControlStatus;
|
||||
hasPermission: boolean;
|
||||
isEnterprise: boolean;
|
||||
isCallActive: boolean;
|
||||
agentIds: string[];
|
||||
livePlay: boolean;
|
||||
userDisplayName: string;
|
||||
}
|
||||
|
||||
function AssistActions({
|
||||
userId,
|
||||
calling,
|
||||
annotating,
|
||||
peerConnectionStatus,
|
||||
remoteControlStatus,
|
||||
hasPermission,
|
||||
isEnterprise,
|
||||
isCallActive,
|
||||
agentIds,
|
||||
livePlay
|
||||
userId,
|
||||
calling,
|
||||
annotating,
|
||||
peerConnectionStatus,
|
||||
remoteControlStatus,
|
||||
hasPermission,
|
||||
isEnterprise,
|
||||
isCallActive,
|
||||
agentIds,
|
||||
livePlay,
|
||||
userDisplayName,
|
||||
}: Props) {
|
||||
const [isPrestart, setPrestart] = useState(false);
|
||||
const [incomeStream, setIncomeStream] = useState<MediaStream[] | null>([]);
|
||||
const [localStream, setLocalStream] = useState<LocalStream | null>(null);
|
||||
const [callObject, setCallObject] = useState<{ end: () => void } | null>(null);
|
||||
const [isPrestart, setPrestart] = useState(false);
|
||||
const [incomeStream, setIncomeStream] = useState<MediaStream[] | null>([]);
|
||||
const [localStream, setLocalStream] = useState<LocalStream | null>(null);
|
||||
const [callObject, setCallObject] = useState<{ end: () => void } | null>(null);
|
||||
|
||||
const onCall = calling === CallingState.OnCall || calling === CallingState.Reconnecting;
|
||||
const cannotCall = peerConnectionStatus !== ConnectionStatus.Connected || (isEnterprise && !hasPermission);
|
||||
const remoteActive = remoteControlStatus === RemoteControlStatus.Enabled;
|
||||
const onCall = calling === CallingState.OnCall || calling === CallingState.Reconnecting;
|
||||
const callRequesting = calling === CallingState.Connecting;
|
||||
const cannotCall =
|
||||
peerConnectionStatus !== ConnectionStatus.Connected || (isEnterprise && !hasPermission);
|
||||
|
||||
useEffect(() => {
|
||||
return callObject?.end()
|
||||
}, [])
|
||||
const remoteRequesting = remoteControlStatus === RemoteControlStatus.Requesting;
|
||||
const remoteActive = remoteControlStatus === RemoteControlStatus.Enabled;
|
||||
|
||||
useEffect(() => {
|
||||
if (peerConnectionStatus == ConnectionStatus.Disconnected) {
|
||||
toast.info(`Live session was closed.`);
|
||||
}
|
||||
}, [peerConnectionStatus]);
|
||||
|
||||
const addIncomeStream = (stream: MediaStream) => {
|
||||
setIncomeStream(oldState => {
|
||||
if (!oldState.find(existingStream => existingStream.id === stream.id)) {
|
||||
return [...oldState, stream]
|
||||
}
|
||||
return oldState
|
||||
});
|
||||
useEffect(() => {
|
||||
if (!onCall && isCallActive && agentIds) {
|
||||
setPrestart(true);
|
||||
// call(agentIds); do not autocall on prestart, can change later
|
||||
}
|
||||
}, [agentIds, isCallActive]);
|
||||
|
||||
function call(additionalAgentIds?: string[]) {
|
||||
RequestLocalStream().then(lStream => {
|
||||
setLocalStream(lStream);
|
||||
setCallArgs(
|
||||
lStream,
|
||||
addIncomeStream,
|
||||
lStream.stop.bind(lStream),
|
||||
onReject,
|
||||
onError
|
||||
)
|
||||
setCallObject(callPeer());
|
||||
if (additionalAgentIds) {
|
||||
callPeer(additionalAgentIds)
|
||||
}
|
||||
}).catch(onError)
|
||||
useEffect(() => {
|
||||
if (!livePlay) {
|
||||
if (annotating) {
|
||||
toggleAnnotation(false);
|
||||
}
|
||||
if (remoteActive) {
|
||||
requestReleaseRemoteControl();
|
||||
}
|
||||
}
|
||||
}, [livePlay]);
|
||||
|
||||
React.useEffect(() => {
|
||||
if (!onCall && isCallActive && agentIds) {
|
||||
setPrestart(true);
|
||||
// call(agentIds); do not autocall on prestart, can change later
|
||||
useEffect(() => {
|
||||
if (remoteActive) {
|
||||
toggleUserName(userDisplayName);
|
||||
} else {
|
||||
toggleUserName();
|
||||
}
|
||||
}, [remoteActive]);
|
||||
|
||||
useEffect(() => {
|
||||
return callObject?.end();
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (peerConnectionStatus == ConnectionStatus.Disconnected) {
|
||||
toast.info(`Live session was closed.`);
|
||||
}
|
||||
}, [peerConnectionStatus]);
|
||||
|
||||
const addIncomeStream = (stream: MediaStream) => {
|
||||
setIncomeStream((oldState) => {
|
||||
if (!oldState.find((existingStream) => existingStream.id === stream.id)) {
|
||||
return [...oldState, stream];
|
||||
}
|
||||
return oldState;
|
||||
});
|
||||
};
|
||||
|
||||
function call(additionalAgentIds?: string[]) {
|
||||
RequestLocalStream()
|
||||
.then((lStream) => {
|
||||
setLocalStream(lStream);
|
||||
setCallArgs(lStream, addIncomeStream, lStream.stop.bind(lStream), onReject, onError);
|
||||
setCallObject(callPeer());
|
||||
if (additionalAgentIds) {
|
||||
callPeer(additionalAgentIds);
|
||||
}
|
||||
}, [agentIds, isCallActive])
|
||||
})
|
||||
.catch(onError);
|
||||
}
|
||||
|
||||
const confirmCall = async () => {
|
||||
if (
|
||||
await confirm({
|
||||
header: 'Start Call',
|
||||
confirmButton: 'Call',
|
||||
confirmation: `Are you sure you want to call ${userId ? userId : 'User'}?`,
|
||||
})
|
||||
) {
|
||||
call(agentIds);
|
||||
}
|
||||
};
|
||||
const confirmCall = async () => {
|
||||
if (callRequesting || remoteRequesting) return;
|
||||
|
||||
React.useEffect(() => {
|
||||
if (!livePlay) {
|
||||
if (annotating) {
|
||||
toggleAnnotation(false);
|
||||
}
|
||||
if (remoteActive) {
|
||||
requestReleaseRemoteControl()
|
||||
}
|
||||
}
|
||||
}, [livePlay])
|
||||
if (
|
||||
await confirm({
|
||||
header: 'Start Call',
|
||||
confirmButton: 'Call',
|
||||
confirmation: `Are you sure you want to call ${userId ? userId : 'User'}?`,
|
||||
})
|
||||
) {
|
||||
call(agentIds);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="flex items-center">
|
||||
{(onCall || remoteActive) && (
|
||||
<>
|
||||
<div
|
||||
className={cn('cursor-pointer p-2 flex items-center', { [stl.disabled]: cannotCall || !livePlay })}
|
||||
onClick={() => toggleAnnotation(!annotating)}
|
||||
role="button"
|
||||
>
|
||||
<Button
|
||||
icon={annotating ? 'pencil-stop' : 'pencil'}
|
||||
variant={annotating ? 'text-red' : 'text-primary'}
|
||||
style={{ height: '28px' }}
|
||||
>
|
||||
Annotate
|
||||
</Button>
|
||||
{/* <IconButton label={`Annotate`} icon={annotating ? 'pencil-stop' : 'pencil'} primaryText redText={annotating} /> */}
|
||||
</div>
|
||||
<div className={stl.divider} />
|
||||
</>
|
||||
)}
|
||||
<div
|
||||
className={cn('cursor-pointer p-2 flex items-center', { [stl.disabled]: cannotCall || !livePlay })}
|
||||
onClick={requestReleaseRemoteControl}
|
||||
role="button"
|
||||
const requestControl = () => {
|
||||
if (callRequesting || remoteRequesting) return;
|
||||
requestReleaseRemoteControl();
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="flex items-center">
|
||||
{(onCall || remoteActive) && (
|
||||
<>
|
||||
<div
|
||||
className={cn('cursor-pointer p-2 flex items-center', {
|
||||
[stl.disabled]: cannotCall || !livePlay,
|
||||
})}
|
||||
onClick={() => toggleAnnotation(!annotating)}
|
||||
role="button"
|
||||
>
|
||||
<Button
|
||||
icon={annotating ? 'pencil-stop' : 'pencil'}
|
||||
variant={annotating ? 'text-red' : 'text-primary'}
|
||||
style={{ height: '28px' }}
|
||||
>
|
||||
<Button
|
||||
icon={remoteActive ? 'window-x' : 'remote-control'}
|
||||
variant={remoteActive ? 'text-red' : 'text-primary'}
|
||||
style={{ height: '28px' }}
|
||||
>
|
||||
Remote Control
|
||||
</Button>
|
||||
{/* <IconButton label={`Remote Control`} icon={remoteActive ? 'window-x' : 'remote-control'} primaryText redText={remoteActive} /> */}
|
||||
</div>
|
||||
<div className={stl.divider} />
|
||||
Annotate
|
||||
</Button>
|
||||
</div>
|
||||
<div className={stl.divider} />
|
||||
</>
|
||||
)}
|
||||
|
||||
<Popup content={cannotCall ? `You don't have the permissions to perform this action.` : `Call ${userId ? userId : 'User'}`}>
|
||||
<div
|
||||
className={cn('cursor-pointer p-2 flex items-center', { [stl.disabled]: cannotCall })}
|
||||
onClick={onCall ? callObject?.end : confirmCall}
|
||||
role="button"
|
||||
>
|
||||
<Button icon="headset" variant={onCall ? 'text-red' : isPrestart ? 'green' : 'primary'} style={{ height: '28px' }}>
|
||||
{onCall ? 'End' : isPrestart ? 'Join Call' : 'Call'}
|
||||
</Button>
|
||||
{/* <IconButton size="small" primary={!onCall} red={onCall} label={onCall ? 'End' : 'Call'} icon="headset" /> */}
|
||||
</div>
|
||||
</Popup>
|
||||
|
||||
<div className="fixed ml-3 left-0 top-0" style={{ zIndex: 999 }}>
|
||||
{onCall && callObject && (
|
||||
<ChatWindow endCall={callObject.end} userId={userId} incomeStream={incomeStream} localStream={localStream} isPrestart={isPrestart} />
|
||||
)}
|
||||
</div>
|
||||
{/* @ts-ignore */}
|
||||
<Tooltip title="Go live to initiate remote control" disabled={livePlay}>
|
||||
<div
|
||||
className={cn('cursor-pointer p-2 flex items-center', {
|
||||
[stl.disabled]: cannotCall || !livePlay || callRequesting || remoteRequesting,
|
||||
})}
|
||||
onClick={requestControl}
|
||||
role="button"
|
||||
>
|
||||
<Button
|
||||
icon={remoteActive ? 'window-x' : 'remote-control'}
|
||||
variant={remoteActive ? 'text-red' : 'text-primary'}
|
||||
style={{ height: '28px' }}
|
||||
>
|
||||
Remote Control
|
||||
</Button>
|
||||
</div>
|
||||
);
|
||||
</Tooltip>
|
||||
<div className={stl.divider} />
|
||||
|
||||
<Popup
|
||||
content={
|
||||
cannotCall
|
||||
? `You don't have the permissions to perform this action.`
|
||||
: `Call ${userId ? userId : 'User'}`
|
||||
}
|
||||
disabled={onCall}
|
||||
>
|
||||
<div
|
||||
className={cn('cursor-pointer p-2 flex items-center', {
|
||||
[stl.disabled]: cannotCall || callRequesting || remoteRequesting,
|
||||
})}
|
||||
onClick={onCall ? callObject?.end : confirmCall}
|
||||
role="button"
|
||||
>
|
||||
<Button
|
||||
icon="headset"
|
||||
variant={onCall ? 'text-red' : isPrestart ? 'green' : 'primary'}
|
||||
style={{ height: '28px' }}
|
||||
>
|
||||
{onCall ? 'End' : isPrestart ? 'Join Call' : 'Call'}
|
||||
</Button>
|
||||
</div>
|
||||
</Popup>
|
||||
|
||||
<div className="fixed ml-3 left-0 top-0" style={{ zIndex: 999 }}>
|
||||
{onCall && callObject && (
|
||||
<ChatWindow
|
||||
endCall={callObject.end}
|
||||
userId={userId}
|
||||
incomeStream={incomeStream}
|
||||
localStream={localStream}
|
||||
isPrestart={isPrestart}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
const con = connect(
|
||||
(state) => {
|
||||
const permissions = state.getIn(['user', 'account', 'permissions']) || [];
|
||||
return {
|
||||
hasPermission: permissions.includes('ASSIST_CALL'),
|
||||
isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee',
|
||||
};
|
||||
},
|
||||
{ toggleChatWindow }
|
||||
(state) => {
|
||||
const permissions = state.getIn(['user', 'account', 'permissions']) || [];
|
||||
return {
|
||||
hasPermission: permissions.includes('ASSIST_CALL'),
|
||||
isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee',
|
||||
userDisplayName: state.getIn(['sessions', 'current', 'userDisplayName']),
|
||||
};
|
||||
},
|
||||
{ toggleChatWindow }
|
||||
);
|
||||
|
||||
export default con(
|
||||
connectPlayer((state) => ({
|
||||
calling: state.calling,
|
||||
annotating: state.annotating,
|
||||
remoteControlStatus: state.remoteControl,
|
||||
peerConnectionStatus: state.peerConnectionStatus,
|
||||
livePlay: state.livePlay,
|
||||
}))(AssistActions)
|
||||
connectPlayer((state) => ({
|
||||
calling: state.calling,
|
||||
annotating: state.annotating,
|
||||
remoteControlStatus: state.remoteControl,
|
||||
peerConnectionStatus: state.peerConnectionStatus,
|
||||
livePlay: state.livePlay,
|
||||
}))(AssistActions)
|
||||
);
|
||||
|
|
|
|||
|
|
@@ -2,7 +2,7 @@ import React from 'react';
import { connect } from 'react-redux';
import { Input, Form, Button, Checkbox, Loader } from 'UI';
import SiteDropdown from 'Shared/SiteDropdown';
import { save, init, edit, remove, fetchList } from 'Duck/integrations/actions';
import { save, init, edit, remove } from 'Duck/integrations/actions';
import { fetchIntegrationList } from 'Duck/integrations/integrations';

@connect(

@@ -21,16 +21,22 @@ import { fetchIntegrationList } from 'Duck/integrations/integrations';
init,
edit,
remove,
fetchList,
// fetchList,
fetchIntegrationList,
}
)
export default class IntegrationForm extends React.PureComponent {
constructor(props) {
super(props);
// const currentSiteId = this.props.initialSiteId;
// this.state = { currentSiteId };
// this.init(currentSiteId);
}

fetchList = () => {
const { siteId, initialSiteId } = this.props;
if (!siteId) {
this.props.fetchIntegrationList(initialSiteId);
} else {
this.props.fetchIntegrationList(siteId);
}
}

write = ({ target: { value, name: key, type, checked } }) => {

@@ -57,6 +63,7 @@ export default class IntegrationForm extends React.PureComponent {
// const { currentSiteId } = this.state;
this.props.save(customPath || name, !ignoreProject ? this.props.siteId : null, config).then(() => {
// this.props.fetchList(name);
this.fetchList();
this.props.onClose();
if (isExists) return;
});

@@ -67,7 +74,7 @@ export default class IntegrationForm extends React.PureComponent {
this.props.remove(name, !ignoreProject ? config.projectId : null).then(
function () {
this.props.onClose();
this.props.fetchList(name);
this.fetchList();
}.bind(this)
);
};
@@ -22,7 +22,9 @@ const IntegrationItem = (props: Props) => {
</Popup>
</div>
)}
<img className="h-12 w-12" src={'/assets/' + integration.icon + '.svg'} alt="integration" />
{integration.icon.length ? <img className="h-12 w-12" src={'/assets/' + integration.icon + '.svg'} alt="integration" /> : (
<span style={{ fontSize: '3rem', lineHeight: '3rem' }}>{integration.header}</span>
)}
<div className="text-center mt-2">
<h4 className="">{integration.title}</h4>
{/* <p className="text-sm color-gray-medium m-0 p-0 h-3">{integration.subtitle && integration.subtitle}</p> */}
@@ -29,6 +29,8 @@ import AssistDoc from './AssistDoc';
import { PageTitle, Loader } from 'UI';
import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG';
import withPageTitle from 'HOCs/withPageTitle';
import PiniaDoc from './PiniaDoc'
import ZustandDoc from './ZustandDoc'

interface Props {
fetch: (name: string, siteId: string) => void;

@@ -162,6 +164,7 @@ const integrations = [
integrations: [
{ title: 'Redux', slug: '', icon: 'integrations/redux', component: <ReduxDoc /> },
{ title: 'VueX', slug: '', icon: 'integrations/vuejs', component: <VueDoc /> },
{ title: 'Pinia', slug: '', icon: 'integrations/pinia', component: <PiniaDoc /> },
{ title: 'GraphQL', slug: '', icon: 'integrations/graphql', component: <GraphQLDoc /> },
{ title: 'NgRx', slug: '', icon: 'integrations/ngrx', component: <NgRxDoc /> },
{ title: 'MobX', slug: '', icon: 'integrations/mobx', component: <MobxDoc /> },

@@ -169,6 +172,7 @@ const integrations = [
{ title: 'Profiler', slug: '', icon: 'integrations/openreplay', component: <ProfilerDoc /> },
{ title: 'Axios', slug: '', icon: 'integrations/openreplay', component: <AxiosDoc /> },
{ title: 'Assist', slug: '', icon: 'integrations/openreplay', component: <AssistDoc /> },
{ title: 'Zustand', slug: '', icon: '', header: '🐻', component: <ZustandDoc /> }
],
},
];
@@ -0,0 +1,102 @@
import React from 'react';
import Highlight from 'react-highlight';
import ToggleContent from '../../../shared/ToggleContent';
import DocLink from 'Shared/DocLink/DocLink';
import { connect } from 'react-redux';

const PiniaDoc = (props) => {
const { projectKey } = props;
return (
<div className="bg-white h-screen overflow-y-auto" style={{ width: '500px' }}>
<h3 className="p-5 text-2xl">VueX</h3>
<div className="p-5">
<div>
This plugin allows you to capture Pinia mutations + state and inspect them later on while
replaying session recordings. This is very useful for understanding and fixing issues.
</div>

<div className="font-bold my-2 text-lg">Installation</div>
<Highlight className="js">{`npm i @openreplay/tracker-vuex --save`}</Highlight>

<div className="font-bold my-2 text-lg">Usage</div>
<p>
Initialize the @openreplay/tracker package as usual and load the plugin into it. Then put
the generated plugin into your plugins field of your store.
</p>
<div className="py-3" />

<ToggleContent
label="Server-Side-Rendered (SSR)?"
first={
<Highlight className="js">
{`import Vuex from 'vuex'
import OpenReplay from '@openreplay/tracker';
import trackerVuex from '@openreplay/tracker-vuex';
//...
const tracker = new OpenReplay({
projectKey: '${projectKey}'
});
tracker.start();
//...
const examplePiniaStore = useExamplePiniaStore()
// check list of available options below
const vuexPlugin = tracker.use(trackerVuex(<options>))
// add a name to your store, optional
//(will be randomly generated otherwise)
const piniaStorePlugin = vuexPlugin('STORE NAME')

// start tracking state updates
piniaStorePlugin(examplePiniaStore)
// now you can use examplePiniaStore as
// usual pinia store
// (destructure values or return it as a whole etc)
`}
</Highlight>
}
second={
<Highlight className="js">
{`import Vuex from 'vuex'
import OpenReplay from '@openreplay/tracker/cjs';
import trackerVuex from '@openreplay/tracker-vuex/cjs';
//...
const tracker = new OpenReplay({
projectKey: '${projectKey}'
});
//...

// start tracker when the app is mounted
tracker.start();

//...
const examplePiniaStore = useExamplePiniaStore()
// check list of available options below
const vuexPlugin = tracker.use(trackerVuex(<options>))
// add a name to your store, optional
// (will be randomly generated otherwise)
const piniaStorePlugin = vuexPlugin('STORE NAME')

// start tracking state updates
piniaStorePlugin(examplePiniaStore)
// now you can use examplePiniaStore as
// usual pinia store
// (destructure values or return it as a whole etc)
}`}
</Highlight>
}
/>

<DocLink
className="mt-4"
label="Integrate Vuex"
url="https://docs.openreplay.com/plugins/pinia"
/>
</div>
</div>
);
};

PiniaDoc.displayName = 'PiniaDoc';

export default connect((state) => ({
projectKey: state.getIn(['site', 'instance', 'projectKey']),
}))(PiniaDoc);

@@ -0,0 +1 @@
export { default } from './PiniaDoc'
@@ -0,0 +1,92 @@
import React from 'react';
import Highlight from 'react-highlight';
import ToggleContent from '../../../shared/ToggleContent';
import DocLink from 'Shared/DocLink/DocLink';
import { connect } from 'react-redux';

const ZustandDoc = (props) => {
const { projectKey } = props;
return (
<div className="bg-white h-screen overflow-y-auto" style={{ width: '500px' }}>
<h3 className="p-5 text-2xl">Zustand</h3>
<div className="p-5">
<div>
This plugin allows you to capture Zustand mutations/state and inspect them later on while replaying session recordings. This is very
useful for understanding and fixing issues.
</div>

<div className="font-bold my-2 text-lg">Installation</div>
<Highlight className="js">{`npm i @openreplay/tracker-zustand --save`}</Highlight>

<div className="font-bold my-2 text-lg">Usage</div>
<p>
Initialize the @openreplay/tracker package as usual and load the plugin into it. Then put the generated plugin into your plugins
field of your store.
</p>
<div className="py-3" />

<ToggleContent
label="Server-Side-Rendered (SSR)?"
first={
<Highlight className="js">
{`import create from "zustand";
import Tracker from '@openreplay/tracker';
import trackerZustand from '@openreplay/tracker-zustand';


const tracker = new Tracker({
projectKey: ${projectKey},
});

const zustandPlugin = tracker.use(trackerZustand())
// store name, optional
// randomly generated if undefined
const bearStoreLogger = zustandPlugin('bear_store')


const useBearStore = create(
bearStoreLogger((set: any) => ({
bears: 0,
increasePopulation: () => set((state: any) => ({ bears: state.bears + 1 })),
removeAllBears: () => set({ bears: 0 }),
}))
)`}
</Highlight>
}
second={
<Highlight className="js">
{`import create from "zustand";
import Tracker from '@openreplay/tracker/cjs';
import trackerZustand from '@openreplay/tracker-zustand/cjs';


const tracker = new Tracker({
projectKey: ${projectKey},
});

const zustandPlugin = tracker.use(trackerZustand())
// store name, optional
// randomly generated if undefined
const bearStoreLogger = zustandPlugin('bear_store')


const useBearStore = create(
bearStoreLogger((set: any) => ({
bears: 0,
increasePopulation: () => set((state: any) => ({ bears: state.bears + 1 })),
removeAllBears: () => set({ bears: 0 }),
}))
)`}
</Highlight>
}
/>

<DocLink className="mt-4" label="Integrate Vuex" url="https://docs.openreplay.com/plugins/zustand" />
</div>
</div>
);
};

ZustandDoc.displayName = 'ZustandDoc';

export default connect((state) => ({ projectKey: state.getIn(['site', 'instance', 'projectKey'])}) )(ZustandDoc)

@@ -0,0 +1 @@
export { default } from './ZustandDoc'
@@ -7,112 +7,126 @@ import { CLIENT_TABS, client as clientRoute } from 'App/routes';
import { withRouter } from 'react-router-dom';

function PreferencesMenu({ account, activeTab, history, isEnterprise }) {
const isAdmin = account.admin || account.superAdmin;
const setTab = (tab) => {
history.push(clientRoute(tab));
};
const isAdmin = account.admin || account.superAdmin;
const setTab = (tab) => {
history.push(clientRoute(tab));
};

return (
<div className={cn(stl.wrapper, 'h-full overflow-y-auto pb-24')}>
<div className={cn(stl.header, 'flex items-end')}>
<div className={stl.label}>
<span>Preferences</span>
</div>
</div>

<div className="mb-2">
<SideMenuitem
active={activeTab === CLIENT_TABS.PROFILE}
title="Account"
iconName="user-circle"
onClick={() => setTab(CLIENT_TABS.PROFILE)}
/>
</div>

<div className="mb-2">
<SideMenuitem
active={activeTab === CLIENT_TABS.INTEGRATIONS}
title="Integrations"
iconName="puzzle-piece"
onClick={() => setTab(CLIENT_TABS.INTEGRATIONS)}
/>
</div>

<div className="mb-2">
<SideMenuitem
iconName="tags"
active={activeTab === CLIENT_TABS.CUSTOM_FIELDS}
onClick={() => setTab(CLIENT_TABS.CUSTOM_FIELDS)}
title="Metadata"
/>
</div>

{
<div className="mb-2">
<SideMenuitem
active={activeTab === CLIENT_TABS.WEBHOOKS}
title="Webhooks"
iconName="anchor"
onClick={() => setTab(CLIENT_TABS.WEBHOOKS)}
/>
</div>
}

<div className="mb-2">
<SideMenuitem
active={activeTab === CLIENT_TABS.SITES}
title="Projects"
iconName="window-restore"
onClick={() => setTab(CLIENT_TABS.SITES)}
/>
</div>

{isEnterprise && isAdmin && (
<div className="mb-2">
<SideMenuitem
active={activeTab === CLIENT_TABS.MANAGE_ROLES}
title="Roles & Access"
iconName="diagram-3"
onClick={() => setTab(CLIENT_TABS.MANAGE_ROLES)}
/>
</div>
)}

{isEnterprise && isAdmin && (
<div className="mb-2">
<SideMenuitem
active={activeTab === CLIENT_TABS.AUDIT}
title="Audit"
iconName="list-ul"
onClick={() => setTab(CLIENT_TABS.AUDIT)}
/>
</div>
)}

{isAdmin && (
<div className="mb-2">
<SideMenuitem
active={activeTab === CLIENT_TABS.MANAGE_USERS}
title="Team"
iconName="users"
onClick={() => setTab(CLIENT_TABS.MANAGE_USERS)}
/>
</div>
)}

<div className="mb-2">
<SideMenuitem
active={activeTab === CLIENT_TABS.NOTIFICATIONS}
title="Notifications"
iconName="bell"
onClick={() => setTab(CLIENT_TABS.NOTIFICATIONS)}
/>
</div>
return (
<div className={cn(stl.wrapper, 'h-full overflow-y-auto pb-24')}>
<div className={cn(stl.header, 'flex items-end')}>
<div className={stl.label}>
<span>Preferences</span>
</div>
);
</div>

<div className="mb-2">
<SideMenuitem
active={activeTab === CLIENT_TABS.PROFILE}
title="Account"
iconName="user-circle"
onClick={() => setTab(CLIENT_TABS.PROFILE)}
/>
</div>

<div className="mb-2">
<SideMenuitem
active={activeTab === CLIENT_TABS.INTEGRATIONS}
title="Integrations"
iconName="puzzle-piece"
onClick={() => setTab(CLIENT_TABS.INTEGRATIONS)}
/>
</div>

<div className="mb-2">
<SideMenuitem
iconName="tags"
active={activeTab === CLIENT_TABS.CUSTOM_FIELDS}
onClick={() => setTab(CLIENT_TABS.CUSTOM_FIELDS)}
title="Metadata"
/>
</div>

{
<div className="mb-2">
<SideMenuitem
active={activeTab === CLIENT_TABS.WEBHOOKS}
title="Webhooks"
iconName="anchor"
onClick={() => setTab(CLIENT_TABS.WEBHOOKS)}
/>
</div>
}

<div className="mb-2">
<SideMenuitem
active={activeTab === CLIENT_TABS.SITES}
title="Projects"
iconName="window-restore"
onClick={() => setTab(CLIENT_TABS.SITES)}
/>
</div>

{isEnterprise && isAdmin && (
<div className="mb-2 relative">
<SideMenuitem
active={activeTab === CLIENT_TABS.MANAGE_ROLES}
title="Roles & Access"
iconName="diagram-3"
onClick={() => setTab(CLIENT_TABS.MANAGE_ROLES)}
leading={<AdminOnlyBadge />}
/>
</div>
)}

{isEnterprise && isAdmin && (
<div className="mb-2 relative">
<SideMenuitem
active={activeTab === CLIENT_TABS.AUDIT}
title="Audit"
iconName="list-ul"
onClick={() => setTab(CLIENT_TABS.AUDIT)}
leading={<AdminOnlyBadge />}
/>
</div>
)}

{isAdmin && (
<div className="mb-2 relative">
<SideMenuitem
active={activeTab === CLIENT_TABS.MANAGE_USERS}
title="Team"
iconName="users"
onClick={() => setTab(CLIENT_TABS.MANAGE_USERS)}
leading={<AdminOnlyBadge />}
/>
</div>
)}

<div className="mb-2">
<SideMenuitem
active={activeTab === CLIENT_TABS.NOTIFICATIONS}
title="Notifications"
iconName="bell"
onClick={() => setTab(CLIENT_TABS.NOTIFICATIONS)}
/>
</div>
</div>
);
}

export default connect((state) => ({
isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee',
account: state.getIn(['user', 'account']),
isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee',
account: state.getIn(['user', 'account']),
}))(withRouter(PreferencesMenu));

function AdminOnlyBadge() {
return (
<div
className="ml-1 rounded-full bg-gray-light text-xs flex items-center px-2 color-gray-medium"
style={{ marginTop: '', height: '20px', whiteSpace: 'nowrap' }}
>
Admin Only
</div>
);
}
@@ -1,7 +1,7 @@
.wrapper {
position: fixed;
top: 81px;
width: 200px;
width: 210px;
}

.header {
@@ -38,7 +38,7 @@ function Roles(props: Props) {

useEffect(() => {
if (removeErrors && removeErrors.size > 0) {
removeErrors.forEach((e) => {
removeErrors.forEach((e: any) => {
toast.error(e);
});
}

@@ -47,21 +47,20 @@ function Roles(props: Props) {
};
}, [removeErrors]);

const closeModal = (showToastMessage) => {
if (showToastMessage) {
toast.success(showToastMessage);
props.fetchList();
}
setShowmModal(false);
setTimeout(() => {
init();
}, 100);
};
// const closeModal = (showToastMessage: boolean) => {
// if (showToastMessage) {
// toast.success(showToastMessage);
// props.fetchList();
// }
// // setShowmModal(false);
// setTimeout(() => {
// init();
// }, 100);
// };

const editHandler = (role: any) => {
init(role);
showModal(<RoleForm closeModal={hideModal} permissionsMap={permissionsMap} deleteHandler={deleteHandler} />, { right: true });
// setShowmModal(true);
};

const deleteHandler = async (role: any) => {

@@ -71,7 +70,7 @@ function Roles(props: Props) {
confirmation: `Are you sure you want to remove this role?`,
})
) {
deleteRole(role.roleId);
deleteRole(role.roleId).then(hideModal);
}
};

@@ -83,7 +82,7 @@ function Roles(props: Props) {
<div className="flex items-center mr-auto px-5 pt-5">
<h3 className={cn(stl.tabTitle, 'text-2xl')}>Roles and Access</h3>
<Popup content="You don’t have the permissions to perform this action." disabled={isAdmin}>
<Button variant="primary" onClick={() => setShowmModal(true)}>Add</Button>
<Button variant="primary" onClick={() => editHandler({})}>Add</Button>
</Popup>
</div>
</div>

@@ -123,7 +122,7 @@ function Roles(props: Props) {
export default connect(
(state: any) => {
const permissions = state.getIn(['roles', 'permissions']);
const permissionsMap = {};
const permissionsMap: any = {};
permissions.forEach((p: any) => {
permissionsMap[p.value] = p.text;
});
@@ -7,6 +7,9 @@ import { setSiteId } from 'Duck/site';
import { withRouter } from 'react-router-dom';
import styles from './siteForm.module.css';
import { confirm } from 'UI';
import { clearSearch } from 'Duck/search';
import { clearSearch as clearSearchLive } from 'Duck/liveSearch';
import { withStore } from 'App/mstore';

@connect(
(state) => ({

@@ -23,13 +26,17 @@ import { confirm } from 'UI';
pushNewSite,
fetchList,
setSiteId,
clearSearch,
clearSearchLive,
}
)
@withRouter
@withStore
export default class NewSiteForm extends React.PureComponent {
state = {
existsError: false,
};

componentDidMount() {
const {

@@ -60,16 +67,10 @@ export default class NewSiteForm extends React.PureComponent {
});
} else {
this.props.save(this.props.site).then(() => {
this.props.fetchList().then(() => {
const { sites } = this.props;
const site = sites.last();
if (!pathname.includes('/client')) {
this.props.setSiteId(site.get('id'));
}
this.props.onClose(null, site);
});

// this.props.pushNewSite(site)
this.props.onClose(null);
this.props.clearSearch();
this.props.clearSearchLive();
this.props.mstore.initClient();
});
}
};
@@ -27,11 +27,15 @@ function UserListItem(props: Props) {
<div className="grid grid-cols-12 py-4 px-5 border-t items-center select-none hover:bg-active-blue group cursor-pointer" onClick={editHandler}>
<div className="col-span-5">
<span className="mr-2">{user.name}</span>
{isEnterprise && <AdminPrivilegeLabel user={user} />}
{/* {isEnterprise && <AdminPrivilegeLabel user={user} />} */}
</div>
<div className="col-span-3">
{!isEnterprise && <AdminPrivilegeLabel user={user} />}
{isEnterprise && <span className="px-2 py-1 bg-gray-lightest rounded border text-sm capitalize">{user.roleName}</span>}
{isEnterprise && (
<>
<span className="px-2 py-1 bg-gray-lightest rounded border text-sm capitalize">{user.roleName}</span>
{ user.isSuperAdmin || user.isAdmin && <><span className="ml-2" /><AdminPrivilegeLabel user={user} /></> }
</>)}
</div>
{!isOnboarding && (
<div className="col-span-2">
@@ -10,7 +10,7 @@
/* min-height: calc(100vh - 81px); */

& .tabMenu {
width: 240px;
width: 250px;
margin: 0;
background-color: $gray-lightest;
}
@@ -7,6 +7,7 @@ import MethodType from './MethodType';
import cn from 'classnames';
import stl from './callWithErrors.module.css';
import { NO_METRIC_DATA } from 'App/constants/messages'
import { List } from 'immutable';

const cols = [
{

@@ -51,10 +52,10 @@ interface Props {
function CallWithErrors(props: Props) {
const { data, metric } = props;
const [search, setSearch] = React.useState('')
const test = (value = '', serach) => getRE(serach, 'i').test(value);
const _data = search ? metric.data.chart.filter(i => test(i.urlHostpath, search)) : metric.data.chart.images;
const test = (value = '', serach: any) => getRE(serach, 'i').test(value);
const _data = search ? metric.data.chart.filter((i: any) => test(i.urlHostpath, search)) : metric.data.chart;

const write = ({ target: { name, value } }) => {
const write = ({ target: { name, value } }: any) => {
setSearch(value)
};

@@ -71,8 +72,9 @@ function CallWithErrors(props: Props) {
<input disabled={metric.data.chart.length === 0} className={stl.searchField} name="search" placeholder="Filter by Path" onChange={write} />
</div>
<Table
small
cols={ cols }
rows={ _data }
rows={ List(_data) }
isTemplate={props.isTemplate}
/>
</div>
@@ -47,6 +47,8 @@ function MissingResources(props: Props) {
if (!isTemplate) {
cols.push(copyPathCol);
}

console.log('metric.data.chart', metric.data.chart);

return (
<NoContent
@@ -9,7 +9,7 @@ export default class ResourceInfo extends React.PureComponent {
const { data } = this.props;
return (
<div className="flex flex-col" >
<TextEllipsis className={ styles.name } text={ data.name } hintText={ data.url } />
<TextEllipsis className={ styles.name } text={ data.url } hintText={ data.url } />
<div className={ styles.timings }>
{ data.endedAt && data.startedAt && `${ diffFromNowString(data.endedAt) } ago - ${ diffFromNowString(data.startedAt) } old` }
</div>
@@ -23,7 +23,7 @@
.row {
display: flex;
align-items: center;
min-height: 54px;
min-height: 50px;
font-size: 13px;

& .cell {
@@ -18,6 +18,7 @@ import SelectDateRange from 'Shared/SelectDateRange';
import { Tooltip } from 'react-tippy';
import Breadcrumb from 'Shared/Breadcrumb';
import AddMetricContainer from '../DashboardWidgetGrid/AddMetricContainer';
import OutsideClickDetectingDiv from 'Shared/OutsideClickDetectingDiv';

interface IProps {
siteId: string;

@@ -32,6 +33,7 @@ function DashboardView(props: Props) {
const { dashboardStore } = useStore();
const { showModal } = useModal();

const [showTooltip, setShowTooltip] = React.useState(false);
const [focusTitle, setFocusedInput] = React.useState(true);
const [showEditModal, setShowEditModal] = React.useState(false);

@@ -125,18 +127,24 @@ function DashboardView(props: Props) {
className="mr-3 select-none border-b border-b-borderColor-transparent hover:border-dotted hover:border-gray-medium cursor-pointer"
actionButton={
/* @ts-ignore */
<Tooltip
<Tooltip
open={showTooltip}
interactive
useContext
// @ts-ignore
theme="nopadding"
animation="none"
hideDelay={200}
hideDelay={0}
duration={0}
distance={20}
html={<div style={{ padding: 0 }}><AddMetricContainer isPopup siteId={siteId} /></div>}
html={
<div style={{ padding: 0 }}>
<OutsideClickDetectingDiv onClickOutside={() => setShowTooltip(false)}>
<AddMetricContainer onAction={() => setShowTooltip(false)} isPopup siteId={siteId} />
</OutsideClickDetectingDiv>
</div>
}
>
<Button variant="primary">
<Button variant="primary" onClick={() => setShowTooltip(true)}>
Add Metric
</Button>
</Tooltip>
@@ -8,7 +8,7 @@ import AddPredefinedMetric from './AddPredefinedMetric';
import cn from 'classnames';

interface AddMetricButtonProps {
iconName: string;
iconName: "bar-pencil" | "grid-check";
title: string;
description: string;
isPremade?: boolean;

@@ -47,11 +47,18 @@ function AddMetricButton({ iconName, title, description, onClick, isPremade, isP
);
}

function AddMetricContainer({ siteId, isPopup }: any) {
interface Props {
siteId: string
isPopup?: boolean
onAction?: () => void
}

function AddMetricContainer({ siteId, isPopup, onAction }: Props) {
const { showModal } = useModal();
const { dashboardStore } = useStore();

const onAddCustomMetrics = () => {
onAction?.()
dashboardStore.initDashboard(dashboardStore.selectedDashboard);
showModal(
<AddMetric

@@ -64,6 +71,7 @@ function AddMetricContainer({ siteId, isPopup }: any) {
};

const onAddPredefinedMetrics = () => {
onAction?.()
dashboardStore.initDashboard(dashboardStore.selectedDashboard);
showModal(
<AddPredefinedMetric
@@ -74,40 +74,44 @@ function AddPredefinedMetric({ history, siteId, title, description }: IProps) {
>
<div className="mb-6 pt-8 px-8 flex items-start justify-between">
<div className="flex flex-col">
<h1 className="text-2xl">{title}</h1>
<h1 className="text-2xl" style={{ marginBottom: '7px' }}>{title}</h1>
<div className="text-disabled-text">{description}</div>
</div>

<Button variant="text-primary" className="font-medium ml-2" onClick={onCreateNew}>
+ Create Custom Metric
</Button>
<div className="flex flex-col items-end">
<Button variant="text-primary" className="font-medium ml-2" onClick={onCreateNew}>
+ Create Custom Metric
</Button>
<div className="text-disabled-text">Past 7 Days</div>
</div>
</div>

<div className="flex px-8 h-full" style={{ maxHeight: 'calc(100vh - 160px)' }}>
<div style={{ flex: 3 }}>
<div
className="grid grid-cols-1 gap-4 py-1 pr-2"
style={{
maxHeight: 'calc(100vh - 160px)',
overflowY: 'auto',
gridAutoRows: 'max-content',
}}
>
{activeCategory &&
categories.map((category) => (
<React.Fragment key={category.name}>
<WidgetCategoryItem
key={category.name}
onClick={handleWidgetCategoryClick}
category={category}
isSelected={activeCategory.name === category.name}
selectedWidgetIds={selectedWidgetIds}
/>
</React.Fragment>
))}
<Loader loading={dashboardStore.loadingTemplates}>
<div className="flex px-8 h-full" style={{ maxHeight: 'calc(100vh - 160px)' }}>
<div style={{ flex: 3 }}>
<div
className="grid grid-cols-1 gap-4 py-1 pr-2"
style={{
maxHeight: 'calc(100vh - 160px)',
overflowY: 'auto',
gridAutoRows: 'max-content',
}}
>
{activeCategory &&
categories.map((category) => (
<React.Fragment key={category.name}>
<WidgetCategoryItem
key={category.name}
onClick={handleWidgetCategoryClick}
category={category}
isSelected={activeCategory.name === category.name}
selectedWidgetIds={selectedWidgetIds}
/>
</React.Fragment>
))}
</div>
</div>
</div>
<Loader loading={dashboardStore.loadingTemplates}>

<div
className="grid h-full grid-cols-4 gap-4 p-1 items-start"
style={{

@@ -131,8 +135,8 @@ function AddPredefinedMetric({ history, siteId, title, description }: IProps) {
</React.Fragment>
))}
</div>
</Loader>
</div>
</div>
</Loader>

<div className="py-4 border-t px-8 bg-white w-full flex items-center justify-between">
<div>
@@ -1,9 +1,11 @@
import React from 'react';
import { toJS } from 'mobx'
import { useStore } from 'App/mstore';
import WidgetWrapper from '../WidgetWrapper';
import { NoContent, Loader } from 'UI';
import { NoContent, Loader, Icon } from 'UI';
import { useObserver } from 'mobx-react-lite';
import AddMetricContainer from './AddMetricContainer'
import Widget from 'App/mstore/types/widget';

interface Props {
siteId: string,

@@ -15,8 +17,20 @@ function DashboardWidgetGrid(props: Props) {
const { dashboardId, siteId } = props;
const { dashboardStore } = useStore();
const loading = useObserver(() => dashboardStore.isLoading);
const dashboard: any = dashboardStore.selectedDashboard;
const list: any = useObserver(() => dashboard?.widgets);
const dashboard = dashboardStore.selectedDashboard;
const list = useObserver(() => dashboard?.widgets);
const smallWidgets: Widget[] = []
const regularWidgets: Widget[] = []

list.forEach(item => {
if (item.config.col === 1) {
smallWidgets.push(item)
} else {
regularWidgets.push(item)
}
})

const smallWidgetsLen = smallWidgets.length

return useObserver(() => (
// @ts-ignore

@@ -29,17 +43,49 @@ function DashboardWidgetGrid(props: Props) {
<div className="w-4/5 m-auto mt-4"><AddMetricContainer siteId={siteId} /></div>
}
>
{smallWidgets.length > 0 ? (
<>
<div className="font-semibold text-xl py-4 flex items-center gap-2">
<Icon name="grid-horizontal" size={26} />
Web Vitals
</div>
<div className="grid gap-4 grid-cols-4 items-start pb-10" id={props.id}>
{smallWidgets && smallWidgets.map((item: any, index: any) => (
<React.Fragment key={item.widgetId}>
<WidgetWrapper
index={index}
widget={item}
moveListItem={(dragIndex: any, hoverIndex: any) => dashboard.swapWidgetPosition(dragIndex, hoverIndex)}
dashboardId={dashboardId}
siteId={siteId}
isWidget={true}
grid="vitals"
/>
</React.Fragment>
))}
</div>
</>
) : null}

{smallWidgets.length > 0 && regularWidgets.length > 0 ? (
<div className="font-semibold text-xl py-4 flex items-center gap-2">
<Icon name="grid-horizontal" size={26} />
All Metrics
</div>
) : null}
<div className="grid gap-4 grid-cols-4 items-start pb-10" id={props.id}>
{list && list.map((item: any, index: any) => (
<WidgetWrapper
index={index}
widget={item}
key={item.widgetId}
moveListItem={(dragIndex: any, hoverIndex: any) => dashboard.swapWidgetPosition(dragIndex, hoverIndex)}
dashboardId={dashboardId}
siteId={siteId}
isWidget={true}
/>
{regularWidgets && regularWidgets.map((item: any, index: any) => (
<React.Fragment key={item.widgetId}>
<WidgetWrapper
index={smallWidgetsLen + index}
widget={item}
moveListItem={(dragIndex: any, hoverIndex: any) => dashboard.swapWidgetPosition(dragIndex, hoverIndex)}
dashboardId={dashboardId}
siteId={siteId}
isWidget={true}
grid="other"
/>
</React.Fragment>
))}
<div className="col-span-2"><AddMetricContainer siteId={siteId} /></div>
</div>
@@ -10,7 +10,7 @@ function ErrorDetailsModal(props: Props) {
style={{ width: '85vw', maxWidth: '1200px' }}
className="bg-white h-screen p-4 overflow-y-auto"
>
<ErrorInfo errorId={props.errorId} list={[]} />
<ErrorInfo errorId={props.errorId} />
</div>
);
}
Some files were not shown because too many files have changed in this diff.