Merge remote-tracking branch 'origin/api-v1.12.0' into dev

# Conflicts:
#	api/chalicelib/core/health.py
#	api/chalicelib/core/jobs.py
#	api/chalicelib/core/sessions_devtool.py
#	api/chalicelib/core/sessions_mobs.py
#	api/chalicelib/utils/s3.py
#	api/routers/crons/core_dynamic_crons.py
#	ee/api/chalicelib/core/assist_records.py
#	ee/api/chalicelib/core/health.py
#	ee/api/chalicelib/core/sessions_devtool.py
#	ee/api/chalicelib/core/traces.py
#	ee/api/requirements-crons.txt
#	ee/api/routers/crons/core_dynamic_crons.py
#	ee/api/schemas_ee.py
This commit is contained in:
Taha Yassine Kraiem 2023-05-02 15:37:20 +02:00
commit 1bdfc2ecfc
70 changed files with 1006 additions and 473 deletions

View file

@ -1,6 +1,11 @@
# This action will push the assist changes to aws
on:
workflow_dispatch:
inputs:
skip_security_checks:
description: 'Skip Security checks if there is an unfixable vuln or error. Value: true/false'
required: false
default: 'false'
push:
branches:
- dev

View file

@ -1,6 +1,11 @@
# This action will push the assist changes to aws
on:
workflow_dispatch:
inputs:
skip_security_checks:
description: 'Skip Security checks if there is an unfixable vuln or error. Value: true/false'
required: false
default: 'false'
push:
branches:
- dev

View file

@ -11,8 +11,7 @@ from starlette.responses import StreamingResponse
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from routers import core, core_dynamic
from routers.crons import core_crons
from routers.crons import core_dynamic_crons
from crons import core_crons, core_dynamic_crons
from routers.subs import insights, metrics, v1_api, health
loglevel = config("LOGLEVEL", default=logging.INFO)

View file

@ -56,7 +56,7 @@ def __get_live_sessions_ws(project_id, data):
results = requests.post(ASSIST_URL + config("assist") + f"/{project_key}",
json=data, timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code}")
print(f"!! issue with the peer-server code:{results.status_code} for __get_live_sessions_ws")
print(results.text)
return {"total": 0, "sessions": []}
live_peers = results.json().get("data", [])
@ -106,7 +106,7 @@ def get_live_session_by_id(project_id, session_id):
results = requests.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code}")
print(f"!! issue with the peer-server code:{results.status_code} for get_live_session_by_id")
print(results.text)
return None
results = results.json().get("data")
@ -136,7 +136,7 @@ def is_live(project_id, session_id, project_key=None):
results = requests.get(ASSIST_URL + config("assistList") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code}")
print(f"!! issue with the peer-server code:{results.status_code} for is_live")
print(results.text)
return False
results = results.json().get("data")
@ -165,7 +165,7 @@ def autocomplete(project_id, q: str, key: str = None):
ASSIST_URL + config("assistList") + f"/{project_key}/autocomplete",
params=params, timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code}")
print(f"!! issue with the peer-server code:{results.status_code} for autocomplete")
print(results.text)
return {"errors": [f"Something went wrong wile calling assist:{results.text}"]}
results = results.json().get("data", [])
@ -248,7 +248,7 @@ def session_exists(project_id, session_id):
results = requests.get(ASSIST_URL + config("assist") + f"/{project_key}/{session_id}",
timeout=config("assistTimeout", cast=int, default=5))
if results.status_code != 200:
print(f"!! issue with the peer-server code:{results.status_code}")
print(f"!! issue with the peer-server code:{results.status_code} for session_exists")
print(results.text)
return None
results = results.json().get("data")

View file

@ -5,6 +5,7 @@ import requests
from decouple import config
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
def app_connection_string(name, port, path):
@ -33,7 +34,7 @@ HEALTH_ENDPOINTS = {
}
def __check_database_pg():
def __check_database_pg(*_):
fail_response = {
"health": False,
"details": {
@ -64,11 +65,11 @@ def __check_database_pg():
}
def __not_supported():
def __not_supported(*_):
    """Stub health-check for features that are not supported here.

    Accepts and ignores any positional args so it matches the calling
    convention of the other health-check callables in health_map.
    """
    return {"errors": ["not supported"]}
def __always_healthy():
def __always_healthy(*_):
    """Health-check stub that unconditionally reports healthy.

    Used for components that have no real probe; accepts and ignores
    positional args like the other health-check callables.
    """
    return {
        "health": True,
        "details": {}
    }
@ -76,7 +77,7 @@ def __always_healthy():
def __check_be_service(service_name):
def fn():
def fn(*_):
fail_response = {
"health": False,
"details": {
@ -112,7 +113,7 @@ def __check_be_service(service_name):
return fn
def __check_redis():
def __check_redis(*_):
fail_response = {
"health": False,
"details": {"errors": ["server health-check failed"]}
@ -139,6 +140,41 @@ def __check_redis():
}
def __check_SSL(*_):
    """Health-check the SSL certificate of the configured SITE_URL.

    Performs a GET with certificate verification enabled; any exception
    (invalid/expired certificate, DNS failure, connection error, timeout)
    is treated as a failed check.

    Accepts and ignores positional args so it can be invoked uniformly
    with the other health-check callables.

    Returns:
        dict: {"health": bool, "details": dict} in the shape shared by
        the other health checks.
    """
    fail_response = {
        "health": False,
        "details": {
            "errors": ["SSL Certificate health-check failed"]
        }
    }
    try:
        # verify=True enforces certificate validation; follow redirects so
        # an http->https redirect still exercises the final certificate.
        # The explicit timeout keeps the health endpoint from hanging
        # forever on a stalled host (requests has no default timeout).
        requests.get(config("SITE_URL"), verify=True, allow_redirects=True,
                     timeout=config("healthcheckTimeout", cast=int, default=10))
    except Exception as e:
        print("!! health failed: SSL Certificate")
        print(str(e))
        return fail_response
    return {
        "health": True,
        "details": {}
    }
def __get_sessions_stats(*_):
    """Return total captured sessions and events across all live projects.

    Sums the pre-aggregated counters stored in public.projects_stats for
    every non-deleted project (counters are maintained by cron() /
    weekly_cron() below). Accepts and ignores positional args so it can
    be called like the other health_map callables.
    """
    with pg_client.PostgresClient() as cur:
        # Single filter today; kept as a list so more conditions can be
        # AND-ed in later without reshaping the query string.
        constraints = ["projects.deleted_at IS NULL"]
        query = cur.mogrify(f"""SELECT COALESCE(SUM(sessions_count),0) AS s_c,
                                       COALESCE(SUM(events_count),0) AS e_c
                                FROM public.projects_stats
                                         INNER JOIN public.projects USING(project_id)
                                WHERE {" AND ".join(constraints)};""")
        cur.execute(query)
        row = cur.fetchone()
        return {
            "numberOfSessionsCaptured": row["s_c"],
            "numberOfEventCaptured": row["e_c"]
        }
def get_health():
health_map = {
"databases": {
@ -163,9 +199,121 @@ def get_health():
"sink": __check_be_service("sink"),
"sourcemaps-reader": __check_be_service("sourcemaps-reader"),
"storage": __check_be_service("storage")
}
},
"details": __get_sessions_stats,
"ssl": __check_SSL
}
for parent_key in health_map.keys():
for element_key in health_map[parent_key]:
health_map[parent_key][element_key] = health_map[parent_key][element_key]()
if isinstance(health_map[parent_key], dict):
for element_key in health_map[parent_key]:
health_map[parent_key][element_key] = health_map[parent_key][element_key]()
else:
health_map[parent_key] = health_map[parent_key]()
return health_map
def cron():
    """Incrementally update per-project session/event counters.

    For every non-deleted project, counts finished sessions (duration IS
    NOT NULL) started since the last counted point and folds them into
    public.projects_stats — inserting the stats row on a project's first
    run, otherwise adding to the existing counters.

    The count window lower bound is chosen as follows:
      * stats row exists           -> its last_update_at
      * no stats row yet           -> first_recorded_session_at if known,
        else sessions_last_check_at, else the project's created_at.
    """
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""SELECT projects.project_id,
                                      projects.created_at,
                                      projects.sessions_last_check_at,
                                      projects.first_recorded_session_at,
                                      projects_stats.last_update_at
                               FROM public.projects
                                        LEFT JOIN public.projects_stats USING (project_id)
                               WHERE projects.deleted_at IS NULL
                               ORDER BY project_id;""")
        cur.execute(query)
        rows = cur.fetchall()
        for r in rows:
            insert = False
            if r["last_update_at"] is None:
                # never counted before, must insert
                insert = True
                if r["first_recorded_session_at"] is None:
                    if r["sessions_last_check_at"] is None:
                        count_start_from = r["created_at"]
                    else:
                        count_start_from = r["sessions_last_check_at"]
                else:
                    count_start_from = r["first_recorded_session_at"]
            else:
                # counted before, must update
                count_start_from = r["last_update_at"]
            # DB returns a datetime; queries below compare against the
            # millisecond timestamps stored in sessions.start_ts.
            count_start_from = TimeUTC.datetime_to_timestamp(count_start_from)
            params = {"project_id": r["project_id"],
                      "start_ts": count_start_from,
                      "end_ts": TimeUTC.now(),
                      "sessions_count": 0,
                      "events_count": 0}
            # duration IS NOT NULL restricts the count to fully-ended
            # sessions, so partially-recorded ones are not counted twice.
            query = cur.mogrify("""SELECT COUNT(1) AS sessions_count,
                                          COALESCE(SUM(events_count),0) AS events_count
                                   FROM public.sessions
                                   WHERE project_id=%(project_id)s
                                     AND start_ts>=%(start_ts)s
                                     AND start_ts<=%(end_ts)s
                                     AND duration IS NOT NULL;""",
                                params)
            cur.execute(query)
            row = cur.fetchone()
            if row is not None:
                params["sessions_count"] = row["sessions_count"]
                params["events_count"] = row["events_count"]
            if insert:
                query = cur.mogrify("""INSERT INTO public.projects_stats(project_id, sessions_count, events_count, last_update_at)
                                       VALUES (%(project_id)s, %(sessions_count)s, %(events_count)s, (now() AT TIME ZONE 'utc'::text));""",
                                    params)
            else:
                # accumulate on top of the previous counters; the weekly
                # cron recomputes absolute values to correct any drift
                query = cur.mogrify("""UPDATE public.projects_stats
                                       SET sessions_count=sessions_count+%(sessions_count)s,
                                           events_count=events_count+%(events_count)s,
                                           last_update_at=(now() AT TIME ZONE 'utc'::text)
                                       WHERE project_id=%(project_id)s;""",
                                    params)
            cur.execute(query)
# this cron is used to correct the sessions&events count every week
# this cron is used to correct the sessions&events count every week
def weekly_cron():
    """Weekly full recount of per-project session/event counters.

    Unlike cron(), which accumulates increments, this recomputes the
    absolute totals from public.sessions for each project that already
    has a stats row, and overwrites projects_stats — correcting any
    drift introduced by the incremental updates.
    """
    # long_query: the unbounded recount over all sessions can be slow
    with pg_client.PostgresClient(long_query=True) as cur:
        query = cur.mogrify("""SELECT project_id,
                                      projects_stats.last_update_at
                               FROM public.projects
                                        LEFT JOIN public.projects_stats USING (project_id)
                               WHERE projects.deleted_at IS NULL
                               ORDER BY project_id;""")
        cur.execute(query)
        rows = cur.fetchall()
        for r in rows:
            if r["last_update_at"] is None:
                # never counted by the incremental cron yet; leave it to
                # cron() to create the stats row
                continue
            params = {"project_id": r["project_id"],
                      "end_ts": TimeUTC.now(),
                      "sessions_count": 0,
                      "events_count": 0}
            # no lower time bound: recount everything up to now, only
            # fully-ended sessions (duration IS NOT NULL)
            query = cur.mogrify("""SELECT COUNT(1) AS sessions_count,
                                          COALESCE(SUM(events_count),0) AS events_count
                                   FROM public.sessions
                                   WHERE project_id=%(project_id)s
                                     AND start_ts<=%(end_ts)s
                                     AND duration IS NOT NULL;""",
                                params)
            cur.execute(query)
            row = cur.fetchone()
            if row is not None:
                params["sessions_count"] = row["sessions_count"]
                params["events_count"] = row["events_count"]
            # absolute assignment (not +=): this replaces the counters
            query = cur.mogrify("""UPDATE public.projects_stats
                                   SET sessions_count=%(sessions_count)s,
                                       events_count=%(events_count)s,
                                       last_update_at=(now() AT TIME ZONE 'utc'::text)
                                   WHERE project_id=%(project_id)s;""",
                                params)
            cur.execute(query)

View file

@ -128,6 +128,11 @@ def __delete_sessions_by_session_ids(session_ids):
cur.execute(query=query)
def __delete_session_mobs_by_session_ids(session_ids, project_id):
    """Delete stored replay artifacts for the given sessions.

    Removes both the session mob files and the devtools files; each
    helper tags the underlying S3 objects for deletion (see
    s3.tag_for_deletion) rather than deleting them immediately.
    """
    sessions_mobs.delete_mobs(session_ids=session_ids, project_id=project_id)
    sessions_devtool.delete_mobs(session_ids=session_ids, project_id=project_id)
def get_scheduled_jobs():
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
@ -151,9 +156,8 @@ def execute_jobs():
user_ids=[job["referenceId"]])
if len(session_ids) > 0:
print(f"Deleting {len(session_ids)} sessions")
__delete_sessions_by_session_ids(session_ids)
sessions_mobs.delete_mobs(session_ids=session_ids, project_id=job["projectId"])
sessions_devtool.delete_mobs(session_ids=session_ids, project_id=job["projectId"])
__delete_sessions_by_session_ids(session_ids=session_ids)
__delete_session_mobs_by_session_ids(session_ids=session_ids, project_id=job["projectId"])
else:
raise Exception(f"The action '{job['action']}' not supported.")

View file

@ -52,38 +52,28 @@ def __create(tenant_id, name):
return get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True)
def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False):
stack_integrations = False
def get_projects(tenant_id: int, gdpr: bool = False, recorded: bool = False):
with pg_client.PostgresClient() as cur:
extra_projection = ""
extra_join = ""
if gdpr:
extra_projection += ',s.gdpr'
if recorded:
extra_projection += """,COALESCE(nullif(EXTRACT(EPOCH FROM s.first_recorded_session_at) * 1000, NULL)::BIGINT,
extra_projection += """,\nCOALESCE(EXTRACT(EPOCH FROM s.first_recorded_session_at) * 1000::BIGINT,
(SELECT MIN(sessions.start_ts)
FROM public.sessions
WHERE sessions.project_id = s.project_id
AND sessions.start_ts >= (EXTRACT(EPOCH FROM
COALESCE(s.sessions_last_check_at, s.created_at)) * 1000-24*60*60*1000)
AND sessions.start_ts >= (EXTRACT(EPOCH
FROM COALESCE(s.sessions_last_check_at, s.created_at)) * 1000-%(check_delta)s)
AND sessions.start_ts <= %(now)s
LIMIT 1), NULL) AS first_recorded"""
if stack_integrations:
extra_projection += ',stack_integrations.count>0 AS stack_integrations'
if stack_integrations:
extra_join = """LEFT JOIN LATERAL (SELECT COUNT(*) AS count
FROM public.integrations
WHERE s.project_id = integrations.project_id
LIMIT 1) AS stack_integrations ON TRUE"""
)) AS first_recorded"""
query = cur.mogrify(f"""{"SELECT *, first_recorded IS NOT NULL AS recorded FROM (" if recorded else ""}
SELECT s.project_id, s.name, s.project_key, s.save_request_payloads, s.first_recorded_session_at,
created_at {extra_projection}
created_at, sessions_last_check_at {extra_projection}
FROM public.projects AS s
{extra_join}
WHERE s.deleted_at IS NULL
ORDER BY s.name {") AS raw" if recorded else ""};""", {"now": TimeUTC.now()})
ORDER BY s.name {") AS raw" if recorded else ""};""",
{"now": TimeUTC.now(), "check_delta": TimeUTC.MS_HOUR * 4})
cur.execute(query)
rows = cur.fetchall()
# if recorded is requested, check if it was saved or computed
@ -91,13 +81,17 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
u_values = []
params = {}
for i, r in enumerate(rows):
r["sessions_last_check_at"] = TimeUTC.datetime_to_timestamp(r["sessions_last_check_at"])
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
if r["first_recorded_session_at"] is None:
if r["first_recorded_session_at"] is None \
and r["sessions_last_check_at"] is not None \
and (TimeUTC.now() - r["sessions_last_check_at"]) > TimeUTC.MS_HOUR:
u_values.append(f"(%(project_id_{i})s,to_timestamp(%(first_recorded_{i})s/1000))")
params[f"project_id_{i}"] = r["project_id"]
params[f"first_recorded_{i}"] = r["first_recorded"] if r["recorded"] else None
r.pop("first_recorded_session_at")
r.pop("first_recorded")
r.pop("sessions_last_check_at")
if len(u_values) > 0:
query = cur.mogrify(f"""UPDATE public.projects
SET sessions_last_check_at=(now() at time zone 'utc'), first_recorded_session_at=u.first_recorded
@ -107,26 +101,7 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
else:
for r in rows:
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
if recording_state and len(rows) > 0:
project_ids = [f'({r["project_id"]})' for r in rows]
query = cur.mogrify(f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last
FROM (VALUES {",".join(project_ids)}) AS projects(project_id)
LEFT JOIN sessions USING (project_id)
WHERE sessions.start_ts >= %(startDate)s AND sessions.start_ts <= %(endDate)s
GROUP BY project_id;""",
{"startDate": TimeUTC.now(delta_days=-3), "endDate": TimeUTC.now(delta_days=1)})
cur.execute(query=query)
status = cur.fetchall()
for r in rows:
r["status"] = "red"
for s in status:
if s["project_id"] == r["project_id"]:
if TimeUTC.now(-2) <= s["last"] < TimeUTC.now(-1):
r["status"] = "yellow"
elif s["last"] >= TimeUTC.now(-1):
r["status"] = "green"
break
r.pop("sessions_last_check_at")
return helper.list_to_camel_case(rows)

View file

@ -941,7 +941,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status,
def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
if project_id is None:
all_projects = projects.get_projects(tenant_id=tenant_id, recording_state=False)
all_projects = projects.get_projects(tenant_id=tenant_id)
else:
all_projects = [
projects.get_project(tenant_id=tenant_id, project_id=int(project_id), include_last_session=False,

View file

@ -29,4 +29,4 @@ def get_urls(session_id, project_id, check_existence: bool = True):
def delete_mobs(project_id, session_ids):
for session_id in session_ids:
for k in __get_devtools_keys(project_id=project_id, session_id=session_id):
s3.schedule_for_deletion(config("sessions_bucket"), k)
s3.tag_for_deletion(bucket=config("sessions_bucket"), key=k)

View file

@ -59,4 +59,4 @@ def delete_mobs(project_id, session_ids):
for session_id in session_ids:
for k in __get_mob_keys(project_id=project_id, session_id=session_id) \
+ __get_mob_keys_deprecated(session_id=session_id):
s3.schedule_for_deletion(config("sessions_bucket"), k)
s3.tag_for_deletion(bucket=config("sessions_bucket"), key=k)

View file

@ -496,8 +496,7 @@ def set_password_invitation(user_id, new_password):
c = tenants.get_by_tenant_id(tenant_id)
c.pop("createdAt")
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
stack_integrations=True)
c["projects"] = projects.get_projects(tenant_id=tenant_id, recorded=True)
c["smtp"] = helper.has_smtp()
c["iceServers"] = assist.get_ice_servers()
return {

View file

@ -19,7 +19,7 @@ def random_string(length=36):
return "".join(random.choices(string.hexdigits, k=length))
def list_to_camel_case(items, flatten=False):
def list_to_camel_case(items: list[dict], flatten: bool = False) -> list[dict]:
for i in range(len(items)):
if flatten:
items[i] = flatten_nested_dicts(items[i])

View file

@ -1,5 +1,4 @@
import hashlib
from datetime import datetime, timedelta
from urllib.parse import urlparse
import boto3
@ -109,15 +108,19 @@ def rename(source_bucket, source_key, target_bucket, target_key):
s3.Object(source_bucket, source_key).delete()
def schedule_for_deletion(bucket, key):
def tag_for_deletion(bucket, key):
    """Tag an S3 object so a bucket lifecycle rule can delete it later.

    The object is first copied onto itself to refresh its creation date,
    so a lifecycle rule keyed on object age deletes it SCH_DELETE_DAYS
    days from *now* rather than from the original upload time. The
    'to_delete_in_days' tag is then attached for the rule to match on.

    Args:
        bucket: S3 bucket name.
        key: object key inside the bucket.

    Returns:
        bool: False when the object does not exist, True once tagged
        (restores the contract of the former schedule_for_deletion,
        which returned True on success).
    """
    if not exists(bucket, key):
        return False
    # Copy the file onto itself to change the creation date, so it can be
    # deleted X days after the tag's creation
    s3 = __get_s3_resource()
    s3_target = s3.Object(bucket, key)
    s3_target.copy_from(
        CopySource={'Bucket': bucket, 'Key': key},
        MetadataDirective='COPY',
        TaggingDirective='COPY'
    )
    tag_file(bucket=bucket, file_key=key, tag_key='to_delete_in_days',
             tag_value=config("SCH_DELETE_DAYS", default='7'))
    return True
def generate_file_key(project_id, key):
@ -128,3 +131,18 @@ def generate_file_key_from_url(project_id, url):
u = urlparse(url)
new_url = u.scheme + "://" + u.netloc + u.path
return generate_file_key(project_id=project_id, key=new_url)
def tag_file(file_key, bucket, tag_key, tag_value):
    """Attach a single key/value tag to an S3 object via PutObjectTagging.

    Returns the raw boto3 response from put_object_tagging.
    """
    tag_set = [{'Key': tag_key, 'Value': tag_value}]
    tagging = {'TagSet': tag_set}
    return client.put_object_tagging(Bucket=bucket, Key=file_key, Tagging=tagging)

View file

@ -0,0 +1,39 @@
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.interval import IntervalTrigger
from chalicelib.core import telemetry
from chalicelib.core import weekly_report, jobs, health
async def run_scheduled_jobs() -> None:
    """Execute pending scheduled jobs (jobs.execute_jobs)."""
    jobs.execute_jobs()


async def weekly_report_cron() -> None:
    """Run the weekly-report job (weekly_report.cron)."""
    weekly_report.cron()


async def telemetry_cron() -> None:
    """Compute telemetry stats (telemetry.compute)."""
    telemetry.compute()


async def health_cron() -> None:
    """Incrementally refresh per-project session/event counters."""
    health.cron()


async def weekly_health_cron() -> None:
    """Full weekly recount of per-project session/event counters."""
    health.weekly_cron()


# Scheduler registration table; each entry is passed to APScheduler.
# misfire_grace_time is in seconds; max_instances=1 prevents overlapping
# runs of the same job.
cron_jobs = [
    {"func": telemetry_cron, "trigger": CronTrigger(day_of_week="*"),
     "misfire_grace_time": 60 * 60, "max_instances": 1},
    {"func": run_scheduled_jobs, "trigger": CronTrigger(day_of_week="*", hour=0, minute=15),
     "misfire_grace_time": 20, "max_instances": 1},
    {"func": weekly_report_cron, "trigger": CronTrigger(day_of_week="mon", hour=5),
     "misfire_grace_time": 60 * 60, "max_instances": 1},
    # jitter spreads the half-hourly health refresh by up to 300s to
    # avoid synchronized load across instances
    {"func": health_cron, "trigger": IntervalTrigger(hours=0, minutes=30, start_date="2023-04-01 0:0:0", jitter=300),
     "misfire_grace_time": 60 * 60, "max_instances": 1},
    {"func": weekly_health_cron, "trigger": CronTrigger(day_of_week="sun", hour=5),
     "misfire_grace_time": 60 * 60, "max_instances": 1}
]

View file

@ -1,15 +1,15 @@
requests==2.28.2
requests==2.29.0
urllib3==1.26.15
boto3==1.26.100
boto3==1.26.122
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.6.2
psycopg2-binary==2.9.6
elasticsearch==8.7.0
jira==3.5.0
fastapi==0.95.0
uvicorn[standard]==0.21.1
fastapi==0.95.1
uvicorn[standard]==0.22.0
python-decouple==3.8
pydantic[email]==1.10.7
apscheduler==3.10.1

View file

@ -1,17 +1,17 @@
requests==2.28.2
requests==2.29.0
urllib3==1.26.15
boto3==1.26.100
boto3==1.26.122
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.6.2
psycopg2-binary==2.9.6
elasticsearch==8.7.0
jira==3.5.0
fastapi==0.95.0
uvicorn[standard]==0.21.1
fastapi==0.95.1
uvicorn[standard]==0.22.0
python-decouple==3.8
pydantic[email]==1.10.7
apscheduler==3.10.1
redis==4.5.3
redis==4.5.4

View file

@ -14,7 +14,7 @@ from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assig
custom_metrics, saved_search, integrations_global
from chalicelib.core.collaboration_msteams import MSTeams
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import helper, captcha
from chalicelib.utils import helper, captcha, s3
from or_dependencies import OR_context
from routers.base import get_routers
@ -155,7 +155,7 @@ async def add_edit_sentry(projectId: int, data: schemas.SentrySchema = Body(...)
@app.delete('/{projectId}/integrations/sentry', tags=["integrations"])
async def delete_sentry(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_sentry(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_sentry.delete(tenant_id=context.tenant_id, project_id=projectId)}
@ -181,7 +181,7 @@ async def add_edit_datadog(projectId: int, data: schemas.DatadogSchema = Body(..
@app.delete('/{projectId}/integrations/datadog', tags=["integrations"])
async def delete_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_datadog(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_datadog.delete(tenant_id=context.tenant_id, project_id=projectId)}
@ -202,7 +202,7 @@ async def add_edit_stackdriver(projectId: int, data: schemas.StackdriverSchema =
@app.delete('/{projectId}/integrations/stackdriver', tags=["integrations"])
async def delete_stackdriver(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_stackdriver(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_stackdriver.delete(tenant_id=context.tenant_id, project_id=projectId)}
@ -223,7 +223,7 @@ async def add_edit_newrelic(projectId: int, data: schemas.NewrelicSchema = Body(
@app.delete('/{projectId}/integrations/newrelic', tags=["integrations"])
async def delete_newrelic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_newrelic(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_newrelic.delete(tenant_id=context.tenant_id, project_id=projectId)}
@ -244,7 +244,7 @@ async def add_edit_rollbar(projectId: int, data: schemas.RollbarSchema = Body(..
@app.delete('/{projectId}/integrations/rollbar', tags=["integrations"])
async def delete_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_datadog(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_rollbar.delete(tenant_id=context.tenant_id, project_id=projectId)}
@ -271,7 +271,7 @@ async def add_edit_bugsnag(projectId: int, data: schemas.BugsnagSchema = Body(..
@app.delete('/{projectId}/integrations/bugsnag', tags=["integrations"])
async def delete_bugsnag(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_bugsnag(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_bugsnag.delete(tenant_id=context.tenant_id, project_id=projectId)}
@ -300,7 +300,7 @@ async def add_edit_cloudwatch(projectId: int, data: schemas.CloudwatchSchema = B
@app.delete('/{projectId}/integrations/cloudwatch', tags=["integrations"])
async def delete_cloudwatch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_cloudwatch(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_cloudwatch.delete(tenant_id=context.tenant_id, project_id=projectId)}
@ -328,7 +328,7 @@ async def add_edit_elasticsearch(projectId: int, data: schemas.ElasticsearchSche
@app.delete('/{projectId}/integrations/elasticsearch', tags=["integrations"])
async def delete_elasticsearch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_elasticsearch(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_elasticsearch.delete(tenant_id=context.tenant_id, project_id=projectId)}
@ -349,7 +349,7 @@ async def add_edit_sumologic(projectId: int, data: schemas.SumologicSchema = Bod
@app.delete('/{projectId}/integrations/sumologic', tags=["integrations"])
async def delete_sumologic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_sumologic(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_sumologic.delete(tenant_id=context.tenant_id, project_id=projectId)}
@ -407,7 +407,7 @@ async def add_edit_github(data: schemas.GithubSchema = Body(...),
@app.delete('/integrations/issues', tags=["integrations"])
async def delete_default_issue_tracking_tool(context: schemas.CurrentContext = Depends(OR_context)):
async def delete_default_issue_tracking_tool(_=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id,
user_id=context.user_id)
if error is not None and integration is None:
@ -416,7 +416,7 @@ async def delete_default_issue_tracking_tool(context: schemas.CurrentContext = D
@app.delete('/integrations/jira', tags=["integrations"])
async def delete_jira_cloud(context: schemas.CurrentContext = Depends(OR_context)):
async def delete_jira_cloud(_=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
error, integration = integrations_manager.get_integration(tool=integration_jira_cloud.PROVIDER,
tenant_id=context.tenant_id,
user_id=context.user_id,
@ -427,7 +427,7 @@ async def delete_jira_cloud(context: schemas.CurrentContext = Depends(OR_context
@app.delete('/integrations/github', tags=["integrations"])
async def delete_github(context: schemas.CurrentContext = Depends(OR_context)):
async def delete_github(_=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
error, integration = integrations_manager.get_integration(tool=integration_github.PROVIDER,
tenant_id=context.tenant_id,
user_id=context.user_id,
@ -532,7 +532,8 @@ async def edit_metadata(projectId: int, index: int, data: schemas.MetadataBasicS
@app.delete('/{projectId}/metadata/{index}', tags=["metadata"])
async def delete_metadata(projectId: int, index: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_metadata(projectId: int, index: int, _=Body(None),
context: schemas.CurrentContext = Depends(OR_context)):
return metadata.delete(tenant_id=context.tenant_id, project_id=projectId, index=index)
@ -607,7 +608,8 @@ async def update_alert(projectId: int, alertId: int, data: schemas.AlertSchema =
@app.delete('/{projectId}/alerts/{alertId}', tags=["alerts"])
async def delete_alert(projectId: int, alertId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_alert(projectId: int, alertId: int, _=Body(None),
context: schemas.CurrentContext = Depends(OR_context)):
return alerts.delete(project_id=projectId, alert_id=alertId)
@ -684,7 +686,7 @@ async def edit_project(projectId: int, data: schemas.CreateProjectSchema = Body(
@app.delete('/projects/{projectId}', tags=['projects'])
async def delete_project(projectId, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_project(projectId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
return projects.delete(tenant_id=context.tenant_id, user_id=context.user_id, project_id=projectId)
@ -763,7 +765,7 @@ async def get_slack_webhook(webhookId: int, context: schemas.CurrentContext = De
@app.delete('/integrations/slack/{webhookId}', tags=["integrations"])
async def delete_slack_integration(webhookId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_slack_integration(webhookId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
return webhook.delete(tenant_id=context.tenant_id, webhook_id=webhookId)
@ -779,7 +781,7 @@ async def get_webhooks(context: schemas.CurrentContext = Depends(OR_context)):
@app.delete('/webhooks/{webhookId}', tags=["webhooks"])
async def delete_webhook(webhookId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_webhook(webhookId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
return {"data": webhook.delete(tenant_id=context.tenant_id, webhook_id=webhookId)}
@ -794,7 +796,7 @@ async def reset_reinvite_member(memberId: int, context: schemas.CurrentContext =
@app.delete('/client/members/{memberId}', tags=["client"])
async def delete_member(memberId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_member(memberId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
return users.delete_member(tenant_id=context.tenant_id, user_id=context.user_id, id_to_delete=memberId)
@ -834,7 +836,8 @@ async def update_saved_search(projectId: int, search_id: int, data: schemas.Save
@app.delete('/{projectId}/saved_search/{search_id}', tags=["savedSearch"])
async def delete_saved_search(projectId: int, search_id: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_saved_search(projectId: int, search_id: int, _=Body(None),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": saved_search.delete(project_id=projectId, user_id=context.user_id, search_id=search_id)}
@ -883,7 +886,8 @@ async def edit_msteams_integration(webhookId: int, data: schemas.EditCollaborati
@app.delete('/integrations/msteams/{webhookId}', tags=["integrations"])
async def delete_msteams_integration(webhookId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_msteams_integration(webhookId: int, _=Body(None),
context: schemas.CurrentContext = Depends(OR_context)):
return webhook.delete(tenant_id=context.tenant_id, webhook_id=webhookId)

View file

@ -148,8 +148,7 @@ async def search_sessions_by_metadata(key: str, value: str, projectId: Optional[
@app.get('/projects', tags=['projects'])
async def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True,
stack_integrations=True)}
return {"data": projects.get_projects(tenant_id=context.tenant_id, gdpr=True, recorded=True)}
# for backward compatibility
@ -417,7 +416,7 @@ async def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNote
@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
async def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_note(projectId: int, noteId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.delete(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId)
return data

View file

@ -1,26 +0,0 @@
from apscheduler.triggers.cron import CronTrigger
from chalicelib.core import telemetry
from chalicelib.core import weekly_report, jobs
async def run_scheduled_jobs() -> None:
jobs.execute_jobs()
async def weekly_report2() -> None:
weekly_report.cron()
async def telemetry_cron() -> None:
telemetry.compute()
cron_jobs = [
{"func": telemetry_cron, "trigger": CronTrigger(day_of_week="*"),
"misfire_grace_time": 60 * 60, "max_instances": 1},
{"func": run_scheduled_jobs, "trigger": CronTrigger(day_of_week="*", hour=0, minute=15),
"misfire_grace_time": 20, "max_instances": 1},
{"func": weekly_report2, "trigger": CronTrigger(day_of_week="mon", hour=5),
"misfire_grace_time": 60 * 60, "max_instances": 1}
]

View file

@ -39,7 +39,8 @@ async def update_dashboard(projectId: int, dashboardId: int, data: schemas.EditD
@app.delete('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
async def delete_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_dashboard(projectId: int, dashboardId: int, _=Body(None),
context: schemas.CurrentContext = Depends(OR_context)):
return dashboards.delete_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)
@ -77,7 +78,7 @@ async def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId:
@app.delete('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
async def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int,
async def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int, _=Body(None),
context: schemas.CurrentContext = Depends(OR_context)):
return dashboards.remove_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
widget_id=widgetId)
@ -264,5 +265,6 @@ async def update_custom_metric_state(projectId: int, metric_id: int,
@app.delete('/{projectId}/cards/{metric_id}', tags=["dashboard"])
@app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
async def delete_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_custom_metric(projectId: int, metric_id: int, _=Body(None),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.delete(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}

View file

@ -50,7 +50,7 @@ async def get_user_details(projectKey: str, userId: str):
@app_apikey.delete('/v1/{projectKey}/users/{userId}', tags=["api"])
async def schedule_to_delete_user_data(projectKey: str, userId: str):
async def schedule_to_delete_user_data(projectKey: str, userId: str, _=Body(None)):
projectId = projects.get_internal_project_id(projectKey)
if projectId is None:
return {"errors": ["invalid projectKey"]}
@ -72,7 +72,7 @@ async def get_job(projectKey: str, jobId: int):
@app_apikey.delete('/v1/{projectKey}/jobs/{jobId}', tags=["api"])
async def cancel_job(projectKey: str, jobId: int):
async def cancel_job(projectKey: str, jobId: int, _=Body(None)):
job = jobs.get(job_id=jobId)
job_not_found = len(job.keys()) == 0

View file

@ -1,19 +1,19 @@
{
"name": "assist-server",
"version": "1.0.0",
"version": "v1.12.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "assist-server",
"version": "1.0.0",
"version": "v1.12.0",
"license": "Elastic License 2.0 (ELv2)",
"dependencies": {
"@maxmind/geoip2-node": "^3.5.0",
"express": "^4.18.2",
"jsonwebtoken": "^9.0.0",
"socket.io": "^4.6.0",
"ua-parser-js": "^1.0.33"
"socket.io": "^4.6.1",
"ua-parser-js": "^1.0.35"
}
},
"node_modules/@maxmind/geoip2-node": {
@ -45,9 +45,9 @@
}
},
"node_modules/@types/node": {
"version": "18.14.6",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.6.tgz",
"integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA=="
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
},
"node_modules/accepts": {
"version": "1.3.8",
@ -579,12 +579,12 @@
}
},
"node_modules/maxmind": {
"version": "4.3.8",
"resolved": "https://registry.npmjs.org/maxmind/-/maxmind-4.3.8.tgz",
"integrity": "sha512-HrfxEu5yPBPtTy/OT+W5bPQwEfLUX0EHqe2EbJiB47xQMumHqXvSP7PAwzV8Z++NRCmQwy4moQrTSt0+dH+Jmg==",
"version": "4.3.10",
"resolved": "https://registry.npmjs.org/maxmind/-/maxmind-4.3.10.tgz",
"integrity": "sha512-H83pPwi4OqpjPmvAVtuimVWFe6JwHdFK+UIzq4KdvQrKUMLieIrsvU/A9N8jbmOqC2JJPA+jtlFwodyqmzl/3w==",
"dependencies": {
"mmdb-lib": "2.0.2",
"tiny-lru": "9.0.3"
"tiny-lru": "10.4.1"
},
"engines": {
"node": ">=12",
@ -788,9 +788,9 @@
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"node_modules/semver": {
"version": "7.3.8",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
"version": "7.4.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.4.0.tgz",
"integrity": "sha512-RgOxM8Mw+7Zus0+zcLEUn8+JfoLpj/huFTItQy2hsM4khuC1HYRDp0cU482Ewn/Fcy6bCjufD8vAj7voC66KQw==",
"dependencies": {
"lru-cache": "^6.0.0"
},
@ -948,11 +948,11 @@
}
},
"node_modules/tiny-lru": {
"version": "9.0.3",
"resolved": "https://registry.npmjs.org/tiny-lru/-/tiny-lru-9.0.3.tgz",
"integrity": "sha512-/i9GruRjXsnDgehxvy6iZ4AFNVxngEFbwzirhdulomMNPGPVV3ECMZOWSw0w4sRMZ9Al9m4jy08GPvRxRUGYlw==",
"version": "10.4.1",
"resolved": "https://registry.npmjs.org/tiny-lru/-/tiny-lru-10.4.1.tgz",
"integrity": "sha512-buLIzw7ppqymuO3pt10jHk/6QMeZLbidihMQU+N6sogF6EnBzG0qtDWIHuhw1x3dyNgVL/KTGIZsTK81+yCzLg==",
"engines": {
"node": ">=6"
"node": ">=12"
}
},
"node_modules/toidentifier": {
@ -987,9 +987,9 @@
}
},
"node_modules/ua-parser-js": {
"version": "1.0.34",
"resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.34.tgz",
"integrity": "sha512-K9mwJm/DaB6mRLZfw6q8IMXipcrmuT6yfhYmwhAkuh+81sChuYstYA+znlgaflUPaYUa3odxKPKGw6Vw/lANew==",
"version": "1.0.35",
"resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.35.tgz",
"integrity": "sha512-fKnGuqmTBnIE+/KXSzCn4db8RTigUzw1AN0DmdU6hJovUTbYJKyqj+8Mt1c4VfRDnOVJnENmfYkIPZ946UrSAA==",
"funding": [
{
"type": "opencollective",

View file

@ -1,6 +1,6 @@
{
"name": "assist-server",
"version": "v1.11.0",
"version": "v1.12.0",
"description": "assist server to get live sessions & sourcemaps reader to get stack trace",
"main": "peerjs-server.js",
"scripts": {
@ -21,7 +21,7 @@
"@maxmind/geoip2-node": "^3.5.0",
"express": "^4.18.2",
"jsonwebtoken": "^9.0.0",
"socket.io": "^4.6.0",
"ua-parser-js": "^1.0.33"
"socket.io": "^4.6.1",
"ua-parser-js": "^1.0.35"
}
}

View file

@ -9,7 +9,9 @@ const {
extractPayloadFromRequest,
sortPaginate,
getValidAttributes,
uniqueAutocomplete
uniqueAutocomplete,
getAvailableRooms,
getCompressionConfig
} = require('../utils/helper');
const {
IDENTITIES,
@ -31,15 +33,11 @@ const createSocketIOServer = function (server, prefix) {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: (prefix ? prefix : '') + '/socket'
path: (prefix ? prefix : '') + '/socket',
...getCompressionConfig()
});
}
const getAvailableRooms = async function () {
return io.sockets.adapter.rooms.keys();
}
const respond = function (res, data) {
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
@ -48,10 +46,10 @@ const respond = function (res, data) {
const socketsList = async function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let filters = extractPayloadFromRequest(req);
let filters = await extractPayloadFromRequest(req);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let rooms = await getAvailableRooms(io);
for (let peerId of rooms.keys()) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
@ -75,10 +73,10 @@ const socketsListByProject = async function (req, res) {
debug && console.log("[WS]looking for available sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let _sessionId = extractSessionIdFromRequest(req);
let filters = extractPayloadFromRequest(req);
let filters = await extractPayloadFromRequest(req);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let rooms = await getAvailableRooms(io);
for (let peerId of rooms.keys()) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
@ -96,17 +94,17 @@ const socketsListByProject = async function (req, res) {
}
}
liveSessions[_projectKey] = liveSessions[_projectKey] || [];
respond(res, _sessionId === undefined ? liveSessions[_projectKey]
respond(res, _sessionId === undefined ? sortPaginate(liveSessions[_projectKey], filters)
: liveSessions[_projectKey].length > 0 ? liveSessions[_projectKey][0]
: null);
}
const socketsLive = async function (req, res) {
debug && console.log("[WS]looking for all available LIVE sessions");
let filters = extractPayloadFromRequest(req);
let filters = await extractPayloadFromRequest(req);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let rooms = await getAvailableRooms(io);
for (let peerId of rooms.keys()) {
let {projectKey} = extractPeerId(peerId);
if (projectKey !== undefined) {
let connected_sockets = await io.in(peerId).fetchSockets();
@ -131,10 +129,10 @@ const socketsLiveByProject = async function (req, res) {
debug && console.log("[WS]looking for available LIVE sessions");
let _projectKey = extractProjectKeyFromRequest(req);
let _sessionId = extractSessionIdFromRequest(req);
let filters = extractPayloadFromRequest(req);
let filters = await extractPayloadFromRequest(req);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let rooms = await getAvailableRooms(io);
for (let peerId of rooms.keys()) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) {
let connected_sockets = await io.in(peerId).fetchSockets();
@ -161,11 +159,11 @@ const socketsLiveByProject = async function (req, res) {
const autocomplete = async function (req, res) {
debug && console.log("[WS]autocomplete");
let _projectKey = extractProjectKeyFromRequest(req);
let filters = extractPayloadFromRequest(req);
let filters = await extractPayloadFromRequest(req);
let results = [];
if (filters.query && Object.keys(filters.query).length > 0) {
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let rooms = await getAvailableRooms(io);
for (let peerId of rooms.keys()) {
let {projectKey} = extractPeerId(peerId);
if (projectKey === _projectKey) {
let connected_sockets = await io.in(peerId).fetchSockets();
@ -180,7 +178,6 @@ const autocomplete = async function (req, res) {
respond(res, uniqueAutocomplete(results));
}
const findSessionSocketId = async (io, peerId) => {
const connected_sockets = await io.in(peerId).fetchSockets();
for (let item of connected_sockets) {
@ -193,7 +190,8 @@ const findSessionSocketId = async (io, peerId) => {
async function sessions_agents_count(io, socket) {
let c_sessions = 0, c_agents = 0;
if (io.sockets.adapter.rooms.get(socket.peerId)) {
const rooms = await getAvailableRooms(io);
if (rooms.get(socket.peerId)) {
const connected_sockets = await io.in(socket.peerId).fetchSockets();
for (let item of connected_sockets) {
@ -212,7 +210,8 @@ async function sessions_agents_count(io, socket) {
async function get_all_agents_ids(io, socket) {
let agents = [];
if (io.sockets.adapter.rooms.get(socket.peerId)) {
const rooms = await getAvailableRooms(io);
if (rooms.get(socket.peerId)) {
const connected_sockets = await io.in(socket.peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.agent) {
@ -223,13 +222,12 @@ async function get_all_agents_ids(io, socket) {
return agents;
}
wsRouter.get(`/sockets-list`, socketsList);
wsRouter.post(`/sockets-list`, socketsList);
wsRouter.get(`/sockets-list/:projectKey/autocomplete`, autocomplete);
wsRouter.get(`/sockets-list/:projectKey`, socketsListByProject);
wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject);
wsRouter.post(`/sockets-list/:projectKey`, socketsListByProject);
wsRouter.get(`/sockets-list/:projectKey/:sessionId`, socketsListByProject);
wsRouter.get(`/sockets-live`, socketsLive);
wsRouter.post(`/sockets-live`, socketsLive);
@ -265,12 +263,13 @@ module.exports = {
}
} else if (c_sessions <= 0) {
debug && console.log(`notifying new agent about no SESSIONS`);
debug && console.log(`notifying new agent about no SESSIONS with peerId:${socket.peerId}`);
io.to(socket.id).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
}
await socket.join(socket.peerId);
if (io.sockets.adapter.rooms.get(socket.peerId)) {
debug && console.log(`${socket.id} joined room:${socket.peerId}, as:${socket.identity}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`);
const rooms = await getAvailableRooms(io);
if (rooms.get(socket.peerId)) {
debug && console.log(`${socket.id} joined room:${socket.peerId}, as:${socket.identity}, members:${rooms.get(socket.peerId).size}`);
}
if (socket.identity === IDENTITIES.agent) {
if (socket.handshake.query.agentInfo !== undefined) {
@ -338,8 +337,9 @@ module.exports = {
setInterval(async (io) => {
try {
let count = 0;
console.log(` ====== Rooms: ${io.sockets.adapter.rooms.size} ====== `);
const arr = Array.from(io.sockets.adapter.rooms);
const rooms = await getAvailableRooms(io);
console.log(` ====== Rooms: ${rooms.size} ====== `);
const arr = Array.from(rooms);
const filtered = arr.filter(room => !room[1].has(room[0]));
for (let i of filtered) {
let {projectKey, sessionId} = extractPeerId(i[0]);

View file

@ -125,7 +125,7 @@ const transformFilters = function (filter) {
}
return filter;
}
const extractPayloadFromRequest = function (req) {
const extractPayloadFromRequest = async function (req) {
let filters = {
"query": {}, // for autocomplete
"filter": {}, // for sessions search
@ -218,6 +218,32 @@ const uniqueAutocomplete = function (list) {
}
return _list;
}
const getAvailableRooms = async function (io) {
return io.sockets.adapter.rooms;
}
const getCompressionConfig = function () {
// WS: The theoretical overhead per socket is 19KB (11KB for compressor and 8KB for decompressor)
let perMessageDeflate = false;
if (process.env.COMPRESSION === "true") {
console.log(`WS compression: enabled`);
perMessageDeflate = {
zlibDeflateOptions: {
windowBits: 10,
memLevel: 1
},
zlibInflateOptions: {
windowBits: 10
}
}
} else {
console.log(`WS compression: disabled`);
}
return {
perMessageDeflate: perMessageDeflate,
clientNoContextTakeover: true
};
}
module.exports = {
transformFilters,
extractPeerId,
@ -230,5 +256,7 @@ module.exports = {
objectToObjectOfArrays,
extractPayloadFromRequest,
sortPaginate,
uniqueAutocomplete
uniqueAutocomplete,
getAvailableRooms,
getCompressionConfig
};

7
ee/api/.gitignore vendored
View file

@ -244,7 +244,7 @@ Pipfile.lock
/chalicelib/utils/strings.py
/chalicelib/utils/TimeUTC.py
/routers/app/__init__.py
/routers/crons/__init__.py
/crons/__init__.py
/routers/subs/__init__.py
/routers/__init__.py
/chalicelib/core/assist.py
@ -253,7 +253,7 @@ Pipfile.lock
/build.sh
/routers/base.py
/routers/core.py
/routers/crons/core_crons.py
/crons/core_crons.py
/db_changes.sql
/Dockerfile_bundle
/entrypoint.bundle.sh
@ -267,6 +267,5 @@ Pipfile.lock
/build_crons.sh
/run-dev.sh
/run-alerts-dev.sh
/routers/subs/health.py
/routers/subs/v1_api.py
#exp /chalicelib/core/dashboards.py
#exp /chalicelib/core/dashboards.py

View file

@ -15,9 +15,7 @@ from chalicelib.utils import events_queue
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from routers import core, core_dynamic, ee, saml
from routers.crons import core_crons
from routers.crons import core_dynamic_crons
from routers.crons import ee_crons
from crons import core_crons, ee_crons, core_dynamic_crons
from routers.subs import insights, metrics, v1_api_ee
from routers.subs import v1_api, health
@ -108,4 +106,4 @@ app.include_router(v1_api.app_apikey)
app.include_router(v1_api_ee.app_apikey)
app.include_router(health.public_app)
app.include_router(health.app)
app.include_router(health.app_apikey)
app.include_router(health.app_apikey)

View file

@ -2,12 +2,14 @@ print("============= CRONS =============")
import asyncio
import sys
from routers.crons import core_dynamic_crons
from crons import core_dynamic_crons
ACTIONS = {
"TELEMETRY": core_dynamic_crons.telemetry_cron,
"JOB": core_dynamic_crons.run_scheduled_jobs,
"REPORT": core_dynamic_crons.weekly_report
"REPORT": core_dynamic_crons.weekly_report,
"PROJECTS_STATS": core_dynamic_crons.health_cron,
"FIX_PROJECTS_STATS": core_dynamic_crons.weekly_health_cron
}
@ -15,7 +17,7 @@ def default_action(action):
async def _func():
print(f"{action} not found in crons-definitions")
print("possible actions:")
print(ACTIONS.keys())
print(list(ACTIONS.keys()))
return _func
@ -27,7 +29,7 @@ async def process(action):
if __name__ == '__main__':
if len(sys.argv) < 2 or len(sys.argv[1]) < 1:
print("please provide actions as argument\npossible actions:")
print(ACTIONS.keys())
print(list(ACTIONS.keys()))
else:
print(f"action: {sys.argv[1]}")
asyncio.run(process(sys.argv[1]))

View file

@ -33,6 +33,9 @@ if config("EXP_METRICS", cast=bool, default=False):
else:
from . import metrics as metrics
if config("EXP_SESSIONS_SEARCH_METRIC", cast=bool, default=False):
logging.info(">>> Using experimental sessions search for metrics")
if config("EXP_ALERTS", cast=bool, default=False):
logging.info(">>> Using experimental alerts")
from . import alerts_processor_exp as alerts_processor

View file

@ -38,18 +38,19 @@ def save_record(project_id, data: schemas_ee.AssistRecordSavePayloadSchema, cont
return result
def search_records(project_id, data: schemas_ee.AssistRecordSearchPayloadSchema, context: schemas_ee.CurrentContext):
def search_records(project_id: int, data: schemas_ee.AssistRecordSearchPayloadSchema,
context: schemas_ee.CurrentContext):
conditions = ["projects.tenant_id=%(tenant_id)s",
"projects.deleted_at ISNULL",
"projects.project_id=%(project_id)s",
"assist_records.deleted_at ISNULL"]
if data.startDate:
if data.startTimestamp:
conditions.append("assist_records.created_at>=%(startDate)s")
if data.endDate:
if data.endTimestamp:
conditions.append("assist_records.created_at<=%(endDate)s")
params = {"tenant_id": context.tenant_id, "project_id": project_id,
"startDate": data.startDate, "endDate": data.endDate,
"startDate": data.startTimestamp, "endDate": data.endTimestamp,
"p_start": (data.page - 1) * data.limit, "p_limit": data.limit,
**data.dict()}
if data.user_id is not None:
@ -59,17 +60,26 @@ def search_records(project_id, data: schemas_ee.AssistRecordSearchPayloadSchema,
params["query"] = helper.values_for_operator(value=data.query,
op=schemas.SearchEventOperator._contains)
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""SELECT record_id, user_id, session_id, assist_records.created_at,
assist_records.name, duration, users.name AS created_by
query = cur.mogrify(f"""SELECT COUNT(assist_records.record_id) OVER () AS count,
record_id, user_id, session_id, assist_records.created_at,
assist_records.name, duration, users.name AS created_by
FROM assist_records
INNER JOIN projects USING (project_id)
LEFT JOIN users USING (user_id)
INNER JOIN projects USING (project_id)
LEFT JOIN users USING (user_id)
WHERE {" AND ".join(conditions)}
ORDER BY assist_records.created_at {data.order}
LIMIT %(p_limit)s OFFSET %(p_start)s;""",
params)
cur.execute(query)
results = helper.list_to_camel_case(cur.fetchall())
rows = helper.list_to_camel_case(cur.fetchall())
if len(rows) == 0:
return {"total": 0, "records": []}
results = {"total": rows[0]["count"]}
for r in rows:
r.pop("count")
results["records"] = rows
return results

View file

@ -16,7 +16,7 @@ if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
else:
from . import errors as errors
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
if config("EXP_SESSIONS_SEARCH_METRIC", cast=bool, default=False):
from chalicelib.core import sessions_legacy as sessions
else:
from chalicelib.core import sessions

View file

@ -6,6 +6,7 @@ import requests
from decouple import config
from chalicelib.utils import pg_client, ch_client
from chalicelib.utils.TimeUTC import TimeUTC
def app_connection_string(name, port, path):
@ -34,7 +35,7 @@ HEALTH_ENDPOINTS = {
}
def __check_database_pg():
def __check_database_pg(*_):
fail_response = {
"health": False,
"details": {
@ -65,11 +66,11 @@ def __check_database_pg():
}
def __not_supported():
def __not_supported(*_):
return {"errors": ["not supported"]}
def __always_healthy():
def __always_healthy(*_):
return {
"health": True,
"details": {}
@ -77,7 +78,7 @@ def __always_healthy():
def __check_be_service(service_name):
def fn():
def fn(*_):
fail_response = {
"health": False,
"details": {
@ -113,7 +114,7 @@ def __check_be_service(service_name):
return fn
def __check_redis():
def __check_redis(*_):
fail_response = {
"health": False,
"details": {"errors": ["server health-check failed"]}
@ -140,7 +141,45 @@ def __check_redis():
}
def get_health():
def __check_SSL(*_):
fail_response = {
"health": False,
"details": {
"errors": ["SSL Certificate health-check failed"]
}
}
try:
requests.get(config("SITE_URL"), verify=True, allow_redirects=True)
except Exception as e:
print("!! health failed: SSL Certificate")
print(str(e))
return fail_response
return {
"health": True,
"details": {}
}
def __get_sessions_stats(tenant_id, *_):
with pg_client.PostgresClient() as cur:
constraints = ["projects.deleted_at IS NULL"]
if tenant_id:
constraints.append("tenant_id=%(tenant_id)s")
query = cur.mogrify(f"""SELECT COALESCE(SUM(sessions_count),0) AS s_c,
COALESCE(SUM(events_count),0) AS e_c
FROM public.projects_stats
INNER JOIN public.projects USING(project_id)
WHERE {" AND ".join(constraints)};""",
{"tenant_id": tenant_id})
cur.execute(query)
row = cur.fetchone()
return {
"numberOfSessionsCaptured": row["s_c"],
"numberOfEventCaptured": row["e_c"]
}
def get_health(tenant_id=None):
health_map = {
"databases": {
"postgres": __check_database_pg,
@ -169,15 +208,127 @@ def get_health():
"sink": __check_be_service("sink"),
"sourcemaps-reader": __check_be_service("sourcemaps-reader"),
"storage": __check_be_service("storage")
}
},
"details": __get_sessions_stats,
"ssl": __check_SSL
}
for parent_key in health_map.keys():
for element_key in health_map[parent_key]:
health_map[parent_key][element_key] = health_map[parent_key][element_key]()
if isinstance(health_map[parent_key], dict):
for element_key in health_map[parent_key]:
health_map[parent_key][element_key] = health_map[parent_key][element_key](tenant_id)
else:
health_map[parent_key] = health_map[parent_key](tenant_id)
return health_map
def __check_database_ch():
def cron():
with pg_client.PostgresClient() as cur:
query = cur.mogrify("""SELECT projects.project_id,
projects.created_at,
projects.sessions_last_check_at,
projects.first_recorded_session_at,
projects_stats.last_update_at
FROM public.projects
LEFT JOIN public.projects_stats USING (project_id)
WHERE projects.deleted_at IS NULL
ORDER BY project_id;""")
cur.execute(query)
rows = cur.fetchall()
for r in rows:
insert = False
if r["last_update_at"] is None:
# never counted before, must insert
insert = True
if r["first_recorded_session_at"] is None:
if r["sessions_last_check_at"] is None:
count_start_from = r["created_at"]
else:
count_start_from = r["sessions_last_check_at"]
else:
count_start_from = r["first_recorded_session_at"]
else:
# counted before, must update
count_start_from = r["last_update_at"]
count_start_from = TimeUTC.datetime_to_timestamp(count_start_from)
params = {"project_id": r["project_id"],
"start_ts": count_start_from,
"end_ts": TimeUTC.now(),
"sessions_count": 0,
"events_count": 0}
query = cur.mogrify("""SELECT COUNT(1) AS sessions_count,
COALESCE(SUM(events_count),0) AS events_count
FROM public.sessions
WHERE project_id=%(project_id)s
AND start_ts>=%(start_ts)s
AND start_ts<=%(end_ts)s
AND duration IS NOT NULL;""",
params)
cur.execute(query)
row = cur.fetchone()
if row is not None:
params["sessions_count"] = row["sessions_count"]
params["events_count"] = row["events_count"]
if insert:
query = cur.mogrify("""INSERT INTO public.projects_stats(project_id, sessions_count, events_count, last_update_at)
VALUES (%(project_id)s, %(sessions_count)s, %(events_count)s, (now() AT TIME ZONE 'utc'::text));""",
params)
else:
query = cur.mogrify("""UPDATE public.projects_stats
SET sessions_count=sessions_count+%(sessions_count)s,
events_count=events_count+%(events_count)s,
last_update_at=(now() AT TIME ZONE 'utc'::text)
WHERE project_id=%(project_id)s;""",
params)
cur.execute(query)
# this cron is used to correct the sessions&events count every week
def weekly_cron():
with pg_client.PostgresClient(long_query=True) as cur:
query = cur.mogrify("""SELECT project_id,
projects_stats.last_update_at
FROM public.projects
LEFT JOIN public.projects_stats USING (project_id)
WHERE projects.deleted_at IS NULL
ORDER BY project_id;""")
cur.execute(query)
rows = cur.fetchall()
for r in rows:
if r["last_update_at"] is None:
continue
params = {"project_id": r["project_id"],
"end_ts": TimeUTC.now(),
"sessions_count": 0,
"events_count": 0}
query = cur.mogrify("""SELECT COUNT(1) AS sessions_count,
COALESCE(SUM(events_count),0) AS events_count
FROM public.sessions
WHERE project_id=%(project_id)s
AND start_ts<=%(end_ts)s
AND duration IS NOT NULL;""",
params)
cur.execute(query)
row = cur.fetchone()
if row is not None:
params["sessions_count"] = row["sessions_count"]
params["events_count"] = row["events_count"]
query = cur.mogrify("""UPDATE public.projects_stats
SET sessions_count=%(sessions_count)s,
events_count=%(events_count)s,
last_update_at=(now() AT TIME ZONE 'utc'::text)
WHERE project_id=%(project_id)s;""",
params)
cur.execute(query)
def __check_database_ch(*_):
fail_response = {
"health": False,
"details": {"errors": ["server health-check failed"]}
@ -210,7 +361,7 @@ def __check_database_ch():
}
}
# def __check_kafka():
# def __check_kafka(*_):
# fail_response = {
# "health": False,
# "details": {"errors": ["server health-check failed"]}

View file

@ -32,7 +32,7 @@ def __update(tenant_id, project_id, changes):
for key in changes.keys():
sub_query.append(f"{helper.key_to_snake_case(key)} = %({key})s")
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""UPDATE public.projects
query = cur.mogrify(f"""UPDATE public.projects
SET {" ,".join(sub_query)}
WHERE project_id = %(project_id)s
AND deleted_at ISNULL
@ -53,8 +53,7 @@ def __create(tenant_id, name):
return get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True)
def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False, user_id=None):
stack_integrations = False
def get_projects(tenant_id: int, gdpr: bool = False, recorded: bool = False, user_id: int = None):
with pg_client.PostgresClient() as cur:
role_query = """INNER JOIN LATERAL (SELECT 1
FROM users
@ -66,37 +65,28 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
AND (roles.all_projects OR roles_projects.project_id = s.project_id)
LIMIT 1) AS role_project ON (TRUE)"""
extra_projection = ""
extra_join = ""
if gdpr:
extra_projection += ',s.gdpr'
if recorded:
extra_projection += """,COALESCE(nullif(EXTRACT(EPOCH FROM s.first_recorded_session_at) * 1000, NULL)::BIGINT ,
extra_projection += """,\nCOALESCE(EXTRACT(EPOCH FROM s.first_recorded_session_at) * 1000::BIGINT,
(SELECT MIN(sessions.start_ts)
FROM public.sessions
WHERE sessions.project_id = s.project_id
AND sessions.start_ts >= (EXTRACT(EPOCH FROM
COALESCE(s.sessions_last_check_at, s.created_at)) * 1000-24*60*60*1000)
AND sessions.start_ts >= (EXTRACT(EPOCH
FROM COALESCE(s.sessions_last_check_at, s.created_at)) * 1000-%(check_delta)s)
AND sessions.start_ts <= %(now)s
LIMIT 1), NULL) AS first_recorded"""
if stack_integrations:
extra_projection += ',stack_integrations.count>0 AS stack_integrations'
if stack_integrations:
extra_join = """LEFT JOIN LATERAL (SELECT COUNT(*) AS count
FROM public.integrations
WHERE s.project_id = integrations.project_id
LIMIT 1) AS stack_integrations ON TRUE"""
)) AS first_recorded"""
query = cur.mogrify(f"""{"SELECT *, first_recorded IS NOT NULL AS recorded FROM (" if recorded else ""}
SELECT s.project_id, s.name, s.project_key, s.save_request_payloads, s.first_recorded_session_at,
created_at {extra_projection}
created_at, sessions_last_check_at {extra_projection}
FROM public.projects AS s
{extra_join}
{role_query if user_id is not None else ""}
WHERE s.tenant_id =%(tenant_id)s
AND s.deleted_at IS NULL
ORDER BY s.name {") AS raw" if recorded else ""};""",
{"tenant_id": tenant_id, "user_id": user_id, "now": TimeUTC.now()})
{"tenant_id": tenant_id, "user_id": user_id, "now": TimeUTC.now(),
"check_delta": TimeUTC.MS_HOUR * 4})
cur.execute(query)
rows = cur.fetchall()
# if recorded is requested, check if it was saved or computed
@ -104,13 +94,17 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
u_values = []
params = {}
for i, r in enumerate(rows):
r["sessions_last_check_at"] = TimeUTC.datetime_to_timestamp(r["sessions_last_check_at"])
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
if r["first_recorded_session_at"] is None:
if r["first_recorded_session_at"] is None \
and r["sessions_last_check_at"] is not None \
and (TimeUTC.now() - r["sessions_last_check_at"]) > TimeUTC.MS_HOUR:
u_values.append(f"(%(project_id_{i})s,to_timestamp(%(first_recorded_{i})s/1000))")
params[f"project_id_{i}"] = r["project_id"]
params[f"first_recorded_{i}"] = r["first_recorded"] if r["recorded"] else None
r.pop("first_recorded_session_at")
r.pop("first_recorded")
r.pop("sessions_last_check_at")
if len(u_values) > 0:
query = cur.mogrify(f"""UPDATE public.projects
SET sessions_last_check_at=(now() at time zone 'utc'), first_recorded_session_at=u.first_recorded
@ -120,26 +114,7 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
else:
for r in rows:
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
if recording_state and len(rows) > 0:
project_ids = [f'({r["project_id"]})' for r in rows]
query = cur.mogrify(f"""SELECT projects.project_id, COALESCE(MAX(start_ts), 0) AS last
FROM (VALUES {",".join(project_ids)}) AS projects(project_id)
LEFT JOIN sessions USING (project_id)
WHERE sessions.start_ts >= %(startDate)s AND sessions.start_ts <= %(endDate)s
GROUP BY project_id;""",
{"startDate": TimeUTC.now(delta_days=-3), "endDate": TimeUTC.now(delta_days=1)})
cur.execute(query=query)
status = cur.fetchall()
for r in rows:
r["status"] = "red"
for s in status:
if s["project_id"] == r["project_id"]:
if TimeUTC.now(-2) <= s["last"] < TimeUTC.now(-1):
r["status"] = "yellow"
elif s["last"] >= TimeUTC.now(-1):
r["status"] = "green"
break
r.pop("sessions_last_check_at")
return helper.list_to_camel_case(rows)

View file

@ -36,4 +36,4 @@ def get_urls(session_id, project_id, context: schemas_ee.CurrentContext, check_e
def delete_mobs(project_id, session_ids):
for session_id in session_ids:
for k in __get_devtools_keys(project_id=project_id, session_id=session_id):
s3.schedule_for_deletion(config("sessions_bucket"), k)
s3.tag_for_deletion(bucket=config("sessions_bucket"), key=k)

View file

@ -1272,7 +1272,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu
def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
if project_id is None:
all_projects = projects.get_projects(tenant_id=tenant_id, recording_state=False)
all_projects = projects.get_projects(tenant_id=tenant_id)
else:
all_projects = [
projects.get_project(tenant_id=tenant_id, project_id=int(project_id), include_last_session=False,

View file

@ -4,7 +4,7 @@ import re
from typing import Optional, List
from decouple import config
from fastapi import Request, Response
from fastapi import Request, Response, BackgroundTasks
from pydantic import BaseModel, Field
from starlette.background import BackgroundTask
@ -108,10 +108,10 @@ async def process_trace(action: str, path_format: str, request: Request, respons
return
current_context: CurrentContext = request.state.currentContext
body: json = None
if request.method in ["POST", "PUT"]:
if request.method in ["POST", "PUT", "DELETE"]:
try:
body = await request.json()
except json.decoder.JSONDecodeError:
except Exception:
pass
intersect = list(set(body.keys()) & set(IGNORE_IN_PAYLOAD))
for attribute in intersect:
@ -142,9 +142,9 @@ def trace(action: str, path_format: str, request: Request, response: Response):
background_task: BackgroundTask = BackgroundTask(process_trace, action=action, path_format=path_format,
request=request, response=response)
if response.background is None:
response.background = background_task
else:
response.background.add_task(background_task)
response.background = BackgroundTasks()
response.background.add_task(background_task)
async def process_traces_queue():

View file

@ -575,8 +575,7 @@ def set_password_invitation(tenant_id, user_id, new_password):
c = tenants.get_by_tenant_id(tenant_id)
c.pop("createdAt")
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
stack_integrations=True, user_id=user_id)
c["projects"] = projects.get_projects(tenant_id=tenant_id, recorded=True, user_id=user_id)
c["smtp"] = helper.has_smtp()
c["iceServers"] = assist.get_ice_servers()
return {

View file

@ -22,7 +22,10 @@ SAML2 = {
},
"NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress",
"x509cert": config("sp_crt", default=""),
"privateKey": config("sp_key", default="")
"privateKey": config("sp_key", default=""),
},
"security": {
"requestedAuthnContext": False
},
"idp": None
}

View file

@ -24,7 +24,8 @@ class ClickHouseClient:
user=config("ch_user", default="default"),
password=config("ch_password", default=""),
port=config("ch_port", cast=int),
settings=settings) \
settings=settings,
compression='lz4') \
if self.__client is None else self.__client
def __enter__(self):

View file

@ -4,23 +4,14 @@ from chalicelib.utils import s3
def tag_session(file_key, tag_key='retention', tag_value='vault'):
return tag_file(file_key=file_key, bucket=config("sessions_bucket"), tag_key=tag_key, tag_value=tag_value)
bucket = config("sessions_bucket")
if not s3.exists(bucket=bucket, key=file_key):
return None
return s3.tag_file(file_key=file_key, bucket=bucket, tag_key=tag_key, tag_value=tag_value)
def tag_record(file_key, tag_key='retention', tag_value='vault'):
return tag_file(file_key=file_key, bucket=config('ASSIST_RECORDS_BUCKET'), tag_key=tag_key, tag_value=tag_value)
def tag_file(file_key, bucket, tag_key, tag_value):
return s3.client.put_object_tagging(
Bucket=bucket,
Key=file_key,
Tagging={
'TagSet': [
{
'Key': tag_key,
'Value': tag_value
},
]
}
)
bucket = config('ASSIST_RECORDS_BUCKET')
if not s3.exists(bucket=bucket, key=file_key):
return None
return s3.tag_file(file_key=file_key, bucket=bucket, tag_key=tag_key, tag_value=tag_value)

View file

@ -64,22 +64,22 @@ rm -rf ./chalicelib/utils/sql_helper.py
rm -rf ./chalicelib/utils/strings.py
rm -rf ./chalicelib/utils/TimeUTC.py
rm -rf ./routers/app/__init__.py
rm -rf ./routers/crons/__init__.py
rm -rf ./crons/__init__.py
rm -rf ./routers/subs/__init__.py
rm -rf ./routers/__init__.py
rm -rf ./chalicelib/core/assist.py
rm -rf ./auth/__init__.py
rm -rf ./auth/auth_apikey.py
rm -rf ./build.sh
rm -rf ./build_crons.sh
rm -rf ./routers/base.py
rm -rf ./routers/core.py
rm -rf ./routers/crons/core_crons.py
rm -rf ./crons/core_crons.py
rm -rf ./db_changes.sql
rm -rf ./Dockerfile_bundle
rm -rf ./entrypoint.bundle.sh
rm -rf ./chalicelib/core/heatmaps.py
rm -rf ./schemas.py
rm -rf ./routers/subs/health.py
rm -rf ./routers/subs/v1_api.py
#exp rm -rf ./chalicelib/core/custom_metrics.py
rm -rf ./chalicelib/core/performance_event.py

View file

@ -1,9 +1,10 @@
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.interval import IntervalTrigger
from decouple import config
from chalicelib.core import jobs
from chalicelib.core import telemetry, unlock
from chalicelib.core import weekly_report as weekly_report_script
from chalicelib.core import weekly_report as weekly_report_script, health
async def run_scheduled_jobs() -> None:
@ -18,12 +19,20 @@ async def telemetry_cron() -> None:
telemetry.compute()
def unlock_cron() -> None:
async def unlock_cron() -> None:
print("validating license")
unlock.check()
print(f"valid: {unlock.is_valid()}")
async def health_cron() -> None:
health.cron()
async def weekly_health_cron() -> None:
health.weekly_cron()
cron_jobs = [
{"func": unlock_cron, "trigger": CronTrigger(day="*")},
]
@ -34,6 +43,10 @@ SINGLE_CRONS = [
{"func": run_scheduled_jobs, "trigger": CronTrigger(day_of_week="*", hour=0, minute=15),
"misfire_grace_time": 20, "max_instances": 1},
{"func": weekly_report, "trigger": CronTrigger(day_of_week="mon", hour=5),
"misfire_grace_time": 60 * 60, "max_instances": 1},
{"func": health_cron, "trigger": IntervalTrigger(hours=0, minutes=30, start_date="2023-04-01 0:0:0", jitter=300),
"misfire_grace_time": 60 * 60, "max_instances": 1},
{"func": weekly_health_cron, "trigger": CronTrigger(day_of_week="sun", hour=5),
"misfire_grace_time": 60 * 60, "max_instances": 1}
]

View file

@ -1,18 +1,19 @@
requests==2.28.2
requests==2.29.0
urllib3==1.26.15
boto3==1.26.100
boto3==1.26.122
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.6.2
psycopg2-binary==2.9.6
elasticsearch==8.7.0
jira==3.5.0
fastapi==0.95.0
uvicorn[standard]==0.21.1
fastapi==0.95.1
uvicorn[standard]==0.22.0
python-decouple==3.8
pydantic[email]==1.10.7
apscheduler==3.10.1
clickhouse-driver==0.2.5
clickhouse-driver[lz4]==0.2.5
python-multipart==0.0.5

View file

@ -1,21 +1,15 @@
requests==2.28.2
requests==2.29.0
urllib3==1.26.15
boto3==1.26.112
boto3==1.26.122
pyjwt==2.6.0
psycopg2-binary==2.9.6
elasticsearch==8.7.0
jira==3.5.0
fastapi==0.95.0
uvicorn[standard]==0.21.1
python-decouple==3.8
pydantic[email]==1.10.7
apscheduler==3.10.1
clickhouse-driver==0.2.5
clickhouse-driver[lz4]==0.2.5
python-multipart==0.0.6
sentry-sdk==1.17.0
stripe==5.3.0
clickhouse-driver[lz4]==0.2.5

View file

@ -1,22 +1,23 @@
requests==2.28.2
requests==2.29.0
urllib3==1.26.15
boto3==1.26.100
boto3==1.26.122
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.6.2
psycopg2-binary==2.9.6
elasticsearch==8.7.0
jira==3.5.0
fastapi==0.95.0
uvicorn[standard]==0.21.1
fastapi==0.95.1
uvicorn[standard]==0.22.0
python-decouple==3.8
pydantic[email]==1.10.7
apscheduler==3.10.1
clickhouse-driver==0.2.5
clickhouse-driver[lz4]==0.2.5
python3-saml==1.15.0
python-multipart==0.0.6
redis==4.5.3
#confluent-kafka==2.0.2
redis==4.5.4
#confluent-kafka==2.1.0

View file

@ -159,8 +159,8 @@ async def search_sessions_by_metadata(key: str, value: str, projectId: Optional[
@app.get('/projects', tags=['projects'])
async def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True,
stack_integrations=True, user_id=context.user_id)}
return {"data": projects.get_projects(tenant_id=context.tenant_id, gdpr=True,
recorded=True, user_id=context.user_id)}
# for backward compatibility
@ -442,7 +442,7 @@ async def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNote
@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"],
dependencies=[OR_scope(Permissions.session_replay)])
async def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_note(projectId: int, noteId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.delete(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId)
return data

View file

@ -47,7 +47,7 @@ async def edit_role(roleId: int, data: schemas_ee.RolePayloadSchema = Body(...),
@app.delete('/client/roles/{roleId}', tags=["client", "roles"])
async def delete_role(roleId: int, context: schemas_ee.CurrentContext = Depends(OR_context)):
async def delete_role(roleId: int, _=Body(None), context: schemas_ee.CurrentContext = Depends(OR_context)):
data = roles.delete(tenant_id=context.tenant_id, user_id=context.user_id, role_id=roleId)
if "errors" in data:
return data
@ -112,7 +112,8 @@ async def update_record(projectId: int, recordId: int, data: schemas_ee.AssistRe
@app.delete('/{projectId}/assist/records/{recordId}', tags=["assist"])
async def delete_record(projectId: int, recordId: int, context: schemas_ee.CurrentContext = Depends(OR_context)):
async def delete_record(projectId: int, recordId: int, _=Body(None),
context: schemas_ee.CurrentContext = Depends(OR_context)):
result = assist_records.delete_record(project_id=projectId, record_id=recordId, context=context)
if "errors" in result:
return result

View file

@ -0,0 +1,23 @@
from fastapi import Depends
from fastapi import HTTPException, status
import schemas
from chalicelib.core import health, tenants
from or_dependencies import OR_context
from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@app.get('/healthz', tags=["health-check"])
async def get_global_health_status(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": health.get_health(tenant_id=context.tenant_id)}
if not tenants.tenants_exists(use_pool=False):
@public_app.get('/health', tags=["health-check"])
async def get_public_health_status():
if tenants.tenants_exists():
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Not Found")
return await get_global_health_status()

View file

@ -41,7 +41,8 @@ async def update_dashboard(projectId: int, dashboardId: int, data: schemas.EditD
@app.delete('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"])
async def delete_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_dashboard(projectId: int, dashboardId: int, _=Body(None),
context: schemas.CurrentContext = Depends(OR_context)):
return dashboards.delete_dashboard(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId)
@ -79,7 +80,7 @@ async def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId:
@app.delete('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"])
async def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int,
async def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int, _=Body(None),
context: schemas.CurrentContext = Depends(OR_context)):
return dashboards.remove_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId,
widget_id=widgetId)
@ -266,5 +267,6 @@ async def update_custom_metric_state(projectId: int, metric_id: int,
@app.delete('/{projectId}/cards/{metric_id}', tags=["dashboard"])
@app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"])
@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
async def delete_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_custom_metric(projectId: int, metric_id: int, _=Body(None),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.delete(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}

6
ee/api/run-crons-dev.sh Executable file
View file

@ -0,0 +1,6 @@
#!/bin/zsh
APP_NAME=crons \
PG_MINCONN=2 \
PG_MAXCONN=10 \
PG_POOL=false \
python app_crons.py $@

View file

@ -135,10 +135,7 @@ class AssistRecordSavePayloadSchema(AssistRecordPayloadSchema):
key: str = Field(...)
class AssistRecordSearchPayloadSchema(schemas._PaginatedSchema):
limit: int = Field(default=200, gt=0)
startDate: Optional[int] = Field(default=None)
endDate: Optional[int] = Field(default=None)
class AssistRecordSearchPayloadSchema(schemas._PaginatedSchema, schemas._TimedSchema):
user_id: Optional[int] = Field(default=None)
query: Optional[str] = Field(default=None)
order: Literal["asc", "desc"] = Field(default="desc")

View file

@ -1,22 +1,22 @@
{
"name": "assist-server",
"version": "v1.11.0-ee",
"version": "v1.12.0-ee",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "assist-server",
"version": "v1.11.0-ee",
"version": "v1.12.0-ee",
"license": "Elastic License 2.0 (ELv2)",
"dependencies": {
"@maxmind/geoip2-node": "^3.5.0",
"@socket.io/redis-adapter": "^8.1.0",
"express": "^4.18.2",
"jsonwebtoken": "^9.0.0",
"redis": "^4.6.4",
"socket.io": "^4.6.0",
"ua-parser-js": "^1.0.33",
"uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.19.0"
"redis": "^4.6.6",
"socket.io": "^4.6.1",
"ua-parser-js": "^1.0.35",
"uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.23.0"
}
},
"node_modules/@maxmind/geoip2-node": {
@ -38,9 +38,9 @@
}
},
"node_modules/@redis/client": {
"version": "1.5.6",
"resolved": "https://registry.npmjs.org/@redis/client/-/client-1.5.6.tgz",
"integrity": "sha512-dFD1S6je+A47Lj22jN/upVU2fj4huR7S9APd7/ziUXsIXDL+11GPYti4Suv5y8FuXaN+0ZG4JF+y1houEJ7ToA==",
"version": "1.5.7",
"resolved": "https://registry.npmjs.org/@redis/client/-/client-1.5.7.tgz",
"integrity": "sha512-gaOBOuJPjK5fGtxSseaKgSvjiZXQCdLlGg9WYQst+/GRUjmXaiB5kVkeQMRtPc7Q2t93XZcJfBMSwzs/XS9UZw==",
"dependencies": {
"cluster-key-slot": "1.1.2",
"generic-pool": "3.9.0",
@ -117,9 +117,9 @@
}
},
"node_modules/@types/node": {
"version": "18.15.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.1.tgz",
"integrity": "sha512-U2TWca8AeHSmbpi314QBESRk7oPjSZjDsR+c+H4ECC1l+kFgpZf8Ydhv3SJpPy51VyZHHqxlb6mTTqYNNRVAIw=="
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
},
"node_modules/accepts": {
"version": "1.3.8",
@ -688,12 +688,12 @@
}
},
"node_modules/maxmind": {
"version": "4.3.8",
"resolved": "https://registry.npmjs.org/maxmind/-/maxmind-4.3.8.tgz",
"integrity": "sha512-HrfxEu5yPBPtTy/OT+W5bPQwEfLUX0EHqe2EbJiB47xQMumHqXvSP7PAwzV8Z++NRCmQwy4moQrTSt0+dH+Jmg==",
"version": "4.3.10",
"resolved": "https://registry.npmjs.org/maxmind/-/maxmind-4.3.10.tgz",
"integrity": "sha512-H83pPwi4OqpjPmvAVtuimVWFe6JwHdFK+UIzq4KdvQrKUMLieIrsvU/A9N8jbmOqC2JJPA+jtlFwodyqmzl/3w==",
"dependencies": {
"mmdb-lib": "2.0.2",
"tiny-lru": "9.0.3"
"tiny-lru": "10.4.1"
},
"engines": {
"node": ">=12",
@ -878,12 +878,12 @@
}
},
"node_modules/redis": {
"version": "4.6.5",
"resolved": "https://registry.npmjs.org/redis/-/redis-4.6.5.tgz",
"integrity": "sha512-O0OWA36gDQbswOdUuAhRL6mTZpHFN525HlgZgDaVNgCJIAZR3ya06NTESb0R+TUZ+BFaDpz6NnnVvoMx9meUFg==",
"version": "4.6.6",
"resolved": "https://registry.npmjs.org/redis/-/redis-4.6.6.tgz",
"integrity": "sha512-aLs2fuBFV/VJ28oLBqYykfnhGGkFxvx0HdCEBYdJ99FFbSEMZ7c1nVKwR6ZRv+7bb7JnC0mmCzaqu8frgOYhpA==",
"dependencies": {
"@redis/bloom": "1.2.0",
"@redis/client": "1.5.6",
"@redis/client": "1.5.7",
"@redis/graph": "1.1.0",
"@redis/json": "1.0.4",
"@redis/search": "1.1.2",
@ -915,9 +915,9 @@
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"node_modules/semver": {
"version": "7.3.8",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
"version": "7.4.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.4.0.tgz",
"integrity": "sha512-RgOxM8Mw+7Zus0+zcLEUn8+JfoLpj/huFTItQy2hsM4khuC1HYRDp0cU482Ewn/Fcy6bCjufD8vAj7voC66KQw==",
"dependencies": {
"lru-cache": "^6.0.0"
},
@ -1046,11 +1046,11 @@
}
},
"node_modules/tiny-lru": {
"version": "9.0.3",
"resolved": "https://registry.npmjs.org/tiny-lru/-/tiny-lru-9.0.3.tgz",
"integrity": "sha512-/i9GruRjXsnDgehxvy6iZ4AFNVxngEFbwzirhdulomMNPGPVV3ECMZOWSw0w4sRMZ9Al9m4jy08GPvRxRUGYlw==",
"version": "10.4.1",
"resolved": "https://registry.npmjs.org/tiny-lru/-/tiny-lru-10.4.1.tgz",
"integrity": "sha512-buLIzw7ppqymuO3pt10jHk/6QMeZLbidihMQU+N6sogF6EnBzG0qtDWIHuhw1x3dyNgVL/KTGIZsTK81+yCzLg==",
"engines": {
"node": ">=6"
"node": ">=12"
}
},
"node_modules/toidentifier": {
@ -1085,9 +1085,9 @@
}
},
"node_modules/ua-parser-js": {
"version": "1.0.34",
"resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.34.tgz",
"integrity": "sha512-K9mwJm/DaB6mRLZfw6q8IMXipcrmuT6yfhYmwhAkuh+81sChuYstYA+znlgaflUPaYUa3odxKPKGw6Vw/lANew==",
"version": "1.0.35",
"resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.35.tgz",
"integrity": "sha512-fKnGuqmTBnIE+/KXSzCn4db8RTigUzw1AN0DmdU6hJovUTbYJKyqj+8Mt1c4VfRDnOVJnENmfYkIPZ946UrSAA==",
"funding": [
{
"type": "opencollective",
@ -1127,8 +1127,8 @@
}
},
"node_modules/uWebSockets.js": {
"version": "20.19.0",
"resolved": "git+ssh://git@github.com/uNetworking/uWebSockets.js.git#42c9c0d5d31f46ca4115dc75672b0037ec970f28"
"version": "20.20.0",
"resolved": "git+ssh://git@github.com/uNetworking/uWebSockets.js.git#70291680f6052af5306191b6ed88bceb58e49aa2"
},
"node_modules/vary": {
"version": "1.1.2",

View file

@ -1,6 +1,6 @@
{
"name": "assist-server",
"version": "v1.11.0-ee",
"version": "v1.12.0-ee",
"description": "assist server to get live sessions & sourcemaps reader to get stack trace",
"main": "peerjs-server.js",
"scripts": {
@ -22,9 +22,9 @@
"@socket.io/redis-adapter": "^8.1.0",
"express": "^4.18.2",
"jsonwebtoken": "^9.0.0",
"redis": "^4.6.4",
"socket.io": "^4.6.0",
"ua-parser-js": "^1.0.33",
"uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.19.0"
"redis": "^4.6.6",
"socket.io": "^4.6.1",
"ua-parser-js": "^1.0.35",
"uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.23.0"
}
}

View file

@ -19,7 +19,9 @@ const {
const {
extractProjectKeyFromRequest,
extractSessionIdFromRequest,
extractPayloadFromRequest
extractPayloadFromRequest,
getCompressionConfig,
getAvailableRooms
} = require('../utils/helper-ee');
const {createAdapter} = require("@socket.io/redis-adapter");
const {createClient} = require("redis");
@ -39,7 +41,8 @@ const createSocketIOServer = function (server, prefix) {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: (prefix ? prefix : '') + '/socket'
path: (prefix ? prefix : '') + '/socket',
...getCompressionConfig()
});
} else {
io = new _io.Server({
@ -48,9 +51,8 @@ const createSocketIOServer = function (server, prefix) {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: (prefix ? prefix : '') + '/socket'
// transports: ['websocket'],
// upgrade: false
path: (prefix ? prefix : '') + '/socket',
...getCompressionConfig()
});
io.attachApp(server);
}
@ -68,9 +70,6 @@ const uniqueSessions = function (data) {
return resArr;
}
const getAvailableRooms = async function () {
return io.of('/').adapter.allRooms();
}
const respond = function (res, data) {
let result = {data}
@ -86,10 +85,9 @@ const respond = function (res, data) {
const socketsList = async function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let filters = await extractPayloadFromRequest(req, res);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let rooms = await getAvailableRooms(io);
for (let peerId of rooms.keys()) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
@ -115,8 +113,8 @@ const socketsListByProject = async function (req, res) {
let _sessionId = extractSessionIdFromRequest(req);
let filters = await extractPayloadFromRequest(req, res);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let rooms = await getAvailableRooms(io);
for (let peerId of rooms.keys()) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
@ -134,7 +132,7 @@ const socketsListByProject = async function (req, res) {
}
}
liveSessions[_projectKey] = liveSessions[_projectKey] || [];
respond(res, _sessionId === undefined ? liveSessions[_projectKey]
respond(res, _sessionId === undefined ? sortPaginate(liveSessions[_projectKey], filters)
: liveSessions[_projectKey].length > 0 ? liveSessions[_projectKey][0]
: null);
}
@ -143,8 +141,8 @@ const socketsLive = async function (req, res) {
debug && console.log("[WS]looking for all available LIVE sessions");
let filters = await extractPayloadFromRequest(req, res);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let rooms = await getAvailableRooms(io);
for (let peerId of rooms.keys()) {
let {projectKey} = extractPeerId(peerId);
if (projectKey !== undefined) {
let connected_sockets = await io.in(peerId).fetchSockets();
@ -172,8 +170,8 @@ const socketsLiveByProject = async function (req, res) {
let _sessionId = extractSessionIdFromRequest(req);
let filters = await extractPayloadFromRequest(req, res);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let rooms = await getAvailableRooms(io);
for (let peerId of rooms.keys()) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) {
let connected_sockets = await io.in(peerId).fetchSockets();
@ -204,8 +202,8 @@ const autocomplete = async function (req, res) {
let filters = await extractPayloadFromRequest(req);
let results = [];
if (filters.query && Object.keys(filters.query).length > 0) {
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let rooms = await getAvailableRooms(io);
for (let peerId of rooms.keys()) {
let {projectKey} = extractPeerId(peerId);
if (projectKey === _projectKey) {
let connected_sockets = await io.in(peerId).fetchSockets();
@ -232,7 +230,7 @@ const findSessionSocketId = async (io, peerId) => {
async function sessions_agents_count(io, socket) {
let c_sessions = 0, c_agents = 0;
let rooms = await io.of('/').adapter.allRooms();
const rooms = await getAvailableRooms(io);
if (rooms.has(socket.peerId)) {
const connected_sockets = await io.in(socket.peerId).fetchSockets();
@ -252,7 +250,7 @@ async function sessions_agents_count(io, socket) {
async function get_all_agents_ids(io, socket) {
let agents = [];
let rooms = await io.of('/').adapter.allRooms();
const rooms = await getAvailableRooms(io);
if (rooms.has(socket.peerId)) {
const connected_sockets = await io.in(socket.peerId).fetchSockets();
for (let item of connected_sockets) {
@ -278,7 +276,6 @@ wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject);
wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject);
wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject);
module.exports = {
wsRouter,
start: (server, prefix) => {
@ -306,12 +303,11 @@ module.exports = {
}
} else if (c_sessions <= 0) {
debug && console.log(`notifying new agent about no SESSIONS`);
debug && console.log(`notifying new agent about no SESSIONS with peerId:${socket.peerId}`);
io.to(socket.id).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
}
// await io.of('/').adapter.join(socket.id, socket.peerId);
await socket.join(socket.peerId);
let rooms = await io.of('/').adapter.allRooms();
const rooms = await getAvailableRooms(io);
if (rooms.has(socket.peerId)) {
let connectedSockets = await io.in(socket.peerId).fetchSockets();
debug && console.log(`${socket.id} joined room:${socket.peerId}, as:${socket.identity}, members:${connectedSockets.length}`);
@ -381,7 +377,7 @@ module.exports = {
console.log("WS server started");
setInterval(async (io) => {
try {
let rooms = await io.of('/').adapter.allRooms();
const rooms = await getAvailableRooms(io);
let validRooms = [];
console.log(` ====== Rooms: ${rooms.size} ====== `);
// const arr = Array.from(rooms)

View file

@ -20,6 +20,8 @@ const {
extractProjectKeyFromRequest,
extractSessionIdFromRequest,
extractPayloadFromRequest,
getCompressionConfig,
getAvailableRooms
} = require('../utils/helper-ee');
const wsRouter = express.Router();
@ -34,7 +36,8 @@ const createSocketIOServer = function (server, prefix) {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: (prefix ? prefix : '') + '/socket'
path: (prefix ? prefix : '') + '/socket',
...getCompressionConfig()
});
} else {
io = new _io.Server({
@ -43,18 +46,13 @@ const createSocketIOServer = function (server, prefix) {
origin: "*",
methods: ["GET", "POST", "PUT"]
},
path: (prefix ? prefix : '') + '/socket'
// transports: ['websocket'],
// upgrade: false
path: (prefix ? prefix : '') + '/socket',
...getCompressionConfig()
});
io.attachApp(server);
}
}
const getAvailableRooms = async function () {
return io.sockets.adapter.rooms.keys();
}
const respond = function (res, data) {
let result = {data}
if (process.env.uws !== "true") {
@ -70,8 +68,8 @@ const socketsList = async function (req, res) {
debug && console.log("[WS]looking for all available sessions");
let filters = await extractPayloadFromRequest(req, res);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let rooms = await getAvailableRooms(io);
for (let peerId of rooms.keys()) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey !== undefined) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
@ -97,8 +95,8 @@ const socketsListByProject = async function (req, res) {
let _sessionId = extractSessionIdFromRequest(req);
let filters = await extractPayloadFromRequest(req, res);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let rooms = await getAvailableRooms(io);
for (let peerId of rooms.keys()) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) {
liveSessions[projectKey] = liveSessions[projectKey] || [];
@ -125,8 +123,8 @@ const socketsLive = async function (req, res) {
debug && console.log("[WS]looking for all available LIVE sessions");
let filters = await extractPayloadFromRequest(req, res);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let rooms = await getAvailableRooms(io);
for (let peerId of rooms.keys()) {
let {projectKey} = extractPeerId(peerId);
if (projectKey !== undefined) {
let connected_sockets = await io.in(peerId).fetchSockets();
@ -153,8 +151,8 @@ const socketsLiveByProject = async function (req, res) {
let _sessionId = extractSessionIdFromRequest(req);
let filters = await extractPayloadFromRequest(req, res);
let liveSessions = {};
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let rooms = await getAvailableRooms(io);
for (let peerId of rooms.keys()) {
let {projectKey, sessionId} = extractPeerId(peerId);
if (projectKey === _projectKey && (_sessionId === undefined || _sessionId === sessionId)) {
let connected_sockets = await io.in(peerId).fetchSockets();
@ -184,8 +182,8 @@ const autocomplete = async function (req, res) {
let filters = await extractPayloadFromRequest(req);
let results = [];
if (filters.query && Object.keys(filters.query).length > 0) {
let rooms = await getAvailableRooms();
for (let peerId of rooms) {
let rooms = await getAvailableRooms(io);
for (let peerId of rooms.keys()) {
let {projectKey} = extractPeerId(peerId);
if (projectKey === _projectKey) {
let connected_sockets = await io.in(peerId).fetchSockets();
@ -212,7 +210,8 @@ const findSessionSocketId = async (io, peerId) => {
async function sessions_agents_count(io, socket) {
let c_sessions = 0, c_agents = 0;
if (io.sockets.adapter.rooms.get(socket.peerId)) {
const rooms = await getAvailableRooms(io);
if (rooms.get(socket.peerId)) {
const connected_sockets = await io.in(socket.peerId).fetchSockets();
for (let item of connected_sockets) {
@ -231,7 +230,8 @@ async function sessions_agents_count(io, socket) {
async function get_all_agents_ids(io, socket) {
let agents = [];
if (io.sockets.adapter.rooms.get(socket.peerId)) {
const rooms = await getAvailableRooms(io);
if (rooms.get(socket.peerId)) {
const connected_sockets = await io.in(socket.peerId).fetchSockets();
for (let item of connected_sockets) {
if (item.handshake.query.identity === IDENTITIES.agent) {
@ -256,7 +256,6 @@ wsRouter.get(`/sockets-live/:projectKey`, socketsLiveByProject);
wsRouter.post(`/sockets-live/:projectKey`, socketsLiveByProject);
wsRouter.get(`/sockets-live/:projectKey/:sessionId`, socketsLiveByProject);
module.exports = {
wsRouter,
start: (server, prefix) => {
@ -284,12 +283,13 @@ module.exports = {
}
} else if (c_sessions <= 0) {
debug && console.log(`notifying new agent about no SESSIONS`);
debug && console.log(`notifying new agent about no SESSIONS with peerId:${socket.peerId}`);
io.to(socket.id).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
}
await socket.join(socket.peerId);
if (io.sockets.adapter.rooms.get(socket.peerId)) {
debug && console.log(`${socket.id} joined room:${socket.peerId}, as:${socket.identity}, members:${io.sockets.adapter.rooms.get(socket.peerId).size}`);
const rooms = await getAvailableRooms(io);
if (rooms.get(socket.peerId)) {
debug && console.log(`${socket.id} joined room:${socket.peerId}, as:${socket.identity}, members:${rooms.get(socket.peerId).size}`);
}
if (socket.identity === IDENTITIES.agent) {
if (socket.handshake.query.agentInfo !== undefined) {
@ -357,8 +357,9 @@ module.exports = {
setInterval(async (io) => {
try {
let count = 0;
console.log(` ====== Rooms: ${io.sockets.adapter.rooms.size} ====== `);
const arr = Array.from(io.sockets.adapter.rooms);
const rooms = await getAvailableRooms(io);
console.log(` ====== Rooms: ${rooms.size} ====== `);
const arr = Array.from(rooms);
const filtered = arr.filter(room => !room[1].has(room[0]));
for (let i of filtered) {
let {projectKey, sessionId} = extractPeerId(i[0]);

View file

@ -1,3 +1,4 @@
const uWS = require("uWebSockets.js");
const helper = require('./helper');
let debug = process.env.debug === "1";
const getBodyFromUWSResponse = async function (res) {
@ -95,8 +96,36 @@ const extractPayloadFromRequest = async function (req, res) {
debug && console.log("payload/filters:" + JSON.stringify(filters))
return Object.keys(filters).length > 0 ? filters : undefined;
}
const getAvailableRooms = async function (io) {
if (process.env.redis === "true") {
return io.of('/').adapter.allRooms();
} else {
return helper.getAvailableRooms(io);
}
}
const getCompressionConfig = function () {
if (process.env.uws !== "true") {
return helper.getCompressionConfig();
} else {
// uWS: The theoretical overhead per socket is 32KB (8KB for compressor and for 24KB decompressor)
if (process.env.COMPRESSION === "true") {
console.log(`uWS compression: enabled`);
return {
compression: uWS.DEDICATED_COMPRESSOR_8KB,
decompression: uWS.DEDICATED_DECOMPRESSOR_1KB
};
} else {
console.log(`uWS compression: disabled`);
return {};
}
}
}
module.exports = {
extractProjectKeyFromRequest,
extractSessionIdFromRequest,
extractPayloadFromRequest
extractPayloadFromRequest,
getCompressionConfig,
getAvailableRooms
};

View file

@ -0,0 +1,7 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.12.0-ee';
ALTER TABLE experimental.events
MODIFY COLUMN issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20,'app_crash'=21));
ALTER TABLE experimental.issues
MODIFY COLUMN type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20,'app_crash'=21);

View file

@ -1,4 +1,4 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.11.0-ee';
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.12.0-ee';
CREATE DATABASE IF NOT EXISTS experimental;
CREATE TABLE IF NOT EXISTS experimental.autocomplete
@ -75,7 +75,7 @@ CREATE TABLE IF NOT EXISTS experimental.events
success Nullable(UInt8),
request_body Nullable(String),
response_body Nullable(String),
issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20)),
issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20,'app_crash'=21)),
issue_id Nullable(String),
error_tags_keys Array(String),
error_tags_values Array(Nullable(String)),
@ -197,7 +197,7 @@ CREATE TABLE IF NOT EXISTS experimental.issues
(
project_id UInt16,
issue_id String,
type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20),
type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20,'app_crash'=21),
context_string String,
context_keys Array(String),
context_values Array(Nullable(String)),

View file

@ -0,0 +1,39 @@
-- Pre-flight guard for the v1.12.0-ee migration: runs BEFORE the transactional
-- migration below and aborts the whole script unless the database is at the
-- expected previous version.
DO
$$
DECLARE
previous_version CONSTANT text := 'v1.11.0-ee';
next_version CONSTANT text := 'v1.12.0-ee';
BEGIN
IF (SELECT openreplay_version()) = previous_version THEN
-- Expected case: DB is one version behind; proceed with the upgrade.
raise notice 'valid previous DB version';
ELSEIF (SELECT openreplay_version()) = next_version THEN
-- Idempotency: script already applied; notice only, no error.
raise notice 'new version detected, nothing to do';
ELSE
-- Any other version means upgrades would run out of order; hard stop.
RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version());
END IF;
END ;
$$
LANGUAGE plpgsql;
-- v1.12.0-ee schema migration, applied atomically in a single transaction.
BEGIN;
-- Bump the version marker so the guard block above recognizes this DB as upgraded.
CREATE OR REPLACE FUNCTION openreplay_version()
    RETURNS text AS
$$
SELECT 'v1.12.0-ee'
$$ LANGUAGE sql IMMUTABLE;
-- Extend the issue taxonomy with mobile app crashes.
-- NOTE(review): ALTER TYPE ... ADD VALUE inside a transaction requires
-- PostgreSQL >= 12 — presumably the supported minimum; verify against deployment docs.
ALTER TYPE issue_type ADD VALUE IF NOT EXISTS 'app_crash';
-- Per-project usage counters, one row per (project, snapshot time),
-- maintained by the PROJECTS_STATS / FIX_PROJECTS_STATS cron jobs.
CREATE TABLE IF NOT EXISTS public.projects_stats
(
project_id integer NOT NULL,
created_at timestamp default (now() AT TIME ZONE 'utc'::text),
sessions_count integer NOT NULL DEFAULT 0,
events_count bigint NOT NULL DEFAULT 0,
last_update_at timestamp default (now() AT TIME ZONE 'utc'::text),
primary key (project_id, created_at)
);
-- Secondary index for lookups by project alone (PK leads with project_id,
-- but this keeps single-column scans cheap and explicit).
CREATE INDEX IF NOT EXISTS projects_stats_project_id_idx ON public.projects_stats (project_id);
COMMIT;

View file

@ -7,7 +7,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.11.0-ee'
SELECT 'v1.12.0-ee'
$$ LANGUAGE sql IMMUTABLE;
@ -131,7 +131,8 @@ $$
('users'),
('webhooks'),
('sessions_notes'),
('assist_records'))
('assist_records'),
('projects_stats'))
select bool_and(exists(select *
from information_schema.tables t
where table_schema = 'public'
@ -415,7 +416,8 @@ $$
'ml_slow_resources',
'custom',
'js_exception',
'mouse_thrashing'
'mouse_thrashing',
'app_crash'
);
END IF;
@ -862,6 +864,31 @@ $$
is_public boolean NOT NULL DEFAULT FALSE
);
CREATE TABLE IF NOT EXISTS public.assist_records
(
record_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL,
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE SET NULL,
created_at bigint NOT NULL DEFAULT (EXTRACT(EPOCH FROM now() at time zone 'utc') * 1000)::bigint,
deleted_at timestamp without time zone NULL DEFAULT NULL,
name text NOT NULL,
file_key text NOT NULL,
duration integer NOT NULL
);
CREATE TABLE IF NOT EXISTS public.projects_stats
(
project_id integer NOT NULL,
created_at timestamp default (now() AT TIME ZONE 'utc'::text),
sessions_count integer NOT NULL DEFAULT 0,
events_count bigint NOT NULL DEFAULT 0,
last_update_at timestamp default (now() AT TIME ZONE 'utc'::text),
primary key (project_id, created_at)
);
CREATE INDEX IF NOT EXISTS projects_stats_project_id_idx ON public.projects_stats (project_id);
RAISE NOTICE 'Created missing public schema tables';
END IF;
END;
@ -1212,18 +1239,6 @@ $$
CREATE INDEX IF NOT EXISTS requests_path_nn_gin_idx ON events_common.requests USING GIN (path gin_trgm_ops) WHERE path IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_query_nn_gin_idx ON events_common.requests USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL;
CREATE TABLE IF NOT EXISTS assist_records
(
record_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE SET NULL,
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE SET NULL,
created_at bigint NOT NULL DEFAULT (EXTRACT(EPOCH FROM now() at time zone 'utc') * 1000)::bigint,
deleted_at timestamp without time zone NULL DEFAULT NULL,
name text NOT NULL,
file_key text NOT NULL,
duration integer NOT NULL
);
END IF;
END;
$$

View file

@ -1,12 +1,12 @@
{
"name": "peers-server",
"version": "v1.11.0",
"version": "v1.12.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "peers-server",
"version": "v1.11.0",
"version": "v1.12.0",
"license": "Elastic License 2.0 (ELv2)",
"dependencies": {
"express": "^4.18.2",
@ -57,9 +57,9 @@
"integrity": "sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA=="
},
"node_modules/@types/node": {
"version": "18.15.5",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.5.tgz",
"integrity": "sha512-Ark2WDjjZO7GmvsyFFf81MXuGTA/d6oP38anyxWOL6EREyBKAxKoFHwBhaZxCfLRLpO8JgVXwqOwSwa7jRcjew=="
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
},
"node_modules/@types/qs": {
"version": "6.9.7",

View file

@ -1,6 +1,6 @@
{
"name": "peers-server",
"version": "v1.11.0",
"version": "v1.12.0",
"description": "assist server to get live sessions & sourcemaps reader to get stack trace",
"main": "peerjs-server.js",
"scripts": {

View file

@ -26,7 +26,7 @@ telemetry:
env:
ACTION: "TELEMETRY"
report:
# Ref: https://crontab.guru/#0_5_*_*_1
# https://crontab.guru/#0_5_*_*_1
# Monday morning 5am
cron: "0 5 * * 1"
image:
@ -47,6 +47,28 @@ sessionsCleaner:
tag: ""
env:
ACTION: "JOB"
projectsStats:
# https://crontab.guru/#*/18_*_*_*_*
# Every 18 min
cron: "*/18 * * * *"
image:
repository: "{{ .Values.global.openReplayContainerRegistry }}/crons"
pullPolicy: Always
# Overrides the image tag whose default is the chart appVersion.
tag: ""
env:
ACTION: "PROJECTS_STATS"
fixProjectsStats:
# https://crontab.guru/#0_5_*_*_0
# Sunday at 5am
cron: "0 5 * * 0"
image:
repository: "{{ .Values.global.openReplayContainerRegistry }}/crons"
pullPolicy: Always
# Overrides the image tag whose default is the chart appVersion.
tag: ""
env:
ACTION: "FIX_PROJECTS_STATS"
# Common env values are from chalice for the crons
chalice:

View file

@ -0,0 +1,39 @@
DO
$$
DECLARE
previous_version CONSTANT text := 'v1.11.0';
next_version CONSTANT text := 'v1.12.0';
BEGIN
IF (SELECT openreplay_version()) = previous_version THEN
raise notice 'valid previous DB version';
ELSEIF (SELECT openreplay_version()) = next_version THEN
raise notice 'new version detected, nothing to do';
ELSE
RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version());
END IF;
END ;
$$
LANGUAGE plpgsql;
-- v1.12.0 (FOSS) schema migration, applied atomically in a single transaction.
BEGIN;
-- Bump the version marker so the guard block above recognizes this DB as upgraded.
CREATE OR REPLACE FUNCTION openreplay_version()
    RETURNS text AS
$$
SELECT 'v1.12.0'
$$ LANGUAGE sql IMMUTABLE;
-- Extend the issue taxonomy with mobile app crashes.
-- NOTE(review): ALTER TYPE ... ADD VALUE inside a transaction requires
-- PostgreSQL >= 12 — presumably the supported minimum; verify against deployment docs.
ALTER TYPE issue_type ADD VALUE IF NOT EXISTS 'app_crash';
-- Per-project usage counters, one row per (project, snapshot time),
-- maintained by the PROJECTS_STATS / FIX_PROJECTS_STATS cron jobs.
CREATE TABLE IF NOT EXISTS public.projects_stats
(
project_id integer NOT NULL,
created_at timestamp default (now() AT TIME ZONE 'utc'::text),
sessions_count integer NOT NULL DEFAULT 0,
events_count bigint NOT NULL DEFAULT 0,
last_update_at timestamp default (now() AT TIME ZONE 'utc'::text),
primary key (project_id, created_at)
);
-- Secondary index for lookups by project alone.
CREATE INDEX IF NOT EXISTS projects_stats_project_id_idx ON public.projects_stats (project_id);
COMMIT;

View file

@ -6,7 +6,7 @@ CREATE SCHEMA IF NOT EXISTS events;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
SELECT 'v1.11.0'
SELECT 'v1.12.0'
$$ LANGUAGE sql IMMUTABLE;
@ -317,7 +317,8 @@ $$
'ml_slow_resources',
'custom',
'js_exception',
'mouse_thrashing'
'mouse_thrashing',
'app_crash'
);
CREATE TABLE issues
@ -961,6 +962,18 @@ $$
is_public boolean NOT NULL DEFAULT FALSE
);
CREATE TABLE public.projects_stats
(
project_id integer NOT NULL,
created_at timestamp default (now() AT TIME ZONE 'utc'::text),
sessions_count integer NOT NULL DEFAULT 0,
events_count bigint NOT NULL DEFAULT 0,
last_update_at timestamp default (now() AT TIME ZONE 'utc'::text),
primary key (project_id, created_at)
);
CREATE INDEX IF NOT EXISTS projects_stats_project_id_idx ON public.projects_stats (project_id);
raise notice 'DB created';
END IF;
END;

View file

@ -1,15 +1,15 @@
{
"name": "sourcemaps-reader",
"version": "v1.11.0",
"version": "v1.12.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "sourcemaps-reader",
"version": "v1.11.0",
"version": "v1.12.0",
"license": "Elastic License 2.0 (ELv2)",
"dependencies": {
"aws-sdk": "^2.1314.0",
"aws-sdk": "^2.1367.0",
"express": "^4.18.2",
"source-map": "^0.7.4"
}
@ -43,9 +43,9 @@
}
},
"node_modules/aws-sdk": {
"version": "2.1333.0",
"resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1333.0.tgz",
"integrity": "sha512-MvOuleNeRryJtkCGXGEWDHPqqgxuqdi4/hGzJEpn9tnjsW9LNK8UgFPpYzUZ24ZO/3S+jiUh8DMMrL5nVGnagg==",
"version": "2.1367.0",
"resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1367.0.tgz",
"integrity": "sha512-ZlN3iXazEVPwjmQzC1TfkRUPOKruF6RkAFnVz4hOPjQQT91RYi2lCRWtipWk4ZoONBLX7gFLGUgIfiHjf/A+iA==",
"dependencies": {
"buffer": "4.9.2",
"events": "1.1.1",
@ -56,7 +56,7 @@
"url": "0.10.3",
"util": "^0.12.4",
"uuid": "8.0.0",
"xml2js": "0.4.19"
"xml2js": "0.5.0"
},
"engines": {
"node": ">= 10.0.0"
@ -843,18 +843,21 @@
}
},
"node_modules/xml2js": {
"version": "0.4.19",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz",
"integrity": "sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==",
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz",
"integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==",
"dependencies": {
"sax": ">=0.6.0",
"xmlbuilder": "~9.0.1"
"xmlbuilder": "~11.0.0"
},
"engines": {
"node": ">=4.0.0"
}
},
"node_modules/xmlbuilder": {
"version": "9.0.7",
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz",
"integrity": "sha512-7YXTQc3P2l9+0rjaUbLwMKRhtmwg1M1eDf6nag7urC7pIPYLD9W/jmzQ4ptRSUbodw5S0jfoGTflLemQibSpeQ==",
"version": "11.0.1",
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz",
"integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==",
"engines": {
"node": ">=4.0"
}

View file

@ -1,6 +1,6 @@
{
"name": "sourcemaps-reader",
"version": "v1.11.0",
"version": "v1.12.0",
"description": "assist server to get live sessions & sourcemaps reader to get stack trace",
"main": "peerjs-server.js",
"scripts": {
@ -18,7 +18,7 @@
},
"homepage": "https://github.com/openreplay/openreplay#readme",
"dependencies": {
"aws-sdk": "^2.1314.0",
"aws-sdk": "^2.1367.0",
"express": "^4.18.2",
"source-map": "^0.7.4"
}