Merge remote-tracking branch 'origin/api-v1.8.2' into dev

# Conflicts:
#	ee/scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql

commit e5f9f5ebd6
69 changed files with 2779 additions and 700 deletions

@@ -1,28 +1,22 @@
 FROM python:3.10-alpine
-LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
+LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
-RUN apk add --no-cache build-base nodejs npm tini
+RUN apk add --no-cache build-base tini
 ARG envarg
-# Add Tini
+# Startup daemon
-ENV SOURCE_MAP_VERSION=0.7.4 \
-    APP_NAME=chalice \
+ENV APP_NAME=chalice \
     LISTEN_PORT=8000 \
-    MAPPING_WASM=/work/sourcemap-reader/mappings.wasm \
     ENTERPRISE_BUILD=${envarg}

-ADD https://unpkg.com/source-map@${SOURCE_MAP_VERSION}/lib/mappings.wasm /mappings.wasm
-
 WORKDIR /work_tmp
 COPY requirements.txt /work_tmp/requirements.txt
 RUN pip install --no-cache-dir --upgrade -r /work_tmp/requirements.txt
-COPY sourcemap-reader/*.json /work_tmp/
-RUN cd /work_tmp && npm install

 WORKDIR /work
 COPY . .
-RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/. \
-    && mv /mappings.wasm ${MAPPING_WASM} && chmod 644 ${MAPPING_WASM}
+RUN mv env.default .env

 RUN adduser -u 1001 openreplay -D
 USER 1001

api/app.py (64 changed lines)

@@ -20,22 +20,14 @@ app.add_middleware(GZipMiddleware, minimum_size=1000)
 @app.middleware('http')
 async def or_middleware(request: Request, call_next):
-    global OR_SESSION_TOKEN
-    OR_SESSION_TOKEN = request.headers.get('vnd.openreplay.com.sid', request.headers.get('vnd.asayer.io.sid'))
-    try:
-        if helper.TRACK_TIME:
-            import time
-            now = int(time.time() * 1000)
-        response: StreamingResponse = await call_next(request)
-        if helper.TRACK_TIME:
-            now = int(time.time() * 1000) - now
-            if now > 500:
-                print(f"Execution time: {now} ms")
-    except Exception as e:
-        pg_client.close()
-        raise e
-    pg_client.close()
+    if helper.TRACK_TIME:
+        import time
+        now = int(time.time() * 1000)
+    response: StreamingResponse = await call_next(request)
+    if helper.TRACK_TIME:
+        now = int(time.time() * 1000) - now
+        if now > 500:
+            logging.info(f"Execution time: {now} ms")
     return response

@@ -61,14 +53,38 @@ app.include_router(metrics.app)
 app.include_router(insights.app)
 app.include_router(v1_api.app_apikey)

-Schedule = AsyncIOScheduler()
-Schedule.start()
+loglevel = config("LOGLEVEL", default=logging.INFO)
+print(f">Loglevel set to: {loglevel}")
+logging.basicConfig(level=loglevel)
+ap_logger = logging.getLogger('apscheduler')
+ap_logger.setLevel(loglevel)
+app.schedule = AsyncIOScheduler()

-for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs:
-    Schedule.add_job(id=job["func"].__name__, **job)

-for job in Schedule.get_jobs():
-    print({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
+@app.on_event("startup")
+async def startup():
+    logging.info(">>>>> starting up <<<<<")
+    await pg_client.init()
+    app.schedule.start()

-logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
-logging.getLogger('apscheduler').setLevel(config("LOGLEVEL", default=logging.INFO))
+    for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs:
+        app.schedule.add_job(id=job["func"].__name__, **job)
+
+    ap_logger.info(">Scheduled jobs:")
+    for job in app.schedule.get_jobs():
+        ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
+
+
+@app.on_event("shutdown")
+async def shutdown():
+    logging.info(">>>>> shutting down <<<<<")
+    app.schedule.shutdown(wait=False)
+    await pg_client.terminate()
+
+
+@app.get('/private/shutdown', tags=["private"])
+async def stop_server():
+    logging.info("Requested shutdown")
+    await shutdown()
+    import os, signal
+    os.kill(1, signal.SIGTERM)
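
For reference, the lifecycle pattern this file converges on — create resources in FastAPI's startup hook, tear them down in shutdown — can be sketched in isolation as follows. This is a minimal sketch, not the project's exact code; init() and terminate() are stand-ins for pg_client.init() and pg_client.terminate():

    import logging
    from apscheduler.schedulers.asyncio import AsyncIOScheduler
    from fastapi import FastAPI

    app = FastAPI()
    app.schedule = AsyncIOScheduler()

    async def init():        # stand-in for pg_client.init(): build the connection pool
        logging.info("pool created")

    async def terminate():   # stand-in for pg_client.terminate(): close all pooled connections
        logging.info("pool closed")

    @app.on_event("startup")
    async def startup():
        await init()                       # resources first,
        app.schedule.start()               # then background jobs (needs the running event loop)

    @app.on_event("shutdown")
    async def shutdown():
        app.schedule.shutdown(wait=False)  # stop scheduling new runs,
        await terminate()                  # then release the pool

Starting the AsyncIOScheduler inside the startup hook, rather than at import time as before, guarantees an event loop exists when it starts.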

@@ -3,11 +3,12 @@ import logging
 from apscheduler.schedulers.asyncio import AsyncIOScheduler
 from decouple import config
 from fastapi import FastAPI
+from chalicelib.utils import pg_client

 from chalicelib.core import alerts_processor

 app = FastAPI(root_path="/alerts", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""))
-print("============= ALERTS =============")
+logging.info("============= ALERTS =============")


 @app.get("/")

@@ -16,12 +17,39 @@ async def root():

-app.schedule = AsyncIOScheduler()
-app.schedule.start()
-app.schedule.add_job(id="alerts_processor", **{"func": alerts_processor.process, "trigger": "interval",
-                                               "minutes": config("ALERTS_INTERVAL", cast=int, default=5),
-                                               "misfire_grace_time": 20})
-for job in app.schedule.get_jobs():
-    print({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})

-logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
-logging.getLogger('apscheduler').setLevel(config("LOGLEVEL", default=logging.INFO))
+loglevel = config("LOGLEVEL", default=logging.INFO)
+print(f">Loglevel set to: {loglevel}")
+logging.basicConfig(level=loglevel)
+ap_logger = logging.getLogger('apscheduler')
+ap_logger.setLevel(loglevel)
+app.schedule = AsyncIOScheduler()
+
+
+@app.on_event("startup")
+async def startup():
+    logging.info(">>>>> starting up <<<<<")
+    await pg_client.init()
+    app.schedule.start()
+    app.schedule.add_job(id="alerts_processor", **{"func": alerts_processor.process, "trigger": "interval",
+                                                   "minutes": config("ALERTS_INTERVAL", cast=int, default=5),
+                                                   "misfire_grace_time": 20})
+
+    ap_logger.info(">Scheduled jobs:")
+    for job in app.schedule.get_jobs():
+        ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
+
+
+@app.on_event("shutdown")
+async def shutdown():
+    logging.info(">>>>> shutting down <<<<<")
+    app.schedule.shutdown(wait=False)
+    await pg_client.terminate()
+
+
+@app.get('/private/shutdown', tags=["private"])
+async def stop_server():
+    logging.info("Requested shutdown")
+    await shutdown()
+    import os, signal
+    os.kill(1, signal.SIGTERM)

@@ -17,8 +17,8 @@ class ProjectAuthorizer:
         current_user: schemas.CurrentContext = await OR_context(request)
         value = request.path_params[self.project_identifier]
         if (self.project_identifier == "projectId" \
-            and not (isinstance(value, int) or isinstance(value, str) and value.isnumeric())
-            and projects.get_project(project_id=value, tenant_id=current_user.tenant_id) is None) \
+            and (not (isinstance(value, int) or isinstance(value, str) and value.isnumeric())
+                 or projects.get_project(project_id=value, tenant_id=current_user.tenant_id) is None)) \
                 or (self.project_identifier == "projectKey" \
                     and projects.get_internal_project_id(project_key=value) is None):
             print("project not found")

@@ -20,8 +20,6 @@ check_prereq() {
 function build_api(){
     cp -R ../api ../_api
     cd ../_api
-    cp -R ../utilities/utils ../sourcemap-reader/.
-    cp -R ../sourcemap-reader .
     tag=""
     # Copy enterprise code
     [[ $1 == "ee" ]] && {

@@ -61,10 +61,10 @@ def __get_live_sessions_ws(project_id, data):
             return {"total": 0, "sessions": []}
         live_peers = results.json().get("data", [])
     except requests.exceptions.Timeout:
-        print("Timeout getting Assist response")
+        print("!! Timeout getting Assist response")
         live_peers = {"total": 0, "sessions": []}
     except Exception as e:
-        print("issue getting Live-Assist response")
+        print("!! Issue getting Live-Assist response")
         print(str(e))
         print("expected JSON, received:")
         try:

@@ -89,11 +89,11 @@ def __get_agent_token(project_id, project_key, session_id):
             "projectId": project_id,
             "sessionId": session_id,
             "iat": iat // 1000,
-            "exp": iat // 1000 + config("JWT_EXP_DELTA_SECONDS", cast=int) + TimeUTC.get_utc_offset() // 1000,
+            "exp": iat // 1000 + config("ASSIST_JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000,
             "iss": config("JWT_ISSUER"),
             "aud": f"openreplay:agent"
         },
-        key=config("jwt_secret"),
+        key=config("ASSIST_JWT_SECRET"),
         algorithm=config("jwt_algorithm")
     )

@@ -116,7 +116,7 @@ def get_live_session_by_id(project_id, session_id):
         print("!! Timeout getting Assist response")
         return None
     except Exception as e:
-        print("issue getting Assist response")
+        print("!! Issue getting Assist response")
         print(str(e))
         print("expected JSON, received:")
         try:

@@ -139,10 +139,10 @@ def is_live(project_id, session_id, project_key=None):
             return False
         results = results.json().get("data")
     except requests.exceptions.Timeout:
-        print("Timeout getting Assist response")
+        print("!! Timeout getting Assist response")
         return False
     except Exception as e:
-        print("issue getting Assist response")
+        print("!! Issue getting Assist response")
         print(str(e))
         print("expected JSON, received:")
         try:

@@ -168,10 +168,10 @@ def autocomplete(project_id, q: str, key: str = None):
             return {"errors": [f"Something went wrong wile calling assist:{results.text}"]}
         results = results.json().get("data", [])
     except requests.exceptions.Timeout:
-        print("Timeout getting Assist response")
+        print("!! Timeout getting Assist response")
         return {"errors": ["Assist request timeout"]}
     except Exception as e:
-        print("issue getting Assist response")
+        print("!! Issue getting Assist response")
         print(str(e))
         print("expected JSON, received:")
         try:

@@ -250,7 +250,7 @@ def session_exists(project_id, session_id):
         print("!! Timeout getting Assist response")
         return False
     except Exception as e:
-        print("issue getting Assist response")
+        print("!! Issue getting Assist response")
         print(str(e))
         print("expected JSON, received:")
         try:

@@ -15,7 +15,7 @@ def jwt_authorizer(token):
             token[1],
             config("jwt_secret"),
             algorithms=config("jwt_algorithm"),
-            audience=[f"plugin:{helper.get_stage_name()}", f"front:{helper.get_stage_name()}"]
+            audience=[f"front:{helper.get_stage_name()}"]
         )
     except jwt.ExpiredSignatureError:
         print("! JWT Expired signature")

@@ -42,7 +42,7 @@ def generate_jwt(id, tenant_id, iat, aud):
         payload={
             "userId": id,
             "tenantId": tenant_id,
-            "exp": iat // 1000 + config("ASSIST_JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000,
+            "exp": iat // 1000 + config("JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000,
             "iss": config("JWT_ISSUER"),
             "iat": iat // 1000,
             "aud": aud

@@ -35,24 +35,57 @@ class Slack:
         return True

     @classmethod
-    def send_text(cls, tenant_id, webhook_id, text, **args):
+    def send_text_attachments(cls, tenant_id, webhook_id, text, **args):
         integration = cls.__get(tenant_id=tenant_id, integration_id=webhook_id)
         if integration is None:
             return {"errors": ["slack integration not found"]}
-        print("====> sending slack notification")
-        r = requests.post(
-            url=integration["endpoint"],
-            json={
-                "attachments": [
-                    {
-                        "text": text,
-                        "ts": datetime.now().timestamp(),
-                        **args
-                    }
-                ]
-            })
-        print(r)
-        print(r.text)
+        try:
+            r = requests.post(
+                url=integration["endpoint"],
+                json={
+                    "attachments": [
+                        {
+                            "text": text,
+                            "ts": datetime.now().timestamp(),
+                            **args
+                        }
+                    ]
+                },
+                timeout=5)
+            if r.status_code != 200:
+                print(f"!! issue sending slack text attachments; webhookId:{webhook_id} code:{r.status_code}")
+                print(r.text)
+                return None
+        except requests.exceptions.Timeout:
+            print(f"!! Timeout sending slack text attachments webhookId:{webhook_id}")
+            return None
+        except Exception as e:
+            print(f"!! Issue sending slack text attachments webhookId:{webhook_id}")
+            print(str(e))
+            return None
         return {"data": r.text}

+    @classmethod
+    def send_raw(cls, tenant_id, webhook_id, body):
+        integration = cls.__get(tenant_id=tenant_id, integration_id=webhook_id)
+        if integration is None:
+            return {"errors": ["slack integration not found"]}
+        try:
+            r = requests.post(
+                url=integration["endpoint"],
+                json=body,
+                timeout=5)
+            if r.status_code != 200:
+                print(f"!! issue sending slack raw; webhookId:{webhook_id} code:{r.status_code}")
+                print(r.text)
+                return None
+        except requests.exceptions.Timeout:
+            print(f"!! Timeout sending slack raw webhookId:{webhook_id}")
+            return None
+        except Exception as e:
+            print(f"!! Issue sending slack raw webhookId:{webhook_id}")
+            print(str(e))
+            return None
+        return {"data": r.text}
+
     @classmethod

@@ -83,6 +83,19 @@ def __process_tags(row):
 def get_details(project_id, error_id, user_id, **data):
     pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24")
     pg_sub_query24.append("error_id = %(error_id)s")
+    pg_sub_query30_session = __get_basic_constraints(time_constraint=True, chart=False,
+                                                     startTime_arg_name="startDate30",
+                                                     endTime_arg_name="endDate30", project_key="sessions.project_id")
+    pg_sub_query30_session.append("sessions.start_ts >= %(startDate30)s")
+    pg_sub_query30_session.append("sessions.start_ts <= %(endDate30)s")
+    pg_sub_query30_session.append("error_id = %(error_id)s")
+    pg_sub_query30_err = __get_basic_constraints(time_constraint=True, chart=False, startTime_arg_name="startDate30",
+                                                 endTime_arg_name="endDate30", project_key="errors.project_id")
+    pg_sub_query30_err.append("sessions.project_id = %(project_id)s")
+    pg_sub_query30_err.append("sessions.start_ts >= %(startDate30)s")
+    pg_sub_query30_err.append("sessions.start_ts <= %(endDate30)s")
+    pg_sub_query30_err.append("error_id = %(error_id)s")
+    pg_sub_query30_err.append("source ='js_exception'")
     pg_sub_query30 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size30")
     pg_sub_query30.append("error_id = %(error_id)s")
     pg_basic_query = __get_basic_constraints(time_constraint=False)

@@ -121,50 +134,49 @@ def get_details(project_id, error_id, user_id, **data):
                     device_partition,
                     country_partition,
                     chart24,
-                    chart30
+                    chart30,
+                    custom_tags
             FROM (SELECT error_id,
                          name,
                          message,
-                         COUNT(DISTINCT user_uuid) AS users,
+                         COUNT(DISTINCT user_id) AS users,
                          COUNT(DISTINCT session_id) AS sessions
                   FROM public.errors
                            INNER JOIN events.errors AS s_errors USING (error_id)
                            INNER JOIN public.sessions USING (session_id)
-                  WHERE error_id = %(error_id)s
+                  WHERE {" AND ".join(pg_sub_query30_err)}
                   GROUP BY error_id, name, message) AS details
-            INNER JOIN (SELECT error_id,
-                               MAX(timestamp) AS last_occurrence,
+            INNER JOIN (SELECT MAX(timestamp) AS last_occurrence,
                                MIN(timestamp) AS first_occurrence
                         FROM events.errors
                         WHERE error_id = %(error_id)s
-                        GROUP BY error_id) AS time_details USING (error_id)
-            INNER JOIN (SELECT error_id,
-                               session_id AS last_session_id,
-                               user_os,
-                               user_os_version,
-                               user_browser,
-                               user_browser_version,
-                               user_device,
-                               user_device_type,
-                               user_uuid
-                        FROM events.errors INNER JOIN public.sessions USING (session_id)
+                        GROUP BY error_id) AS time_details ON (TRUE)
+            INNER JOIN (SELECT session_id AS last_session_id,
+                               coalesce(custom_tags, '[]')::jsonb AS custom_tags
+                        FROM events.errors
+                                 LEFT JOIN LATERAL (
+                            SELECT jsonb_agg(jsonb_build_object(errors_tags.key, errors_tags.value)) AS custom_tags
+                            FROM errors_tags
+                            WHERE errors_tags.error_id = %(error_id)s
+                              AND errors_tags.session_id = errors.session_id
+                              AND errors_tags.message_id = errors.message_id) AS errors_tags ON (TRUE)
                         WHERE error_id = %(error_id)s
                         ORDER BY errors.timestamp DESC
-                        LIMIT 1) AS last_session_details USING (error_id)
+                        LIMIT 1) AS last_session_details ON (TRUE)
            INNER JOIN (SELECT jsonb_agg(browser_details) AS browsers_partition
                        FROM (SELECT *
                              FROM (SELECT user_browser AS name,
                                           COUNT(session_id) AS count
                                    FROM events.errors
                                             INNER JOIN sessions USING (session_id)
-                                   WHERE {" AND ".join(pg_basic_query)}
+                                   WHERE {" AND ".join(pg_sub_query30_session)}
                                    GROUP BY user_browser
                                    ORDER BY count DESC) AS count_per_browser_query
                                       INNER JOIN LATERAL (SELECT JSONB_AGG(version_details) AS partition
                                                           FROM (SELECT user_browser_version AS version,
                                                                        COUNT(session_id) AS count
                                                                 FROM events.errors INNER JOIN public.sessions USING (session_id)
-                                                                WHERE {" AND ".join(pg_basic_query)}
+                                                                WHERE {" AND ".join(pg_sub_query30_session)}
                                                                   AND sessions.user_browser = count_per_browser_query.name
                                                                 GROUP BY user_browser_version
                                                                 ORDER BY count DESC) AS version_details

@@ -174,13 +186,13 @@ def get_details(project_id, error_id, user_id, **data):
                        FROM (SELECT user_os AS name,
                                     COUNT(session_id) AS count
                              FROM events.errors INNER JOIN public.sessions USING (session_id)
-                             WHERE {" AND ".join(pg_basic_query)}
+                             WHERE {" AND ".join(pg_sub_query30_session)}
                              GROUP BY user_os
                              ORDER BY count DESC) AS count_per_os_details
                        INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition
                                            FROM (SELECT COALESCE(user_os_version,'unknown') AS version, COUNT(session_id) AS count
                                                  FROM events.errors INNER JOIN public.sessions USING (session_id)
-                                                 WHERE {" AND ".join(pg_basic_query)}
+                                                 WHERE {" AND ".join(pg_sub_query30_session)}
                                                    AND sessions.user_os = count_per_os_details.name
                                                  GROUP BY user_os_version
                                                  ORDER BY count DESC) AS count_per_version_details

@@ -191,7 +203,7 @@ def get_details(project_id, error_id, user_id, **data):
                        FROM (SELECT user_device_type AS name,
                                     COUNT(session_id) AS count
                              FROM events.errors INNER JOIN public.sessions USING (session_id)
-                             WHERE {" AND ".join(pg_basic_query)}
+                             WHERE {" AND ".join(pg_sub_query30_session)}
                              GROUP BY user_device_type
                              ORDER BY count DESC) AS count_per_device_details
                        INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_v_details) AS partition

@@ -201,7 +213,7 @@ def get_details(project_id, error_id, user_id, **data):
                                                       ELSE user_device END AS version,
                                                  COUNT(session_id) AS count
                                            FROM events.errors INNER JOIN public.sessions USING (session_id)
-                                           WHERE {" AND ".join(pg_basic_query)}
+                                           WHERE {" AND ".join(pg_sub_query30_session)}
                                              AND sessions.user_device_type = count_per_device_details.name
                                            GROUP BY user_device
                                            ORDER BY count DESC) AS count_per_device_v_details

@@ -211,7 +223,7 @@ def get_details(project_id, error_id, user_id, **data):
                        FROM (SELECT user_country AS name,
                                     COUNT(session_id) AS count
                              FROM events.errors INNER JOIN public.sessions USING (session_id)
-                             WHERE {" AND ".join(pg_basic_query)}
+                             WHERE {" AND ".join(pg_sub_query30_session)}
                              GROUP BY user_country
                              ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE)
            INNER JOIN (SELECT jsonb_agg(chart_details) AS chart24

@@ -3,7 +3,7 @@ from typing import List
 import schemas
 from chalicelib.core import events, metadata, events_ios, \
     sessions_mobs, issues, projects, errors, resources, assist, performance_event, sessions_viewed, sessions_favorite, \
-    sessions_devtool
+    sessions_devtool, sessions_notes
 from chalicelib.utils import pg_client, helper, metrics_helper

 SESSION_PROJECTION_COLS = """s.project_id,

@@ -40,8 +40,8 @@ def __group_metadata(session, project_metadata):
     return meta


-def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_viewed=False, group_metadata=False,
-                  live=True):
+def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False,
+                  group_metadata=False, live=True):
     with pg_client.PostgresClient() as cur:
         extra_query = []
         if include_fav_viewed:

@@ -64,7 +64,7 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
                 FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""}
                 WHERE s.project_id = %(project_id)s
                     AND s.session_id = %(session_id)s;""",
-            {"project_id": project_id, "session_id": session_id, "userId": user_id}
+            {"project_id": project_id, "session_id": session_id, "userId": context.user_id}
         )
         # print("===============")
         # print(query)

@@ -95,14 +95,16 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
                 data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
                                                                          session_id=session_id)
                 data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id)
+                data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id)
                 data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id)
                 data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id,
                                                                 start_ts=data["startTs"], duration=data["duration"])
+                data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id,
+                                                                 session_id=session_id, user_id=context.user_id)
             data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
             data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)
-            data['live'] = live and assist.is_live(project_id=project_id,
-                                                   session_id=session_id,
+            data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id,
                                                    project_key=data["projectKey"])
             data["inDB"] = True
             return data

@@ -1,37 +1,39 @@
 import schemas
 from chalicelib.core import sessions
 from chalicelib.utils import pg_client


-def add_favorite_session(project_id, user_id, session_id):
+def add_favorite_session(context: schemas.CurrentContext, project_id, session_id):
     with pg_client.PostgresClient() as cur:
         cur.execute(
             cur.mogrify(f"""\
                 INSERT INTO public.user_favorite_sessions(user_id, session_id)
                 VALUES (%(userId)s,%(session_id)s);""",
-                        {"userId": user_id, "session_id": session_id})
+                        {"userId": context.user_id, "session_id": session_id})
         )
-    return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False,
-                                  include_fav_viewed=True)
+    return sessions.get_by_id2_pg(context=context, project_id=project_id, session_id=session_id,
+                                  full_data=False, include_fav_viewed=True)


-def remove_favorite_session(project_id, user_id, session_id):
+def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id):
     with pg_client.PostgresClient() as cur:
         cur.execute(
             cur.mogrify(f"""\
                 DELETE FROM public.user_favorite_sessions
                 WHERE user_id = %(userId)s
                     AND session_id = %(session_id)s;""",
-                        {"userId": user_id, "session_id": session_id})
+                        {"userId": context.user_id, "session_id": session_id})
         )
-    return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False,
-                                  include_fav_viewed=True)
+    return sessions.get_by_id2_pg(context=context, project_id=project_id, session_id=session_id,
+                                  full_data=False, include_fav_viewed=True)


-def favorite_session(project_id, user_id, session_id):
-    if favorite_session_exists(user_id=user_id, session_id=session_id):
-        return remove_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id)
+def favorite_session(context: schemas.CurrentContext, project_id, session_id):
+    if favorite_session_exists(user_id=context.user_id, session_id=session_id):
+        return remove_favorite_session(context=context, project_id=project_id,
+                                       session_id=session_id)

-    return add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id)
+    return add_favorite_session(context=context, project_id=project_id, session_id=session_id)


 def favorite_session_exists(user_id, session_id):

@@ -61,4 +63,4 @@ def get_start_end_timestamp(project_id, user_id):
                         {"userId": user_id, "project_id": project_id})
         )
         r = cur.fetchone()
-    return (0, 0) if r is None else (r["max_start_ts"], r["min_start_ts"])
+    return (0, 0) if r is None else (r["min_start_ts"], r["max_start_ts"])

@@ -26,6 +26,26 @@ def get_urls(project_id, session_id):
     return results


+def get_urls_depercated(session_id):
+    return [
+        client.generate_presigned_url(
+            'get_object',
+            Params={
+                'Bucket': config("sessions_bucket"),
+                'Key': str(session_id)
+            },
+            ExpiresIn=100000
+        ),
+        client.generate_presigned_url(
+            'get_object',
+            Params={
+                'Bucket': config("sessions_bucket"),
+                'Key': str(session_id) + "e"
+            },
+            ExpiresIn=100000
+        )]
+
+
 def get_ios(session_id):
     return client.generate_presigned_url(
         'get_object',

api/chalicelib/core/sessions_notes.py (new file, 166 lines)

@@ -0,0 +1,166 @@
+from urllib.parse import urljoin
+
+from decouple import config
+
+import schemas
+from chalicelib.core import sessions
+from chalicelib.core.collaboration_slack import Slack
+from chalicelib.utils import pg_client, helper
+from chalicelib.utils.TimeUTC import TimeUTC
+
+
+def get_note(tenant_id, project_id, user_id, note_id, share=None):
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS creator_name
+                                       {",(SELECT name FROM users WHERE user_id=%(share)s AND deleted_at ISNULL) AS share_name" if share else ""}
+                                FROM sessions_notes INNER JOIN users USING (user_id)
+                                WHERE sessions_notes.project_id = %(project_id)s
+                                  AND sessions_notes.note_id = %(note_id)s
+                                  AND sessions_notes.deleted_at IS NULL
+                                  AND (sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public);""",
+                            {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id,
+                             "note_id": note_id, "share": share})
+
+        cur.execute(query=query)
+        row = cur.fetchone()
+        row = helper.dict_to_camel_case(row)
+        if row:
+            row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
+    return row
+
+
+def get_session_notes(tenant_id, project_id, session_id, user_id):
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(f"""SELECT sessions_notes.*
+                                FROM sessions_notes
+                                WHERE sessions_notes.project_id = %(project_id)s
+                                  AND sessions_notes.deleted_at IS NULL
+                                  AND sessions_notes.session_id = %(session_id)s
+                                  AND (sessions_notes.user_id = %(user_id)s
+                                       OR sessions_notes.is_public)
+                                ORDER BY created_at DESC;""",
+                            {"project_id": project_id, "user_id": user_id,
+                             "tenant_id": tenant_id, "session_id": session_id})
+
+        cur.execute(query=query)
+        rows = cur.fetchall()
+        rows = helper.list_to_camel_case(rows)
+        for row in rows:
+            row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
+    return rows
+
+
+def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema):
+    with pg_client.PostgresClient() as cur:
+        conditions = ["sessions_notes.project_id = %(project_id)s", "sessions_notes.deleted_at IS NULL"]
+        extra_params = {}
+        if data.tags and len(data.tags) > 0:
+            k = "tag_value"
+            conditions.append(
+                sessions._multiple_conditions(f"%({k})s = sessions_notes.tag", data.tags, value_key=k))
+            extra_params = sessions._multiple_values(data.tags, value_key=k)
+        if data.shared_only:
+            conditions.append("sessions_notes.is_public")
+        elif data.mine_only:
+            conditions.append("sessions_notes.user_id = %(user_id)s")
+        else:
+            conditions.append("(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public)")
+        query = cur.mogrify(f"""SELECT sessions_notes.*
+                                FROM sessions_notes
+                                WHERE {" AND ".join(conditions)}
+                                ORDER BY created_at {data.order}
+                                LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""",
+                            {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params})
+
+        cur.execute(query=query)
+        rows = cur.fetchall()
+        rows = helper.list_to_camel_case(rows)
+        for row in rows:
+            row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
+    return rows
+
+
+def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNoteSchema):
+    with pg_client.PostgresClient() as cur:
+        query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public)
+                                VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s)
+                                RETURNING *;""",
+                            {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict()})
+        cur.execute(query)
+        result = helper.dict_to_camel_case(cur.fetchone())
+        if result:
+            result["createdAt"] = TimeUTC.datetime_to_timestamp(result["createdAt"])
+    return result
+
+
+def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema):
+    sub_query = []
+    if data.message is not None:
+        sub_query.append("message = %(message)s")
+    if data.tag is not None and len(data.tag) > 0:
+        sub_query.append("tag = %(tag)s")
+    if data.is_public is not None:
+        sub_query.append("is_public = %(is_public)s")
+    if data.timestamp is not None:
+        sub_query.append("timestamp = %(timestamp)s")
+    with pg_client.PostgresClient() as cur:
+        cur.execute(
+            cur.mogrify(f"""UPDATE public.sessions_notes
+                            SET {" ,".join(sub_query)}
+                            WHERE project_id = %(project_id)s
+                              AND user_id = %(user_id)s
+                              AND note_id = %(note_id)s
+                              AND deleted_at ISNULL
+                            RETURNING *;""",
+                        {"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.dict()})
+        )
+        row = helper.dict_to_camel_case(cur.fetchone())
+        if row:
+            row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
+    return row
+
+
+def delete(tenant_id, user_id, project_id, note_id):
+    with pg_client.PostgresClient() as cur:
+        cur.execute(
+            cur.mogrify("""UPDATE public.sessions_notes
+                           SET deleted_at = timezone('utc'::text, now())
+                           WHERE note_id = %(note_id)s
+                             AND project_id = %(project_id)s
+                             AND user_id = %(user_id)s
+                             AND deleted_at ISNULL;""",
+                        {"project_id": project_id, "user_id": user_id, "note_id": note_id})
+        )
+    return {"data": {"state": "success"}}
+
+
+def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
+    note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
+    if note is None:
+        return {"errors": ["Note not found"]}
+    session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/sessions/{note['sessionId']}")
+    title = f"<{session_url}|Note for session {note['sessionId']}>"
+
+    blocks = [{"type": "section",
+               "fields": [{"type": "mrkdwn",
+                           "text": title}]},
+              {"type": "section",
+               "fields": [{"type": "plain_text",
+                           "text": note["message"]}]}]
+    if note["tag"]:
+        blocks.append({"type": "context",
+                       "elements": [{"type": "plain_text",
+                                     "text": f"Tag: *{note['tag']}*"}]})
+    bottom = f"Created by {note['creatorName'].capitalize()}"
+    if user_id != note["userId"]:
+        bottom += f"\nSent by {note['shareName']}: "
+    blocks.append({"type": "context",
+                   "elements": [{"type": "plain_text",
+                                 "text": bottom}]})
+    return Slack.send_raw(
+        tenant_id=tenant_id,
+        webhook_id=webhook_id,
+        body={"blocks": blocks}
+    )
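
A rough usage sketch of the new module; the identifiers below are hypothetical, and the schema fields (message, tag, timestamp, is_public) are the ones the INSERT above consumes via data.dict():

    import schemas
    from chalicelib.core import sessions_notes

    # hypothetical tenant/user/project/session ids, for illustration only
    note = sessions_notes.create(
        tenant_id=1, user_id=7, project_id=42, session_id=1001,
        data=schemas.SessionNoteSchema(message="checkout bug starts here", tag="ISSUE",
                                       timestamp=1650000000000, is_public=True))

    # notes visible to this user for one session: their own plus public ones
    rows = sessions_notes.get_session_notes(tenant_id=1, project_id=42,
                                            session_id=1001, user_id=7)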

@@ -1,7 +1,5 @@
 import json

-from decouple import config
-
 import schemas
 from chalicelib.core import users, telemetry, tenants
 from chalicelib.utils import captcha

@@ -20,55 +18,41 @@ def create_step1(data: schemas.UserSignupSchema):
     print(f"=====================> {email}")
     password = data.password

-    print("Verifying email validity")
-    if email is None or len(email) < 5 or not helper.is_valid_email(email):
+    if email is None or len(email) < 5:
         errors.append("Invalid email address.")
     else:
-        print("Verifying email existance")
         if users.email_exists(email):
             errors.append("Email address already in use.")
         if users.get_deleted_user_by_email(email) is not None:
             errors.append("Email address previously deleted.")

-    print("Verifying captcha")
     if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
         errors.append("Invalid captcha.")

-    print("Verifying password validity")
     if len(password) < 6:
         errors.append("Password is too short, it must be at least 6 characters long.")

-    print("Verifying fullname validity")
     fullname = data.fullname
     if fullname is None or len(fullname) < 1 or not helper.is_alphabet_space_dash(fullname):
         errors.append("Invalid full name.")

-    print("Verifying company's name validity")
-    company_name = data.organizationName
-    if company_name is None or len(company_name) < 1:
-        errors.append("invalid organization's name")
-
-    print("Verifying project's name validity")
-    project_name = data.projectName
-    if project_name is None or len(project_name) < 1:
-        project_name = "my first project"
+    organization_name = data.organizationName
+    if organization_name is None or len(organization_name) < 1:
+        errors.append("Invalid organization name.")

     if len(errors) > 0:
-        print("==> error")
+        print(f"==> error for email:{data.email}, fullname:{data.fullname}, organizationName:{data.organizationName}")
         print(errors)
         return {"errors": errors}
-    print("No errors detected")

+    project_name = "my first project"
     params = {
-        "email": email, "password": password,
-        "fullname": fullname,
-        "projectName": project_name,
-        "data": json.dumps({"lastAnnouncementView": TimeUTC.now()}),
-        "organizationName": company_name
+        "email": email, "password": password, "fullname": fullname, "projectName": project_name,
+        "data": json.dumps({"lastAnnouncementView": TimeUTC.now()}), "organizationName": organization_name
     }
-    query = f"""\
-                WITH t AS (
-                    INSERT INTO public.tenants (name, version_number)
-                        VALUES (%(organizationName)s, (SELECT openreplay_version()))
+    query = f"""WITH t AS (
+                    INSERT INTO public.tenants (name)
+                        VALUES (%(organizationName)s)
                         RETURNING api_key
                 ),
                 u AS (

@@ -106,7 +90,7 @@ def create_step1(data: schemas.UserSignupSchema):
     }
     c = {
         "tenantId": 1,
-        "name": company_name,
+        "name": organization_name,
         "apiKey": api_key,
         "remainingTrial": 14,
         "trialEnded": False,

@@ -4,17 +4,6 @@ from decouple import config
 from chalicelib.core.collaboration_slack import Slack


-def send(notification, destination):
-    if notification is None:
-        return
-    return Slack.send_text(tenant_id=notification["tenantId"],
-                           webhook_id=destination,
-                           text=notification["description"] \
-                                + f"\n<{config('SITE_URL')}{notification['buttonUrl']}|{notification['buttonText']}>",
-                           title=notification["title"],
-                           title_link=notification["buttonUrl"], )
-
-
 def send_batch(notifications_list):
     if notifications_list is None or len(notifications_list) == 0:
         return

@@ -1,11 +1,11 @@
-from decouple import config
-from chalicelib.utils import helper
-from chalicelib.utils import s3
 import hashlib
 from urllib.parse import urlparse

+import requests
+from decouple import config
+
+from chalicelib.core import sourcemaps_parser
+from chalicelib.utils import s3


 def __get_key(project_id, url):

@@ -73,6 +73,11 @@ def format_payload(p, truncate_to_first=False):
     return []


+def url_exists(url):
+    r = requests.head(url, allow_redirects=False)
+    return r.status_code == 200 and r.headers.get("Content-Type") != "text/html"
+
+
 def get_traces_group(project_id, payload):
     frames = format_payload(payload)

@@ -80,25 +85,45 @@
     payloads = {}
     all_exists = True
     for i, u in enumerate(frames):
-        key = __get_key(project_id, u["absPath"])  # use filename instead?
+        file_exists_in_bucket = False
+        file_exists_in_server = False
+        file_url = u["absPath"]
+        key = __get_key(project_id, file_url)  # use filename instead?
+        params_idx = file_url.find("?")
+        if file_url and len(file_url) > 0 \
+                and not (file_url[:params_idx] if params_idx > -1 else file_url).endswith(".js"):
+            print(f"{u['absPath']} sourcemap is not a JS file")
+            payloads[key] = None
+            continue
+
         if key not in payloads:
-            file_exists = s3.exists(config('sourcemaps_bucket'), key)
-            all_exists = all_exists and file_exists
-            if not file_exists:
-                print(f"{u['absPath']} sourcemap (key '{key}') doesn't exist in S3")
+            file_exists_in_bucket = s3.exists(config('sourcemaps_bucket'), key)
+            if not file_exists_in_bucket:
+                print(f"{u['absPath']} sourcemap (key '{key}') doesn't exist in S3 looking in server")
+                if not file_url.endswith(".map"):
+                    file_url += '.map'
+                file_exists_in_server = url_exists(file_url)
+                file_exists_in_bucket = file_exists_in_server
+            all_exists = all_exists and file_exists_in_bucket
+            if not file_exists_in_bucket and not file_exists_in_server:
+                print(f"{u['absPath']} sourcemap (key '{key}') doesn't exist in S3 nor server")
                 payloads[key] = None
             else:
                 payloads[key] = []
-        results[i] = dict(u)
+        results[i]["frame"] = dict(u)
         if payloads[key] is not None:
-            payloads[key].append({"resultIndex": i,
+            payloads[key].append({"resultIndex": i, "frame": dict(u), "URL": file_url,
                                   "position": {"line": u["lineNo"], "column": u["colNo"]},
-                                  "frame": dict(u)})
+                                  "isURL": file_exists_in_server})

     for key in payloads.keys():
         if payloads[key] is None:
             continue
-        key_results = sourcemaps_parser.get_original_trace(key=key, positions=[o["position"] for o in payloads[key]])
+        key_results = sourcemaps_parser.get_original_trace(
+            key=payloads[key][0]["URL"] if payloads[key][0]["isURL"] else key,
+            positions=[o["position"] for o in payloads[key]],
+            is_url=payloads[key][0]["isURL"])
         if key_results is None:
             all_exists = False
             continue

@@ -123,16 +148,17 @@ MAX_COLUMN_OFFSET = 60
 def fetch_missed_contexts(frames):
     source_cache = {}
     for i in range(len(frames)):
-        if len(frames[i]["context"]) != 0:
+        if len(frames[i]["context"]) > 0:
             continue
-        if frames[i]["frame"]["absPath"] in source_cache:
-            file = source_cache[frames[i]["frame"]["absPath"]]
+        file_abs_path = frames[i]["frame"]["absPath"]
+        if file_abs_path in source_cache:
+            file = source_cache[file_abs_path]
         else:
-            file = s3.get_file(config('js_cache_bucket'), get_js_cache_path(frames[i]["frame"]["absPath"]))
+            file_path = get_js_cache_path(file_abs_path)
+            file = s3.get_file(config('js_cache_bucket'), file_path)
             if file is None:
-                print(
-                    f"File {get_js_cache_path(frames[i]['frame']['absPath'])} not found in {config('js_cache_bucket')}")
-            source_cache[frames[i]["frame"]["absPath"]] = file
+                print(f"Missing abs_path: {file_abs_path}, file {file_path} not found in {config('js_cache_bucket')}")
+            source_cache[file_abs_path] = file
         if file is None:
             continue
         lines = file.split("\n")
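
The resolution order the hunk introduces — bucket first, then the asset's public URL with a .map suffix — reduces to roughly the following. This is a sketch under stated assumptions: s3_exists and head_ok stand in for s3.exists and the new url_exists.

    def resolve_sourcemap(file_url: str, key: str, s3_exists, head_ok):
        # returns (location, is_url) on success, None when no map is reachable
        if s3_exists(key):              # 1. uploaded sourcemap in the bucket
            return key, False
        if not file_url.endswith(".map"):
            file_url += ".map"          # 2. fall back to the publicly served <asset>.map
        if head_ok(file_url):           # HEAD 200 and not an HTML error page
            return file_url, True
        return None

The (location, is_url) pair mirrors what get_traces_group forwards to sourcemaps_parser.get_original_trace via key/is_url.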

@@ -11,14 +11,14 @@ if '%s' in SMR_URL:
     SMR_URL = SMR_URL % "smr"


-def get_original_trace(key, positions):
+def get_original_trace(key, positions, is_url=False):
     payload = {
         "key": key,
         "positions": positions,
         "padding": 5,
-        "bucket": config('sourcemaps_bucket')
+        "bucket": config('sourcemaps_bucket'),
+        "isURL": is_url
     }

     try:
         r = requests.post(SMR_URL, json=payload, timeout=config("sourcemapTimeout", cast=int, default=5))
         if r.status_code != 200:

@@ -20,15 +20,19 @@ def process_data(data):
 def compute():
-    with pg_client.PostgresClient() as cur:
+    with pg_client.PostgresClient(long_query=True) as cur:
         cur.execute(
             f"""UPDATE public.tenants
                 SET t_integrations = COALESCE((SELECT COUNT(DISTINCT provider) FROM public.integrations) +
                                               (SELECT COUNT(*) FROM public.webhooks WHERE type = 'slack') +
                                               (SELECT COUNT(*) FROM public.jira_cloud), 0),
                     t_projects=COALESCE((SELECT COUNT(*) FROM public.projects WHERE deleted_at ISNULL), 0),
-                    t_sessions=COALESCE((SELECT COUNT(*) FROM public.sessions), 0),
-                    t_users=COALESCE((SELECT COUNT(*) FROM public.users WHERE deleted_at ISNULL), 0)
+                    t_sessions=t_sessions + COALESCE((SELECT COUNT(*)
+                                                      FROM public.sessions
+                                                      WHERE start_ts >= (SELECT last_telemetry FROM tenants)
+                                                        AND start_ts <= CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT)), 0),
+                    t_users=COALESCE((SELECT COUNT(*) FROM public.users WHERE deleted_at ISNULL), 0),
+                    last_telemetry=CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT)
                 RETURNING name,t_integrations,t_projects,t_sessions,t_users,tenant_key,opt_out,
                     (SELECT openreplay_version()) AS version_number,(SELECT email FROM public.users WHERE role = 'owner' LIMIT 1);"""
         )
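
The incremental t_sessions update only counts sessions started since the previous telemetry run instead of rescanning the whole table; in plain Python the window arithmetic is simply (a sketch — timestamps are epoch milliseconds, as in start_ts):

    def sessions_since_last_run(start_timestamps, last_telemetry_ms, today_midnight_ms):
        # COUNT(*) WHERE start_ts >= last_telemetry AND start_ts <= midnight today
        return sum(1 for ts in start_timestamps if last_telemetry_ms <= ts <= today_midnight_ms)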

@@ -14,7 +14,7 @@ def get_by_tenant_id(tenant_id):
                        api_key,
                        created_at,
                        '{license.EDITION}' AS edition,
-                       version_number,
+                       openreplay_version() AS version_number,
                        opt_out
                 FROM public.tenants
                 LIMIT 1;""",

@@ -252,9 +252,8 @@ def generate_new_api_key(user_id):
             cur.mogrify(
                 f"""UPDATE public.users
                     SET api_key=generate_api_key(20)
-                    WHERE
-                        users.user_id = %(userId)s
-                        AND deleted_at IS NULL
+                    WHERE users.user_id = %(userId)s
+                      AND deleted_at IS NULL
                     RETURNING api_key;""",
                 {"userId": user_id})
         )

@@ -295,6 +294,39 @@ def edit(user_id_to_update, tenant_id, changes: schemas.EditUserSchema, editor_i
     return {"data": user}


+def edit_member(user_id_to_update, tenant_id, changes: schemas.EditUserSchema, editor_id):
+    user = get_member(user_id=user_id_to_update, tenant_id=tenant_id)
+    if editor_id != user_id_to_update or changes.admin is not None and changes.admin != user["admin"]:
+        admin = get(tenant_id=tenant_id, user_id=editor_id)
+        if not admin["superAdmin"] and not admin["admin"]:
+            return {"errors": ["unauthorized"]}
+    _changes = {}
+    if editor_id == user_id_to_update:
+        if changes.admin is not None:
+            if user["superAdmin"]:
+                changes.admin = None
+            elif changes.admin != user["admin"]:
+                return {"errors": ["cannot change your own role"]}
+
+    if changes.email is not None and changes.email != user["email"]:
+        if email_exists(changes.email):
+            return {"errors": ["email already exists."]}
+        if get_deleted_user_by_email(changes.email) is not None:
+            return {"errors": ["email previously deleted."]}
+        _changes["email"] = changes.email
+
+    if changes.name is not None and len(changes.name) > 0:
+        _changes["name"] = changes.name
+
+    if changes.admin is not None:
+        _changes["role"] = "admin" if changes.admin else "member"
+
+    if len(_changes.keys()) > 0:
+        update(tenant_id=tenant_id, user_id=user_id_to_update, changes=_changes)
+        return {"data": get_member(user_id=user_id_to_update, tenant_id=tenant_id)}
+    return {"data": user}
+
+
 def get_by_email_only(email):
     with pg_client.PostgresClient() as cur:
         cur.execute(

@@ -342,11 +374,42 @@ def get_by_email_reset(email, reset_token):
     return helper.dict_to_camel_case(r)


+def get_member(tenant_id, user_id):
+    with pg_client.PostgresClient() as cur:
+        cur.execute(cur.mogrify(
+            f"""SELECT
+                    users.user_id,
+                    users.email,
+                    users.role,
+                    users.name,
+                    users.created_at,
+                    (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
+                    (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
+                    (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
+                    DATE_PART('day',timezone('utc'::text, now()) \
+                              - COALESCE(basic_authentication.invited_at,'2000-01-01'::timestamp ))>=1 AS expired_invitation,
+                    basic_authentication.password IS NOT NULL AS joined,
+                    invitation_token
+                FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
+                WHERE users.deleted_at IS NULL AND users.user_id=%(user_id)s
+                ORDER BY name, user_id""", {"user_id": user_id})
+        )
+        u = helper.dict_to_camel_case(cur.fetchone())
+        if u:
+            u["createdAt"] = TimeUTC.datetime_to_timestamp(u["createdAt"])
+            if u["invitationToken"]:
+                u["invitationLink"] = __get_invitation_link(u.pop("invitationToken"))
+            else:
+                u["invitationLink"] = None
+
+    return u
+
+
 def get_members(tenant_id):
     with pg_client.PostgresClient() as cur:
         cur.execute(
             f"""SELECT
-                    users.user_id AS id,
+                    users.user_id,
                     users.email,
                     users.role,
                     users.name,

@@ -360,7 +423,7 @@ def get_members(tenant_id):
                     invitation_token
                 FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
                 WHERE users.deleted_at IS NULL
-                ORDER BY name, id"""
+                ORDER BY name, user_id"""
         )
         r = cur.fetchall()
         if len(r):

@@ -1,6 +1,9 @@
 import requests
 from datetime import datetime

+from fastapi import HTTPException
+from starlette import status
+

 class github_formatters:

@@ -120,9 +123,9 @@ class githubV3Request:
         pages = get_response_links(response)
         result = response.json()
         if response.status_code != 200:
-            print("!-------- error")
+            print(f"=>GITHUB Exception")
             print(result)
-            raise Exception(result["message"])
+            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"GITHUB: {result['message']}")
         if isinstance(result, dict):
             return result
         results += result

@@ -4,52 +4,18 @@ import re
 import string
 from typing import Union

-import requests
-from decouple import config
-
 import schemas
 from chalicelib.utils.TimeUTC import TimeUTC

-local_prefix = 'local-'
+from decouple import config


-def get_version_number():
-    return config("version")
-
-
 def get_stage_name():
-    stage = config("STAGE")
-    return stage[len(local_prefix):] if stage.startswith(local_prefix) else stage
+    return "OpenReplay"


-def is_production():
-    return get_stage_name() == "production"
-
-
-def is_staging():
-    return get_stage_name() == "staging"
-
-
-def is_onprem():
-    return not is_production() and not is_staging()
-
-
-def is_local():
-    return config("STAGE").startswith(local_prefix)
-
-
 def generate_salt():
     return "".join(random.choices(string.hexdigits, k=36))


-def unique_ordered_list(array):
-    uniq = []
-    [uniq.append(x) for x in array if x not in uniq]
-    return uniq
-
-
-def unique_unordered_list(array):
-    return list(set(array))
+def random_string(length=36):
+    return "".join(random.choices(string.hexdigits, k=length))


 def list_to_camel_case(items, flatten=False):

@@ -130,12 +96,6 @@ def key_to_snake_case(name, delimiter='_', split_number=False):
 TRACK_TIME = True


-def __sbool_to_bool(value):
-    if value is None or not isinstance(value, str):
-        return False
-    return value.lower() in ["true", "yes", "1"]
-
-
 def allow_captcha():
     return config("captcha_server", default=None) is not None and config("captcha_key", default=None) is not None \
            and len(config("captcha_server")) > 0 and len(config("captcha_key")) > 0

@@ -210,54 +170,11 @@ def values_for_operator(value: Union[str, list], op: schemas.SearchEventOperator
     return value


-def is_valid_email(email):
-    return re.match(r"[^@]+@[^@]+\.[^@]+", email) is not None
-
-
-def is_valid_http_url(url):
-    regex = re.compile(
-        r'^(?:http|ftp)s?://'  # http:// or https://
-        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
-        r'localhost|'  # localhost...
-        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
-        r'(?::\d+)?'  # optional port
-        r'(?:/?|[/?]\S+)$', re.IGNORECASE)
-
-    return re.match(regex, url) is not None
-
-
-def is_valid_url(url):
-    regex = re.compile(
-        # r'^(?:http|ftp)s?://'  # http:// or https://
-        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
-        r'localhost|'  # localhost...
-        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
-        r'(?::\d+)?'  # optional port
-        r'(?:/?|[/?]\S+)$', re.IGNORECASE)
-
-    return re.match(regex, url) is not None
-
-
 def is_alphabet_space(word):
     r = re.compile("^[a-zA-Z ]*$")
     return r.match(word) is not None


-def is_alphabet_latin_space(word):
-    r = re.compile("^[a-zA-Z\u00C0-\u00D6\u00D8-\u00f6\u00f8-\u00ff\s ]*$")
-    return r.match(word) is not None
-
-
 def is_alphabet_space_dash(word):
     r = re.compile("^[a-zA-Z -]*$")
     return r.match(word) is not None


-def is_alphanumeric_space(word):
-    r = re.compile("^[a-zA-Z0-9._\- ]*$")
-    return r.match(word) is not None
-
-
 def merge_lists_by_key(l1, l2, key):
     merged = {}
     for item in l1 + l2:

@@ -310,9 +227,6 @@ def explode_widget(data, key=None):
     return result


-TEMP_PATH = "./" if is_local() else "/tmp/"
-
-
 def get_issue_title(issue_type):
     return {'click_rage': "Click Rage",
             'dead_click': "Dead Click",

@@ -35,7 +35,7 @@ class JiraManager:
             if (e.status_code // 100) == 4 and self.retries > 0:
                 time.sleep(1)
                 return self.get_projects()
-            print(f"=>Exception {e.text}")
+            print(f"=>JIRA Exception {e.text}")
             raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"JIRA: {e.text}")
         projects_dict_list = []
         for project in projects:

@@ -20,8 +20,6 @@ PG_CONFIG = dict(_PG_CONFIG)
 if config("PG_TIMEOUT", cast=int, default=0) > 0:
     PG_CONFIG["options"] = f"-c statement_timeout={config('PG_TIMEOUT', cast=int) * 1000}"

-logging.info(f">PG_POOL:{config('PG_POOL', default=None)}")
-

 class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool):
     def __init__(self, minconn, maxconn, *args, **kwargs):

@@ -83,10 +81,6 @@ def make_pool():
         raise error


-if config('PG_POOL', cast=bool, default=True):
-    make_pool()
-
-
 class PostgresClient:
     connection = None
     cursor = None

@@ -109,7 +103,7 @@ class PostgresClient:
         elif not config('PG_POOL', cast=bool, default=True):
             single_config = dict(_PG_CONFIG)
             single_config["application_name"] += "-NOPOOL"
-            single_config["options"] = f"-c statement_timeout={config('PG_TIMEOUT', cast=int, default=3 * 60) * 1000}"
+            single_config["options"] = f"-c statement_timeout={config('PG_TIMEOUT', cast=int, default=30) * 1000}"
             self.connection = psycopg2.connect(**single_config)
         else:
             self.connection = postgreSQL_pool.getconn()

@@ -117,6 +111,7 @@ class PostgresClient:
     def __enter__(self):
         if self.cursor is None:
             self.cursor = self.connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
+        self.cursor.recreate = self.recreate_cursor
         return self.cursor

     def __exit__(self, *args):

@@ -141,6 +136,26 @@ class PostgresClient:
                 and not self.unlimited_query:
             postgreSQL_pool.putconn(self.connection)

+    def recreate_cursor(self):
+        try:
+            self.cursor.close()
+        except Exception as error:
+            logging.error("Error while closing cursor for recreation", error)
+        self.cursor = None
+        self.__enter__()
+

 def close():
     pass
+
+
+async def init():
+    logging.info(f">PG_POOL:{config('PG_POOL', default=None)}")
+    if config('PG_POOL', cast=bool, default=True):
+        make_pool()
+
+
+async def terminate():
+    global postgreSQL_pool
+    if postgreSQL_pool is not None:
+        try:
+            postgreSQL_pool.closeall()
+            logging.info("Closed all connexions to PostgreSQL")
+        except (Exception, psycopg2.DatabaseError) as error:
+            logging.error("Error while closing all connexions to PostgreSQL", error)
|
|||
|
|
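The lifecycle above moves pool creation out of import time: `init()` builds the pool (when PG_POOL is enabled), `PostgresClient.__enter__` hands out a RealDictCursor with a `recreate` hook attached, and `terminate()` drains the pool on shutdown. A usage sketch under those assumptions (the query is illustrative and exact constructor arguments may differ):

    import asyncio
    from chalicelib.utils import pg_client

    async def main():
        await pg_client.init()  # build the connection pool once, at startup
        with pg_client.PostgresClient() as cur:
            cur.execute("SELECT project_id, name FROM public.projects LIMIT 5;")
            print(cur.fetchall())  # RealDictCursor returns a list of dicts
        await pg_client.terminate()  # close all pooled connections

    asyncio.run(main())
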
@@ -70,7 +70,6 @@ def get_file(source_bucket, source_key):
        )
    except ClientError as ex:
        if ex.response['Error']['Code'] == 'NoSuchKey':
            print(f'======> No object found - returning None for \nbucket:{source_bucket}\nkey:{source_key}')
            return None
        else:
            raise ex

@@ -1,5 +1,3 @@
#!/bin/sh
cd sourcemap-reader
nohup npm start &
cd ..

uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload --proxy-headers

@@ -18,7 +18,7 @@ change_password_link=/reset-password?invitation=%s&&pass=%s
invitation_link=/api/users/invitation?token=%s
js_cache_bucket=sessions-assets
jwt_algorithm=HS512
JWT_EXP_DELTA_SECONDS=2592000
JWT_EXPIRATION=2592000
JWT_ISSUER=openreplay-oss
jwt_secret="SET A RANDOM STRING HERE"
ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s

@@ -38,7 +38,7 @@ PG_POOL=true
sessions_bucket=mobs
sessions_region=us-east-1
sourcemaps_bucket=sourcemaps
sourcemaps_reader=http://127.0.0.1:9000/sourcemaps/%s/sourcemaps
sourcemaps_reader=http://sourcemaps-reader-openreplay.app.svc.cluster.local:9000/sourcemaps/%s/sourcemaps
STAGE=default-foss
version_number=1.4.0
FS_DIR=/mnt/efs

@@ -46,7 +46,7 @@ EFS_SESSION_MOB_PATTERN=%(sessionId)s/dom.mob
EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob
SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs
SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe
DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob
DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mobs
PRESIGNED_URL_EXPIRATION=3600
ASSIST_JWT_EXPIRATION=1800
ASSIST_JWT_EXPIRATION=144000
ASSIST_JWT_SECRET=

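These keys are read through python-decouple at runtime, so a rename such as JWT_EXP_DELTA_SECONDS -> JWT_EXPIRATION only takes effect once every `config()` call site uses the new name. The lookup pattern, for reference (the default values here are illustrative):

    from decouple import config

    # `cast` converts the raw .env string; `default` applies only when the key is absent.
    JWT_EXPIRATION = config("JWT_EXPIRATION", cast=int, default=2592000)
    USE_POOL = config("PG_POOL", cast=bool, default=True)
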
@@ -1,22 +1,19 @@
from typing import Union

from decouple import config
from fastapi import Depends, Body, BackgroundTasks, HTTPException
from fastapi.responses import FileResponse
from fastapi import Depends, Body, HTTPException
from starlette import status

import schemas
from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assignments, projects, \
    alerts, funnels, issues, integrations_manager, metadata, \
    log_tool_elasticsearch, log_tool_datadog, \
    log_tool_stackdriver, reset_password, sessions_favorite, \
    log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, errors, sessions, \
    log_tool_stackdriver, reset_password, log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, sessions, \
    log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, \
    assist, heatmaps, mobile, signup, tenants, errors_viewed, boarding, notifications, webhook, users, \
    custom_metrics, saved_search, integrations_global, sessions_viewed, errors_favorite
    assist, mobile, signup, tenants, boarding, notifications, webhook, users, \
    custom_metrics, saved_search, integrations_global
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import email_helper, helper, captcha
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils import helper, captcha
from or_dependencies import OR_context
from routers.base import get_routers

@@ -52,7 +49,6 @@ def login(data: schemas.UserLoginSchema = Body(...)):


@app.post('/{projectId}/sessions/search', tags=["sessions"])
@app.post('/{projectId}/sessions/search2', tags=["sessions"])
def sessions_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
                    context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id)

@@ -100,7 +96,6 @@ def get_integrations_status(projectId: int, context: schemas.CurrentContext = De


@app.post('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"])
@app.put('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"])
def integration_notify(projectId: int, integration: str, integrationId: int, source: str, sourceId: str,
                       data: schemas.IntegrationNotificationSchema = Body(...),
                       context: schemas.CurrentContext = Depends(OR_context)):

@@ -129,7 +124,6 @@ def get_sentry(projectId: int, context: schemas.CurrentContext = Depends(OR_cont


@app.post('/{projectId}/integrations/sentry', tags=["integrations"])
@app.put('/{projectId}/integrations/sentry', tags=["integrations"])
def add_edit_sentry(projectId: int, data: schemas.SentrySchema = Body(...),
                    context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": log_tool_sentry.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}

@@ -156,7 +150,6 @@ def get_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_con


@app.post('/{projectId}/integrations/datadog', tags=["integrations"])
@app.put('/{projectId}/integrations/datadog', tags=["integrations"])
def add_edit_datadog(projectId: int, data: schemas.DatadogSchema = Body(...),
                     context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": log_tool_datadog.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}

@@ -178,7 +171,6 @@ def get_stackdriver(projectId: int, context: schemas.CurrentContext = Depends(OR


@app.post('/{projectId}/integrations/stackdriver', tags=["integrations"])
@app.put('/{projectId}/integrations/stackdriver', tags=["integrations"])
def add_edit_stackdriver(projectId: int, data: schemas.StackdriverSchema = Body(...),
                         context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": log_tool_stackdriver.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}

@@ -200,7 +192,6 @@ def get_newrelic(projectId: int, context: schemas.CurrentContext = Depends(OR_co


@app.post('/{projectId}/integrations/newrelic', tags=["integrations"])
@app.put('/{projectId}/integrations/newrelic', tags=["integrations"])
def add_edit_newrelic(projectId: int, data: schemas.NewrelicSchema = Body(...),
                      context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": log_tool_newrelic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}

@@ -222,7 +213,6 @@ def get_rollbar(projectId: int, context: schemas.CurrentContext = Depends(OR_con


@app.post('/{projectId}/integrations/rollbar', tags=["integrations"])
@app.put('/{projectId}/integrations/rollbar', tags=["integrations"])
def add_edit_rollbar(projectId: int, data: schemas.RollbarSchema = Body(...),
                     context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": log_tool_rollbar.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}

@@ -250,7 +240,6 @@ def get_bugsnag(projectId: int, context: schemas.CurrentContext = Depends(OR_con


@app.post('/{projectId}/integrations/bugsnag', tags=["integrations"])
@app.put('/{projectId}/integrations/bugsnag', tags=["integrations"])
def add_edit_bugsnag(projectId: int, data: schemas.BugsnagSchema = Body(...),
                     context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": log_tool_bugsnag.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}

@@ -280,7 +269,6 @@ def get_cloudwatch(projectId: int, context: schemas.CurrentContext = Depends(OR_


@app.post('/{projectId}/integrations/cloudwatch', tags=["integrations"])
@app.put('/{projectId}/integrations/cloudwatch', tags=["integrations"])
def add_edit_cloudwatch(projectId: int, data: schemas.CloudwatchSchema = Body(...),
                        context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": log_tool_cloudwatch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}

@@ -308,7 +296,6 @@ def test_elasticsearch_connection(data: schemas.ElasticsearchBasicSchema = Body(


@app.post('/{projectId}/integrations/elasticsearch', tags=["integrations"])
@app.put('/{projectId}/integrations/elasticsearch', tags=["integrations"])
def add_edit_elasticsearch(projectId: int, data: schemas.ElasticsearchSchema = Body(...),
                           context: schemas.CurrentContext = Depends(OR_context)):
    return {

@@ -331,7 +318,6 @@ def get_sumologic(projectId: int, context: schemas.CurrentContext = Depends(OR_c


@app.post('/{projectId}/integrations/sumologic', tags=["integrations"])
@app.put('/{projectId}/integrations/sumologic', tags=["integrations"])
def add_edit_sumologic(projectId: int, data: schemas.SumologicSchema = Body(...),
                       context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": log_tool_sumologic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}

@@ -372,7 +358,6 @@ def get_integration_status_github(context: schemas.CurrentContext = Depends(OR_c


@app.post('/integrations/jira', tags=["integrations"])
@app.put('/integrations/jira', tags=["integrations"])
def add_edit_jira_cloud(data: schemas.JiraSchema = Body(...),
                        context: schemas.CurrentContext = Depends(OR_context)):
    if not data.url.endswith('atlassian.net'):

@@ -386,7 +371,6 @@ def add_edit_jira_cloud(data: schemas.JiraSchema = Body(...),


@app.post('/integrations/github', tags=["integrations"])
@app.put('/integrations/github', tags=["integrations"])
def add_edit_github(data: schemas.GithubSchema = Body(...),
                    context: schemas.CurrentContext = Depends(OR_context)):
    error, integration = integrations_manager.get_integration(tool=integration_github.PROVIDER,

@@ -461,7 +445,6 @@ def get_all_assignments(projectId: int, context: schemas.CurrentContext = Depend


@app.post('/{projectId}/sessions2/{sessionId}/assign/projects/{integrationProjectId}', tags=["assignment"])
@app.put('/{projectId}/sessions2/{sessionId}/assign/projects/{integrationProjectId}', tags=["assignment"])
def create_issue_assignment(projectId: int, sessionId: int, integrationProjectId,
                            data: schemas.AssignmentSchema = Body(...),
                            context: schemas.CurrentContext = Depends(OR_context)):

@@ -484,14 +467,12 @@ def get_gdpr(projectId: int, context: schemas.CurrentContext = Depends(OR_contex


@app.post('/{projectId}/gdpr', tags=["projects", "gdpr"])
@app.put('/{projectId}/gdpr', tags=["projects", "gdpr"])
def edit_gdpr(projectId: int, data: schemas.GdprSchema = Body(...),
              context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": projects.edit_gdpr(project_id=projectId, gdpr=data.dict())}


@public_app.post('/password/reset-link', tags=["reset password"])
@public_app.put('/password/reset-link', tags=["reset password"])
def reset_password_handler(data: schemas.ForgetPasswordPayloadSchema = Body(...)):
    if len(data.email) < 5:
        return {"errors": ["please provide a valid email address"]}

@@ -504,21 +485,18 @@ def get_metadata(projectId: int, context: schemas.CurrentContext = Depends(OR_co


@app.post('/{projectId}/metadata/list', tags=["metadata"])
@app.put('/{projectId}/metadata/list', tags=["metadata"])
def add_edit_delete_metadata(projectId: int, data: schemas.MetadataListSchema = Body(...),
                             context: schemas.CurrentContext = Depends(OR_context)):
    return metadata.add_edit_delete(tenant_id=context.tenant_id, project_id=projectId, new_metas=data.list)


@app.post('/{projectId}/metadata', tags=["metadata"])
@app.put('/{projectId}/metadata', tags=["metadata"])
def add_metadata(projectId: int, data: schemas.MetadataBasicSchema = Body(...),
                 context: schemas.CurrentContext = Depends(OR_context)):
    return metadata.add(tenant_id=context.tenant_id, project_id=projectId, new_name=data.key)


@app.post('/{projectId}/metadata/{index}', tags=["metadata"])
@app.put('/{projectId}/metadata/{index}', tags=["metadata"])
def edit_metadata(projectId: int, index: int, data: schemas.MetadataBasicSchema = Body(...),
                  context: schemas.CurrentContext = Depends(OR_context)):
    return metadata.edit(tenant_id=context.tenant_id, project_id=projectId, index=index,

@@ -552,7 +530,6 @@ def get_capture_status(projectId: int, context: schemas.CurrentContext = Depends


@app.post('/{projectId}/sample_rate', tags=["projects"])
@app.put('/{projectId}/sample_rate', tags=["projects"])
def update_capture_status(projectId: int, data: schemas.SampleRateSchema = Body(...),
                          context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": projects.update_capture_status(project_id=projectId, changes=data.dict())}

@@ -574,7 +551,6 @@ def errors_merge(context: schemas.CurrentContext = Depends(OR_context)):


@app.post('/{projectId}/alerts', tags=["alerts"])
@app.put('/{projectId}/alerts', tags=["alerts"])
def create_alert(projectId: int, data: schemas.AlertSchema = Body(...),
                 context: schemas.CurrentContext = Depends(OR_context)):
    return alerts.create(projectId, data)

@@ -597,7 +573,6 @@ def get_alert(projectId: int, alertId: int, context: schemas.CurrentContext = De


@app.post('/{projectId}/alerts/{alertId}', tags=["alerts"])
@app.put('/{projectId}/alerts/{alertId}', tags=["alerts"])
def update_alert(projectId: int, alertId: int, data: schemas.AlertSchema = Body(...),
                 context: schemas.CurrentContext = Depends(OR_context)):
    return alerts.update(alertId, data)

@@ -609,7 +584,6 @@ def delete_alert(projectId: int, alertId: int, context: schemas.CurrentContext =


@app.post('/{projectId}/funnels', tags=["funnels"])
@app.put('/{projectId}/funnels', tags=["funnels"])
def add_funnel(projectId: int, data: schemas.FunnelSchema = Body(...),
               context: schemas.CurrentContext = Depends(OR_context)):
    return funnels.create(project_id=projectId,

@@ -653,7 +627,6 @@ def get_funnel_insights(projectId: int, funnelId: int, rangeValue: str = None, s


@app.post('/{projectId}/funnels/{funnelId}/insights', tags=["funnels"])
@app.put('/{projectId}/funnels/{funnelId}/insights', tags=["funnels"])
def get_funnel_insights_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelInsightsPayloadSchema = Body(...),
                                   context: schemas.CurrentContext = Depends(OR_context)):
    return funnels.get_top_insights_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,

@@ -668,7 +641,6 @@ def get_funnel_issues(projectId: int, funnelId, rangeValue: str = None, startDat


@app.post('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"])
@app.put('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"])
def get_funnel_issues_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...),
                                 context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": funnels.get_issues_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,

@@ -685,7 +657,6 @@ def get_funnel_sessions(projectId: int, funnelId: int, rangeValue: str = None, s


@app.post('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"])
@app.put('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"])
def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...),
                                   context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": funnels.get_sessions_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId,

@@ -705,7 +676,6 @@ def get_funnel_issue_sessions(projectId: int, issueId: str, startDate: int = Non


@app.post('/{projectId}/funnels/{funnelId}/issues/{issueId}/sessions', tags=["funnels"])
@app.put('/{projectId}/funnels/{funnelId}/issues/{issueId}/sessions', tags=["funnels"])
def get_funnel_issue_sessions(projectId: int, funnelId: int, issueId: str,
                              data: schemas.FunnelSearchPayloadSchema = Body(...),
                              context: schemas.CurrentContext = Depends(OR_context)):

@@ -729,7 +699,6 @@ def get_funnel(projectId: int, funnelId: int, context: schemas.CurrentContext =


@app.post('/{projectId}/funnels/{funnelId}', tags=["funnels"])
@app.put('/{projectId}/funnels/{funnelId}', tags=["funnels"])
def edit_funnel(projectId: int, funnelId: int, data: schemas.UpdateFunnelSchema = Body(...),
                context: schemas.CurrentContext = Depends(OR_context)):
    return funnels.update(funnel_id=funnelId,

@@ -762,7 +731,6 @@ def get_weekly_report_config(context: schemas.CurrentContext = Depends(OR_contex


@app.post('/config/weekly_report', tags=["weekly report config"])
@app.put('/config/weekly_report', tags=["weekly report config"])
def edit_weekly_report_config(data: schemas.WeeklyReportConfigSchema = Body(...),
                              context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": weekly_report.edit_config(user_id=context.user_id, weekly_report=data.weekly_report)}

@@ -797,21 +765,19 @@ def mobile_signe(projectId: int, sessionId: int, data: schemas.MobileSignPayload
    return {"data": mobile.sign_keys(project_id=projectId, session_id=sessionId, keys=data.keys)}


@public_app.put('/signup', tags=['signup'])
@public_app.post('/signup', tags=['signup'])
@public_app.put('/signup', tags=['signup'])
def signup_handler(data: schemas.UserSignupSchema = Body(...)):
    return signup.create_step1(data)


@app.post('/projects', tags=['projects'])
@app.put('/projects', tags=['projects'])
def create_project(data: schemas.CreateProjectSchema = Body(...),
                   context: schemas.CurrentContext = Depends(OR_context)):
    return projects.create(tenant_id=context.tenant_id, user_id=context.user_id, data=data)


@app.post('/projects/{projectId}', tags=['projects'])
@app.put('/projects/{projectId}', tags=['projects'])
def edit_project(projectId: int, data: schemas.CreateProjectSchema = Body(...),
                 context: schemas.CurrentContext = Depends(OR_context)):
    return projects.edit(tenant_id=context.tenant_id, user_id=context.user_id, data=data, project_id=projectId)

@@ -829,8 +795,8 @@ def generate_new_tenant_token(context: schemas.CurrentContext = Depends(OR_conte
    }


@app.put('/client', tags=['client'])
@app.post('/client', tags=['client'])
@app.put('/client', tags=['client'])
def edit_client(data: schemas.UpdateTenantSchema = Body(...),
                context: schemas.CurrentContext = Depends(OR_context)):
    return tenants.update(tenant_id=context.tenant_id, user_id=context.user_id, data=data)

@@ -852,7 +818,6 @@ def view_notifications(notificationId: int, context: schemas.CurrentContext = De


@app.post('/notifications/view', tags=['notifications'])
@app.put('/notifications/view', tags=['notifications'])
def batch_view_notifications(data: schemas.NotificationsViewSchema,
                             context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": notifications.view_notification(notification_ids=data.ids,

@@ -903,7 +868,6 @@ def delete_slack_integration(integrationId: int, context: schemas.CurrentContext


@app.post('/webhooks', tags=["webhooks"])
@app.put('/webhooks', tags=["webhooks"])
def add_edit_webhook(data: schemas.CreateEditWebhookSchema = Body(...),
                     context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": webhook.add_edit(tenant_id=context.tenant_id, data=data.dict(), replace_none=True)}

@@ -940,7 +904,6 @@ def generate_new_user_token(context: schemas.CurrentContext = Depends(OR_context


@app.post('/account/password', tags=["account"])
@app.put('/account/password', tags=["account"])
def change_client_password(data: schemas.EditUserPasswordSchema = Body(...),
                           context: schemas.CurrentContext = Depends(OR_context)):
    return users.change_password(email=context.email, old_password=data.old_password,

@@ -949,7 +912,6 @@ def change_client_password(data: schemas.EditUserPasswordSchema = Body(...),


@app.post('/{projectId}/saved_search', tags=["savedSearch"])
@app.put('/{projectId}/saved_search', tags=["savedSearch"])
def add_saved_search(projectId: int, data: schemas.SavedSearchSchema = Body(...),
                     context: schemas.CurrentContext = Depends(OR_context)):
    return saved_search.create(project_id=projectId, user_id=context.user_id, data=data)

@@ -966,7 +928,6 @@ def get_saved_search(projectId: int, search_id: int, context: schemas.CurrentCon


@app.post('/{projectId}/saved_search/{search_id}', tags=["savedSearch"])
@app.put('/{projectId}/saved_search/{search_id}', tags=["savedSearch"])
def update_saved_search(projectId: int, search_id: int, data: schemas.SavedSearchSchema = Body(...),
                        context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": saved_search.update(user_id=context.user_id, search_id=search_id, data=data, project_id=projectId)}

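Most hunks in this router drop one of a pair of stacked method decorators. In FastAPI, stacking `@app.post(...)` and `@app.put(...)` on one function registers the same handler under both verbs, so deleting a decorator removes only that route. A standalone sketch (path and schema are illustrative):

    from fastapi import FastAPI, Body
    from pydantic import BaseModel

    app = FastAPI()

    class GdprSchema(BaseModel):
        maskEmails: bool = True

    @app.post('/demo/gdpr')   # one handler, reachable via POST ...
    @app.put('/demo/gdpr')    # ... and via PUT
    def edit_gdpr(data: GdprSchema = Body(...)):
        return {"data": data.dict()}
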
@@ -6,7 +6,7 @@ from starlette.responses import RedirectResponse, FileResponse

import schemas
from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, sessions_assignments, heatmaps, \
    sessions_favorite, assist
    sessions_favorite, assist, sessions_notes
from chalicelib.core import sessions_viewed
from chalicelib.core import tenants, users, projects, license
from chalicelib.core import webhook

@@ -46,7 +46,6 @@ def get_account(context: schemas.CurrentContext = Depends(OR_context)):


@app.post('/account', tags=["account"])
@app.put('/account', tags=["account"])
def edit_account(data: schemas.EditUserSchema = Body(...),
                 context: schemas.CurrentContext = Depends(OR_context)):
    return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data,

@@ -70,8 +69,8 @@ def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_con
    return {"data": data}


@app.put('/integrations/slack', tags=['integrations'])
@app.post('/integrations/slack', tags=['integrations'])
@app.put('/integrations/slack', tags=['integrations'])
def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentContext = Depends(OR_context)):
    n = Slack.add_channel(tenant_id=context.tenant_id, url=data.url, name=data.name)
    if n is None:

@@ -81,7 +80,6 @@ def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentConte
    return {"data": n}


@app.put('/integrations/slack/{integrationId}', tags=['integrations'])
@app.post('/integrations/slack/{integrationId}', tags=['integrations'])
def edit_slack_integration(integrationId: int, data: schemas.EditSlackSchema = Body(...),
                           context: schemas.CurrentContext = Depends(OR_context)):

@@ -98,7 +96,6 @@ def edit_slack_integration(integrationId: int, data: schemas.EditSlackSchema = B


@app.post('/client/members', tags=["client"])
@app.put('/client/members', tags=["client"])
def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...),
               context: schemas.CurrentContext = Depends(OR_context)):
    return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(),

@@ -123,7 +120,6 @@ def process_invitation_link(token: str):


@public_app.post('/password/reset', tags=["users"])
@public_app.put('/password/reset', tags=["users"])
def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)):
    if data is None or len(data.invitation) < 64 or len(data.passphrase) < 8:
        return {"errors": ["please provide a valid invitation & pass"]}

@@ -136,12 +132,11 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema =
    return users.set_password_invitation(new_password=data.password, user_id=user["userId"])


@app.put('/client/members/{memberId}', tags=["client"])
@app.post('/client/members/{memberId}', tags=["client"])
def edit_member(memberId: int, data: schemas.EditMemberSchema,
                context: schemas.CurrentContext = Depends(OR_context)):
    return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data,
                      user_id_to_update=memberId)
    return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data,
                             user_id_to_update=memberId)


@app.get('/metadata/session_search', tags=["metadata"])

@@ -175,8 +170,8 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba
                context: schemas.CurrentContext = Depends(OR_context)):
    if isinstance(sessionId, str):
        return {"errors": ["session not found"]}
    data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, user_id=context.user_id,
                                  include_fav_viewed=True, group_metadata=True)
    data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True,
                                  include_fav_viewed=True, group_metadata=True, context=context)
    if data is None:
        return {"errors": ["session not found"]}
    if data.get("inDB"):

@@ -213,8 +208,7 @@ def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int,

@app.get('/{projectId}/errors/{errorId}', tags=['errors'])
def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24,
                       density30: int = 30,
                       context: schemas.CurrentContext = Depends(OR_context)):
                       density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)):
    data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId,
                              **{"density24": density24, "density30": density30})
    if data.get("data") is not None:

@@ -265,8 +259,8 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun
                     context: schemas.CurrentContext = Depends(OR_context)):
    data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId)
    if data is None:
        data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True,
                                      user_id=context.user_id, include_fav_viewed=True, group_metadata=True, live=False)
        data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId,
                                      full_data=True, include_fav_viewed=True, group_metadata=True, live=False)
        if data is None:
            return {"errors": ["session not found"]}
        if data.get("inDB"):

@@ -275,8 +269,7 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun
    return {'data': data}


@app.get('/{projectId}/unprocessed/{sessionId}', tags=["assist"])
@app.get('/{projectId}/assist/sessions/{sessionId}/replay', tags=["assist"])
@app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"])
def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
                                 context: schemas.CurrentContext = Depends(OR_context)):
    not_found = {"errors": ["Replay file not found"]}

@@ -296,8 +289,7 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
    return FileResponse(path=path, media_type="application/octet-stream")


@app.get('/{projectId}/unprocessed/{sessionId}/devtools', tags=["assist"])
@app.get('/{projectId}/assist/sessions/{sessionId}/devtools', tags=["assist"])
@app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"])
def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
                                   context: schemas.CurrentContext = Depends(OR_context)):
    not_found = {"errors": ["Devtools file not found"]}

@@ -324,12 +316,10 @@ def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema =


@app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"])
@app.get('/{projectId}/sessions2/{sessionId}/favorite', tags=["sessions"])
def add_remove_favorite_session2(projectId: int, sessionId: int,
                                 context: schemas.CurrentContext = Depends(OR_context)):
    return {
        "data": sessions_favorite.favorite_session(project_id=projectId, user_id=context.user_id,
                                                   session_id=sessionId)}
        "data": sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId)}


@app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"])

@@ -359,9 +349,7 @@ def assign_session(projectId: int, sessionId: int, issueId: str,


@app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"])
@app.put('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"])
@app.post('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"])
@app.put('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"])
def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...),
                       context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId,

@@ -372,3 +360,64 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schem
    return {
        'data': data
    }


@app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"])
def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...),
                context: schemas.CurrentContext = Depends(OR_context)):
    if not sessions.session_exists(project_id=projectId, session_id=sessionId):
        return {"errors": ["Session not found"]}
    data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId,
                                 session_id=sessionId, user_id=context.user_id, data=data)
    if "errors" in data.keys():
        return data
    return {
        'data': data
    }


@app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"])
def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId,
                                            session_id=sessionId, user_id=context.user_id)
    if "errors" in data:
        return data
    return {
        'data': data
    }


@app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...),
              context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
                               note_id=noteId, data=data)
    if "errors" in data.keys():
        return data
    return {
        'data': data
    }


@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions_notes.delete(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
                                 note_id=noteId)
    return data


@app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"])
def share_note_to_slack(projectId: int, noteId: int, webhookId: int,
                        context: schemas.CurrentContext = Depends(OR_context)):
    return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
                                         note_id=noteId, webhook_id=webhookId)


@app.post('/{projectId}/notes', tags=["sessions", "notes"])
def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
                  context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
                                                      user_id=context.user_id, data=data)
    if "errors" in data:
        return data
    return {'data': data}

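The new notes endpoints follow one convention: create under the session, list with GET, edit with POST on /notes/{noteId}, delete with DELETE, and search with POST on /notes. A client-side sketch, assuming a local instance and a valid bearer token (host, token and IDs are illustrative; field names are camelCase because the schemas use attribute_to_camel_case):

    import requests

    BASE = "http://localhost:8000"                 # assumed host
    HEADERS = {"Authorization": "Bearer <JWT>"}    # assumed auth header

    # Create a note on session 123 of project 1
    r = requests.post(f"{BASE}/1/sessions/123/notes", headers=HEADERS,
                      json={"message": "checkout freezes here",
                            "timestamp": 4500, "isPublic": False})
    print(r.json())  # {"data": {...}} on success, {"errors": [...]} otherwise
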
@@ -28,7 +28,6 @@ class UserLoginSchema(_Grecaptcha):
class UserSignupSchema(UserLoginSchema):
    fullname: str = Field(...)
    organizationName: str = Field(...)
    projectName: str = Field(default="my first project")

    class Config:
        alias_generator = attribute_to_camel_case

@@ -1084,3 +1083,41 @@ class IntegrationType(str, Enum):
    stackdriver = "STACKDRIVER"
    cloudwatch = "CLOUDWATCH"
    newrelic = "NEWRELIC"


class SearchNoteSchema(_PaginatedSchema):
    sort: str = Field(default="createdAt")
    order: SortOrderType = Field(default=SortOrderType.desc)
    tags: Optional[List[str]] = Field(default=[])
    shared_only: bool = Field(default=False)
    mine_only: bool = Field(default=False)

    class Config:
        alias_generator = attribute_to_camel_case


class SessionNoteSchema(BaseModel):
    message: str = Field(..., min_length=2)
    tag: Optional[str] = Field(default=None)
    timestamp: int = Field(default=-1)
    is_public: bool = Field(default=False)

    class Config:
        alias_generator = attribute_to_camel_case


class SessionUpdateNoteSchema(SessionNoteSchema):
    message: Optional[str] = Field(default=None, min_length=2)
    timestamp: Optional[int] = Field(default=None, ge=-1)
    is_public: Optional[bool] = Field(default=None)

    @root_validator
    def validator(cls, values):
        assert len(values.keys()) > 0, "at least 1 attribute should be provided for update"
        c = 0
        for v in values.values():
            if v is not None and (not isinstance(v, str) or len(v) > 0):
                c += 1
                break
        assert c > 0, "at least 1 value should be provided for update"
        return values

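SessionUpdateNoteSchema makes every inherited field optional and then relies on a pydantic (v1) root_validator: assertion failures inside a validator surface as a ValidationError, so an update carrying no usable values is rejected at parse time. The same pattern in compact, standalone form:

    from typing import Optional
    from pydantic import BaseModel, Field, ValidationError, root_validator

    class UpdateSchema(BaseModel):
        message: Optional[str] = Field(default=None, min_length=2)
        is_public: Optional[bool] = Field(default=None)

        @root_validator
        def at_least_one_value(cls, values):
            # Reject an update in which every provided value is None.
            assert any(v is not None for v in values.values()), \
                "at least 1 value should be provided for update"
            return values

    UpdateSchema(message="renamed")  # valid
    try:
        UpdateSchema()               # nothing to update -> ValidationError
    except ValidationError as e:
        print(e.errors()[0]["msg"])
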
@@ -1,26 +1,20 @@
FROM python:3.10-alpine
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
RUN apk add --no-cache build-base libressl libffi-dev libressl-dev libxslt-dev libxml2-dev xmlsec-dev xmlsec nodejs npm tini
RUN apk add --no-cache build-base libressl libffi-dev libressl-dev libxslt-dev libxml2-dev xmlsec-dev xmlsec tini
ARG envarg
ENV SOURCE_MAP_VERSION=0.7.4 \
    APP_NAME=chalice \
    LISTEN_PORT=8000 \
    MAPPING_WASM=/work/sourcemap-reader/mappings.wasm \
    ENTERPRISE_BUILD=${envarg}

ADD https://unpkg.com/source-map@${SOURCE_MAP_VERSION}/lib/mappings.wasm /mappings.wasm

WORKDIR /work_tmp
COPY requirements.txt /work_tmp/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /work_tmp/requirements.txt
COPY sourcemap-reader/*.json /work_tmp/
RUN cd /work_tmp && npm install

WORKDIR /work
COPY . .
RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/. \
    && mv /mappings.wasm ${MAPPING_WASM} && chmod 644 ${MAPPING_WASM}
RUN mv env.default .env

RUN adduser -u 1001 openreplay -D
USER 1001

@@ -9,6 +9,7 @@ from fastapi.middleware.gzip import GZipMiddleware
from starlette import status
from starlette.responses import StreamingResponse, JSONResponse

from chalicelib.core import traces
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from routers import core, core_dynamic, ee, saml

@@ -27,21 +28,14 @@ async def or_middleware(request: Request, call_next):
    if not unlock.is_valid():
        return JSONResponse(content={"errors": ["expired license"]}, status_code=status.HTTP_403_FORBIDDEN)

    global OR_SESSION_TOKEN
    OR_SESSION_TOKEN = request.headers.get('vnd.openreplay.com.sid', request.headers.get('vnd.asayer.io.sid'))
    try:
        if helper.TRACK_TIME:
            import time
            now = int(time.time() * 1000)
        response: StreamingResponse = await call_next(request)
        if helper.TRACK_TIME:
            now = int(time.time() * 1000) - now
            if now > 500:
                print(f"Execution time: {now} ms")
    except Exception as e:
        pg_client.close()
        raise e
    pg_client.close()
    if helper.TRACK_TIME:
        import time
        now = int(time.time() * 1000)
    response: StreamingResponse = await call_next(request)
    if helper.TRACK_TIME:
        now = int(time.time() * 1000) - now
        if now > 500:
            logging.info(f"Execution time: {now} ms")
    return response

@@ -74,18 +68,41 @@ app.include_router(insights.app)
app.include_router(v1_api.app_apikey)
app.include_router(v1_api_ee.app_apikey)

app.queue_system = queue.Queue()
loglevel = config("LOGLEVEL", default=logging.INFO)
print(f">Loglevel set to: {loglevel}")
logging.basicConfig(level=loglevel)
ap_logger = logging.getLogger('apscheduler')
ap_logger.setLevel(loglevel)
app.schedule = AsyncIOScheduler()
app.schedule.start()
app.queue_system = queue.Queue()

for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs:
    app.schedule.add_job(id=job["func"].__name__, **job)
from chalicelib.core import traces

app.schedule.add_job(id="trace_worker", **traces.cron_jobs[0])
@app.on_event("startup")
async def startup():
    logging.info(">>>>> starting up <<<<<")
    await pg_client.init()
    app.schedule.start()

for job in app.schedule.get_jobs():
    print({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
    for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs + traces.cron_jobs:
        app.schedule.add_job(id=job["func"].__name__, **job)

    ap_logger.info(">Scheduled jobs:")
    for job in app.schedule.get_jobs():
        ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})


@app.on_event("shutdown")
async def shutdown():
    logging.info(">>>>> shutting down <<<<<")
    app.schedule.shutdown(wait=True)
    await traces.process_traces_queue()
    await pg_client.terminate()


@app.get('/private/shutdown', tags=["private"])
async def stop_server():
    logging.info("Requested shutdown")
    await shutdown()
    import os, signal
    os.kill(1, signal.SIGTERM)

logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
logging.getLogger('apscheduler').setLevel(config("LOGLEVEL", default=logging.INFO))

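The cron modules expose `cron_jobs` as lists of keyword dicts that are splatted directly into `AsyncIOScheduler.add_job`, so every dict must carry at least `func` plus trigger arguments. A minimal sketch of that convention (the job body and interval are illustrative):

    from apscheduler.schedulers.asyncio import AsyncIOScheduler

    async def telemetry():
        print("weekly telemetry tick")

    # Same shape the app expects: splatted into add_job(id=..., **job).
    cron_jobs = [{"func": telemetry, "trigger": "interval", "hours": 24 * 7}]

    schedule = AsyncIOScheduler()
    for job in cron_jobs:
        schedule.add_job(id=job["func"].__name__, **job)
    schedule.start()  # requires a running asyncio event loop
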
@@ -18,7 +18,11 @@ else:

if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
    print(">>> Using experimental error search")
    from . import errors as errors_legacy
    from . import errors_exp as errors

    if config("EXP_ERRORS_GET", cast=bool, default=False):
        print(">>> Using experimental error get")
else:
    from . import errors as errors

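This is an import-time feature flag: the package decides once which implementation is bound to the public name `errors`, while the legacy module stays importable as `errors_legacy` for per-call fallbacks. The generic shape of the pattern, as reconstructed from the hunk above:

    from decouple import config

    if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
        # Callers keep importing `errors`; only the binding changes.
        from . import errors as errors_legacy
        from . import errors_exp as errors
    else:
        from . import errors as errors
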
@@ -16,7 +16,7 @@ def jwt_authorizer(token):
            token[1],
            config("jwt_secret"),
            algorithms=config("jwt_algorithm"),
            audience=[f"plugin:{helper.get_stage_name()}", f"front:{helper.get_stage_name()}"]
            audience=[f"front:{helper.get_stage_name()}"]
        )
    except jwt.ExpiredSignatureError:
        print("! JWT Expired signature")

@@ -38,13 +38,12 @@ def jwt_context(context):
    }


def generate_jwt(id, tenant_id, iat, aud, exp=None):
def generate_jwt(id, tenant_id, iat, aud):
    token = jwt.encode(
        payload={
            "userId": id,
            "tenantId": tenant_id,
            "exp": iat // 1000 + int(config("JWT_EXP_DELTA_SECONDS")) + TimeUTC.get_utc_offset() // 1000 \
                if exp is None else exp + TimeUTC.get_utc_offset() // 1000,
            "exp": iat // 1000 + config("JWT_EXPIRATION", cast=int) + TimeUTC.get_utc_offset() // 1000,
            "iss": config("JWT_ISSUER"),
            "iat": iat // 1000,
            "aud": aud

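Both sides of this change go through PyJWT: `generate_jwt` encodes the claims and `jwt.decode` raises when the token's `aud` claim is not in the accepted audience list, which is why dropping `plugin:...` invalidates plugin-issued tokens. A self-contained encode/verify sketch (secret and claim values are illustrative):

    import time
    import jwt  # PyJWT

    SECRET, ALGO = "change-me", "HS512"
    now = int(time.time())
    token = jwt.encode(payload={"userId": 1, "iat": now, "exp": now + 2592000,
                                "iss": "openreplay-oss", "aud": "front:default-foss"},
                       key=SECRET, algorithm=ALGO)

    claims = jwt.decode(token, SECRET, algorithms=[ALGO],
                        audience=["front:default-foss"])  # raises InvalidAudienceError on mismatch
    print(claims["userId"])
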
@@ -90,6 +90,18 @@ def __process_tags(row):
def get_details(project_id, error_id, user_id, **data):
    pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24")
    pg_sub_query24.append("error_id = %(error_id)s")
    pg_sub_query30_session = __get_basic_constraints(time_constraint=True, chart=False, startTime_arg_name="startDate30",
                                                     endTime_arg_name="endDate30", project_key="sessions.project_id")
    pg_sub_query30_session.append("sessions.start_ts >= %(startDate30)s")
    pg_sub_query30_session.append("sessions.start_ts <= %(endDate30)s")
    pg_sub_query30_session.append("error_id = %(error_id)s")
    pg_sub_query30_err = __get_basic_constraints(time_constraint=True, chart=False, startTime_arg_name="startDate30",
                                                 endTime_arg_name="endDate30", project_key="errors.project_id")
    pg_sub_query30_err.append("sessions.project_id = %(project_id)s")
    pg_sub_query30_err.append("sessions.start_ts >= %(startDate30)s")
    pg_sub_query30_err.append("sessions.start_ts <= %(endDate30)s")
    pg_sub_query30_err.append("error_id = %(error_id)s")
    pg_sub_query30_err.append("source ='js_exception'")
    pg_sub_query30 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size30")
    pg_sub_query30.append("error_id = %(error_id)s")
    pg_basic_query = __get_basic_constraints(time_constraint=False)

@@ -128,50 +140,49 @@ def get_details(project_id, error_id, user_id, **data):
           device_partition,
           country_partition,
           chart24,
           chart30
           chart30,
           custom_tags
    FROM (SELECT error_id,
                 name,
                 message,
                 COUNT(DISTINCT user_uuid) AS users,
                 COUNT(DISTINCT user_id) AS users,
                 COUNT(DISTINCT session_id) AS sessions
          FROM public.errors
                   INNER JOIN events.errors AS s_errors USING (error_id)
                   INNER JOIN public.sessions USING (session_id)
          WHERE error_id = %(error_id)s
          WHERE {" AND ".join(pg_sub_query30_err)}
          GROUP BY error_id, name, message) AS details
         INNER JOIN (SELECT error_id,
                            MAX(timestamp) AS last_occurrence,
         INNER JOIN (SELECT MAX(timestamp) AS last_occurrence,
                            MIN(timestamp) AS first_occurrence
                     FROM events.errors
                     WHERE error_id = %(error_id)s
                     GROUP BY error_id) AS time_details USING (error_id)
         INNER JOIN (SELECT error_id,
                            session_id AS last_session_id,
                            user_os,
                            user_os_version,
                            user_browser,
                            user_browser_version,
                            user_device,
                            user_device_type,
                            user_uuid
                     FROM events.errors INNER JOIN public.sessions USING (session_id)
                     GROUP BY error_id) AS time_details ON (TRUE)
         INNER JOIN (SELECT session_id AS last_session_id,
                            coalesce(custom_tags, '[]')::jsonb AS custom_tags
                     FROM events.errors
                              LEFT JOIN LATERAL (
                         SELECT jsonb_agg(jsonb_build_object(errors_tags.key, errors_tags.value)) AS custom_tags
                         FROM errors_tags
                         WHERE errors_tags.error_id = %(error_id)s
                           AND errors_tags.session_id = errors.session_id
                           AND errors_tags.message_id = errors.message_id) AS errors_tags ON (TRUE)
                     WHERE error_id = %(error_id)s
                     ORDER BY errors.timestamp DESC
                     LIMIT 1) AS last_session_details USING (error_id)
                     LIMIT 1) AS last_session_details ON (TRUE)
         INNER JOIN (SELECT jsonb_agg(browser_details) AS browsers_partition
                     FROM (SELECT *
                           FROM (SELECT user_browser AS name,
                                        COUNT(session_id) AS count
                                 FROM events.errors
                                          INNER JOIN sessions USING (session_id)
                                 WHERE {" AND ".join(pg_basic_query)}
                                 WHERE {" AND ".join(pg_sub_query30_session)}
                                 GROUP BY user_browser
                                 ORDER BY count DESC) AS count_per_browser_query
                                    INNER JOIN LATERAL (SELECT JSONB_AGG(version_details) AS partition
                                                        FROM (SELECT user_browser_version AS version,
                                                                     COUNT(session_id) AS count
                                                              FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                              WHERE {" AND ".join(pg_basic_query)}
                                                              WHERE {" AND ".join(pg_sub_query30_session)}
                                                                AND sessions.user_browser = count_per_browser_query.name
                                                              GROUP BY user_browser_version
                                                              ORDER BY count DESC) AS version_details

@@ -181,13 +192,13 @@ def get_details(project_id, error_id, user_id, **data):
                     FROM (SELECT user_os AS name,
                                  COUNT(session_id) AS count
                           FROM events.errors INNER JOIN public.sessions USING (session_id)
                           WHERE {" AND ".join(pg_basic_query)}
                           WHERE {" AND ".join(pg_sub_query30_session)}
                           GROUP BY user_os
                           ORDER BY count DESC) AS count_per_os_details
                              INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition
                                                  FROM (SELECT COALESCE(user_os_version,'unknown') AS version, COUNT(session_id) AS count
                                                        FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                        WHERE {" AND ".join(pg_basic_query)}
                                                        WHERE {" AND ".join(pg_sub_query30_session)}
                                                          AND sessions.user_os = count_per_os_details.name
                                                        GROUP BY user_os_version
                                                        ORDER BY count DESC) AS count_per_version_details

@@ -198,7 +209,7 @@ def get_details(project_id, error_id, user_id, **data):
                     FROM (SELECT user_device_type AS name,
                                  COUNT(session_id) AS count
                           FROM events.errors INNER JOIN public.sessions USING (session_id)
                           WHERE {" AND ".join(pg_basic_query)}
                           WHERE {" AND ".join(pg_sub_query30_session)}
                           GROUP BY user_device_type
                           ORDER BY count DESC) AS count_per_device_details
                              INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_v_details) AS partition

@@ -208,7 +219,7 @@ def get_details(project_id, error_id, user_id, **data):
                                                                 ELSE user_device END AS version,
                                                               COUNT(session_id) AS count
                                                        FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                        WHERE {" AND ".join(pg_basic_query)}
                                                        WHERE {" AND ".join(pg_sub_query30_session)}
                                                          AND sessions.user_device_type = count_per_device_details.name
                                                        GROUP BY user_device
                                                        ORDER BY count DESC) AS count_per_device_v_details

@@ -218,7 +229,7 @@ def get_details(project_id, error_id, user_id, **data):
                     FROM (SELECT user_country AS name,
                                  COUNT(session_id) AS count
                           FROM events.errors INNER JOIN public.sessions USING (session_id)
                           WHERE {" AND ".join(pg_basic_query)}
                           WHERE {" AND ".join(pg_sub_query30_session)}
                           GROUP BY user_country
                           ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE)
         INNER JOIN (SELECT jsonb_agg(chart_details) AS chart24

@@ -2,10 +2,12 @@ import json

import schemas
from chalicelib.core import metrics, metadata
from chalicelib.core import errors_legacy
from chalicelib.core import sourcemaps, sessions
from chalicelib.utils import ch_client, metrics_helper, exp_ch_helper
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from decouple import config


def _multiple_values(values, value_key="value"):

@@ -113,6 +115,18 @@ def __flatten_sort_key_count_version(data, merge_nested=False):
    ]


def __transform_map_to_tag(data, key1, key2, requested_key):
    result = []
    for i in data:
        if requested_key == 0 and i.get(key1) is None and i.get(key2) is None:
            result.append({"name": "all", "count": int(i.get("count"))})
        elif requested_key == 1 and i.get(key1) is not None and i.get(key2) is None:
            result.append({"name": i.get(key1), "count": int(i.get("count"))})
        elif requested_key == 2 and i.get(key1) is not None and i.get(key2) is not None:
            result.append({"name": i.get(key2), "count": int(i.get("count"))})
    return result


def __flatten_sort_key_count(data):
    if data is None:
        return []

@@ -148,19 +162,85 @@ def __process_tags(row):
    ]


def get_details(project_id, error_id, user_id, **data):
    # now=TimeUTC.now()
def __process_tags_map(row):
    browsers_partition = row.pop("browsers_partition")
    os_partition = row.pop("os_partition")
    device_partition = row.pop("device_partition")
    country_partition = row.pop("country_partition")
    return [
        {"name": "browser",
         "partitions": __transform_map_to_tag(data=browsers_partition,
                                              key1="browser",
                                              key2="browser_version",
                                              requested_key=1)},
        {"name": "browser.ver",
         "partitions": __transform_map_to_tag(data=browsers_partition,
                                              key1="browser",
                                              key2="browser_version",
                                              requested_key=2)},
        {"name": "OS",
         "partitions": __transform_map_to_tag(data=os_partition,
                                              key1="os",
                                              key2="os_version",
                                              requested_key=1)},
        {"name": "OS.ver",
         "partitions": __transform_map_to_tag(data=os_partition,
                                              key1="os",
                                              key2="os_version",
                                              requested_key=2)},
        {"name": "device.family",
         "partitions": __transform_map_to_tag(data=device_partition,
                                              key1="device_type",
                                              key2="device",
                                              requested_key=1)},
        {"name": "device",
         "partitions": __transform_map_to_tag(data=device_partition,
                                              key1="device_type",
                                              key2="device",
                                              requested_key=2)},
        {"name": "country", "partitions": __transform_map_to_tag(data=country_partition,
                                                                 key1="country",
                                                                 key2="",
                                                                 requested_key=1)}
    ]


def get_details_deprecated(project_id, error_id, user_id, **data):
    if not config("EXP_ERRORS_GET", cast=bool, default=False):
        return errors_legacy.get_details(project_id, error_id, user_id, **data)

    MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(0)
    MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0)
    MAIN_EVENTS_TABLE_24 = exp_ch_helper.get_main_events_table(TimeUTC.now())

    ch_sub_query24 = __get_basic_constraints(startTime_arg_name="startDate24", endTime_arg_name="endDate24")
    ch_sub_query24.append("error_id = %(error_id)s")
    ch_sub_query30 = __get_basic_constraints(startTime_arg_name="startDate30", endTime_arg_name="endDate30")
    pg_sub_query30_err = __get_basic_constraints(time_constraint=True, startTime_arg_name="startDate30",
                                                 endTime_arg_name="endDate30", project_key="errors.project_id",
                                                 table_name="errors")
    pg_sub_query30_err.append("sessions.project_id = toUInt16(%(project_id)s)")
    pg_sub_query30_err.append("sessions.datetime >= toDateTime(%(startDate30)s/1000)")
    pg_sub_query30_err.append("sessions.datetime <= toDateTime(%(endDate30)s/1000)")
    pg_sub_query30_err.append("error_id = %(error_id)s")
    pg_sub_query30_err.append("source ='js_exception'")
    ch_sub_query30 = __get_basic_constraints(startTime_arg_name="startDate30", endTime_arg_name="endDate30",
                                             project_key="errors.project_id")
    ch_sub_query30.append("error_id = %(error_id)s")
    ch_basic_query = __get_basic_constraints(time_constraint=False)
    ch_basic_query.append("error_id = %(error_id)s")
    ch_basic_query_session = ch_basic_query[:]
    ch_basic_query_session.append("sessions.project_id = toUInt16(%(project_id)s)")
    with ch_client.ClickHouseClient() as ch:
        data["startDate24"] = TimeUTC.now(-1)
        data["endDate24"] = TimeUTC.now()
        data["startDate30"] = TimeUTC.now(-30)
        data["endDate30"] = TimeUTC.now()
        # # TODO: remove time limits
        # data["startDate24"] = 1650470729000 - 24 * 60 * 60 * 1000
        # data["endDate24"] = 1650470729000
        # data["startDate30"] = 1650470729000 - 30 * 60 * 60 * 1000
        # data["endDate30"] = 1650470729000
        density24 = int(data.get("density24", 24))
        step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24)
        density30 = int(data.get("density30", 30))

@@ -196,28 +276,27 @@ def get_details(project_id, error_id, user_id, **data):
               message,
               COUNT(DISTINCT user_uuid) AS users,
               COUNT(DISTINCT session_id) AS sessions
        FROM errors
        WHERE error_id = %(error_id)s
        FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id)
        WHERE {" AND ".join(pg_sub_query30_err)}
        GROUP BY error_id, name, message) AS details
           INNER JOIN (SELECT error_id,
                              toUnixTimestamp(MAX(datetime)) * 1000 AS last_occurrence,
                              toUnixTimestamp(MIN(datetime)) * 1000 AS first_occurrence
                       FROM errors
                       WHERE error_id = %(error_id)s
                              toUnixTimestamp(max(datetime)) * 1000 AS last_occurrence,
                              toUnixTimestamp(min(datetime)) * 1000 AS first_occurrence
                       FROM {MAIN_EVENTS_TABLE} AS errors
                       WHERE {" AND ".join(ch_basic_query)}
                       GROUP BY error_id) AS time_details
                      ON details.error_id = time_details.error_id
           INNER JOIN (SELECT error_id, session_id AS last_session_id, user_os, user_os_version, user_browser, user_browser_version, user_device, user_device_type, user_uuid
                       FROM errors
                       WHERE error_id = %(error_id)s
                       ORDER BY datetime DESC
                       FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id)
                       WHERE {" AND ".join(ch_basic_query_session)}
                       ORDER BY errors.datetime DESC
                       LIMIT 1) AS last_session_details ON last_session_details.error_id = details.error_id
           INNER JOIN (SELECT %(error_id)s AS error_id,
                              groupArray(
                                  [[[user_browser]], [[toString(count_per_browser)]],versions_partition]) AS browsers_partition
           INNER JOIN (SELECT %(error_id)s AS error_id,
                              groupArray([[[user_browser]], [[toString(count_per_browser)]],versions_partition]) AS browsers_partition
                       FROM (SELECT user_browser,
                                    COUNT(session_id) AS count_per_browser
                             FROM errors
                             WHERE {" AND ".join(ch_basic_query)}
                             FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id)
                             WHERE {" AND ".join(pg_sub_query30_err)}
                             GROUP BY user_browser
                             ORDER BY count_per_browser DESC) AS count_per_browser_query
                                INNER JOIN (SELECT user_browser,

@@ -225,63 +304,61 @@ def get_details(project_id, error_id, user_id, **data):
                                            FROM (SELECT user_browser,
                                                         user_browser_version,
                                                         COUNT(session_id) AS count_per_version
                                                  FROM errors
                                                  WHERE {" AND ".join(ch_basic_query)}
                                                  FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id)
                                                  WHERE {" AND ".join(pg_sub_query30_err)}
                                                  GROUP BY user_browser, user_browser_version
                                                  ORDER BY count_per_version DESC) AS version_details
                                            GROUP BY user_browser ) AS browser_version_details USING (user_browser)) AS browser_details
                      ON browser_details.error_id = details.error_id
           INNER JOIN (SELECT %(error_id)s AS error_id,
           INNER JOIN (SELECT %(error_id)s AS error_id,
                              groupArray([[[user_os]], [[toString(count_per_os)]],versions_partition]) AS os_partition
                       FROM (SELECT user_os,
                                    COUNT(session_id) AS count_per_os
                             FROM errors
                             WHERE {" AND ".join(ch_basic_query)}
                             FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id)
                             WHERE {" AND ".join(pg_sub_query30_err)}
                             GROUP BY user_os
                             ORDER BY count_per_os DESC) AS count_per_os_details
                                INNER JOIN (SELECT user_os,
                                                   groupArray([user_os_version, toString(count_per_version)]) AS versions_partition
                                            FROM (SELECT user_os, user_os_version, COUNT(session_id) AS count_per_version
                                                  FROM errors
                                                  WHERE {" AND ".join(ch_basic_query)}
                                                  FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id)
                                                  WHERE {" AND ".join(pg_sub_query30_err)}
                                                  GROUP BY user_os, user_os_version
                                                  ORDER BY count_per_version DESC) AS count_per_version_details
                                            GROUP BY user_os ) AS os_version_details USING (user_os)) AS os_details
                      ON os_details.error_id = details.error_id
           INNER JOIN (SELECT %(error_id)s AS error_id,
                              groupArray(
                                  [[[toString(user_device_type)]], [[toString(count_per_device)]],versions_partition]) AS device_partition
           INNER JOIN (SELECT %(error_id)s AS error_id,
                              groupArray([[[toString(user_device_type)]], [[toString(count_per_device)]],versions_partition]) AS device_partition
                       FROM (SELECT user_device_type,
                                    COUNT(session_id) AS count_per_device
                             FROM errors
                             WHERE {" AND ".join(ch_basic_query)}
                             FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id)
                             WHERE {" AND ".join(pg_sub_query30_err)}
                             GROUP BY user_device_type
                             ORDER BY count_per_device DESC) AS count_per_device_details
                                INNER JOIN (SELECT user_device_type,
                                                   groupArray([user_device, toString(count_per_device)]) AS versions_partition
                                            FROM (SELECT user_device_type,
                                                         COALESCE(user_device,'unknown') AS user_device,
                                                         coalesce(user_device,'unknown') AS user_device,
                                                         COUNT(session_id) AS count_per_device
                                                  FROM errors
                                                  WHERE {" AND ".join(ch_basic_query)}
                                                  FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id)
                                                  WHERE {" AND ".join(pg_sub_query30_err)}
                                                  GROUP BY user_device_type, user_device
                                                  ORDER BY count_per_device DESC) AS count_per_device_details
                                            GROUP BY user_device_type ) AS device_version_details USING (user_device_type)) AS device_details
                      ON device_details.error_id = details.error_id
           INNER JOIN (SELECT %(error_id)s AS error_id,
                              groupArray(
                                  [[[toString(user_country)]], [[toString(count_per_country)]]]) AS country_partition
INNER JOIN (SELECT %(error_id)s AS error_id,
|
||||
groupArray([[[toString(user_country)]], [[toString(count_per_country)]]]) AS country_partition
|
||||
FROM (SELECT user_country,
|
||||
COUNT(session_id) AS count_per_country
|
||||
FROM errors
|
||||
WHERE {" AND ".join(ch_basic_query)}
|
||||
FROM {MAIN_EVENTS_TABLE} AS errors INNER JOIN {MAIN_SESSIONS_TABLE} AS sessions USING (session_id)
|
||||
WHERE {" AND ".join(pg_sub_query30_err)}
|
||||
GROUP BY user_country
|
||||
ORDER BY count_per_country DESC) AS count_per_country_details) AS country_details
|
||||
ON country_details.error_id = details.error_id
|
||||
INNER JOIN (SELECT %(error_id)s AS error_id, groupArray([timestamp, count]) AS chart24
|
||||
FROM (SELECT toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size24)s second)) * 1000 AS timestamp,
|
||||
COUNT(DISTINCT session_id) AS count
|
||||
FROM errors
|
||||
FROM {MAIN_EVENTS_TABLE_24} AS errors
|
||||
WHERE {" AND ".join(ch_sub_query24)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp) AS chart_details) AS chart_details24
|
||||
|
|
@ -289,14 +366,14 @@ def get_details(project_id, error_id, user_id, **data):
|
|||
INNER JOIN (SELECT %(error_id)s AS error_id, groupArray([timestamp, count]) AS chart30
|
||||
FROM (SELECT toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size30)s second)) * 1000 AS timestamp,
|
||||
COUNT(DISTINCT session_id) AS count
|
||||
FROM errors
|
||||
FROM {MAIN_EVENTS_TABLE} AS errors
|
||||
WHERE {" AND ".join(ch_sub_query30)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp) AS chart_details) AS chart_details30
|
||||
ON details.error_id = chart_details30.error_id;"""
|
||||
|
||||
# print("--------------------")
|
||||
# print(main_ch_query % params)
|
||||
# print(ch.format(main_ch_query, params))
|
||||
# print("--------------------")
|
||||
row = ch.execute(query=main_ch_query, params=params)
|
||||
if len(row) == 0:
|
||||
|
|
@ -306,7 +383,7 @@ def get_details(project_id, error_id, user_id, **data):
|
|||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(
|
||||
f"""SELECT error_id, status, session_id, start_ts,
|
||||
parent_error_id,session_id, user_anonymous_id,
|
||||
parent_error_id, user_anonymous_id,
|
||||
user_id, user_uuid, user_browser, user_browser_version,
|
||||
user_os, user_os_version, user_device, payload,
|
||||
FALSE AS favorite,
|
||||
|
|
@ -338,11 +415,208 @@ def get_details(project_id, error_id, user_id, **data):
|
|||
row["favorite"] = False
|
||||
row["viewed"] = False
|
||||
row["chart24"] = __rearrange_chart_details(start_at=data["startDate24"], end_at=data["endDate24"],
|
||||
density=density24,
|
||||
chart=row["chart24"])
|
||||
density=density24, chart=row["chart24"])
|
||||
row["chart30"] = __rearrange_chart_details(start_at=data["startDate30"], end_at=data["endDate30"],
|
||||
density=density30,
|
||||
chart=row["chart30"])
|
||||
density=density30, chart=row["chart30"])
|
||||
return {"data": helper.dict_to_camel_case(row)}
|
||||
|
||||
|
||||
def get_details(project_id, error_id, user_id, **data):
|
||||
if not config("EXP_ERRORS_GET", cast=bool, default=False):
|
||||
return errors_legacy.get_details(project_id, error_id, user_id, **data)
|
||||
|
||||
MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(0)
|
||||
MAIN_ERR_SESS_TABLE = exp_ch_helper.get_main_js_errors_sessions_table(0)
|
||||
MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0)
|
||||
MAIN_EVENTS_TABLE_24 = exp_ch_helper.get_main_events_table(TimeUTC.now())
|
||||
|
||||
ch_sub_query24 = __get_basic_constraints(startTime_arg_name="startDate24", endTime_arg_name="endDate24")
|
||||
ch_sub_query24.append("error_id = %(error_id)s")
|
||||
# pg_sub_query30_err = __get_basic_constraints(time_constraint=True, startTime_arg_name="startDate30",
|
||||
# endTime_arg_name="endDate30", project_key="errors.project_id",
|
||||
# table_name="errors")
|
||||
# pg_sub_query30_err.append("sessions.project_id = toUInt16(%(project_id)s)")
|
||||
# pg_sub_query30_err.append("sessions.datetime >= toDateTime(%(startDate30)s/1000)")
|
||||
# pg_sub_query30_err.append("sessions.datetime <= toDateTime(%(endDate30)s/1000)")
|
||||
# pg_sub_query30_err.append("error_id = %(error_id)s")
|
||||
# pg_sub_query30_err.append("source ='js_exception'")
|
||||
ch_sub_query30 = __get_basic_constraints(startTime_arg_name="startDate30", endTime_arg_name="endDate30",
|
||||
project_key="errors.project_id")
|
||||
ch_sub_query30.append("error_id = %(error_id)s")
|
||||
ch_basic_query = __get_basic_constraints(time_constraint=False)
|
||||
ch_basic_query.append("error_id = %(error_id)s")
|
||||
# ch_basic_query_session = ch_basic_query[:]
|
||||
# ch_basic_query_session.append("sessions.project_id = toUInt16(%(project_id)s)")
|
||||
with ch_client.ClickHouseClient() as ch:
|
||||
data["startDate24"] = TimeUTC.now(-1)
|
||||
data["endDate24"] = TimeUTC.now()
|
||||
data["startDate30"] = TimeUTC.now(-30)
|
||||
data["endDate30"] = TimeUTC.now()
|
||||
# # TODO: remove time limits
|
||||
# data["startDate24"] = 1650470729000 - 24 * 60 * 60 * 1000
|
||||
# data["endDate24"] = 1650470729000
|
||||
# data["startDate30"] = 1650470729000 - 30 * 60 * 60 * 1000
|
||||
# data["endDate30"] = 1650470729000
|
||||
density24 = int(data.get("density24", 24))
|
||||
step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24)
|
||||
density30 = int(data.get("density30", 30))
|
||||
step_size30 = __get_step_size(data["startDate30"], data["endDate30"], density30)
|
||||
params = {
|
||||
"startDate24": data['startDate24'],
|
||||
"endDate24": data['endDate24'],
|
||||
"startDate30": data['startDate30'],
|
||||
"endDate30": data['endDate30'],
|
||||
"project_id": project_id,
|
||||
"userId": user_id,
|
||||
"step_size24": step_size24,
|
||||
"step_size30": step_size30,
|
||||
"error_id": error_id}
|
||||
|
||||
main_ch_query = f"""\
|
||||
WITH pre_processed AS (SELECT error_id,
|
||||
name,
|
||||
message,
|
||||
session_id,
|
||||
datetime,
|
||||
user_id,
|
||||
user_browser,
|
||||
user_browser_version,
|
||||
user_os,
|
||||
user_os_version,
|
||||
user_device_type,
|
||||
user_device,
|
||||
user_country,
|
||||
error_tags_keys,
|
||||
error_tags_values
|
||||
FROM {MAIN_ERR_SESS_TABLE} AS errors
|
||||
WHERE {" AND ".join(ch_basic_query)}
|
||||
)
|
||||
SELECT %(error_id)s AS error_id, name, message,users,
|
||||
first_occurrence,last_occurrence,last_session_id,
|
||||
sessions,browsers_partition,os_partition,device_partition,
|
||||
country_partition,chart24,chart30,custom_tags
|
||||
FROM (SELECT error_id,
|
||||
name,
|
||||
message
|
||||
FROM pre_processed
|
||||
LIMIT 1) AS details
|
||||
INNER JOIN (SELECT COUNT(DISTINCT user_id) AS users,
|
||||
COUNT(DISTINCT session_id) AS sessions
|
||||
FROM pre_processed
|
||||
WHERE datetime >= toDateTime(%(startDate30)s / 1000)
|
||||
AND datetime <= toDateTime(%(endDate30)s / 1000)
|
||||
) AS last_month_stats ON TRUE
|
||||
INNER JOIN (SELECT toUnixTimestamp(max(datetime)) * 1000 AS last_occurrence,
|
||||
toUnixTimestamp(min(datetime)) * 1000 AS first_occurrence
|
||||
FROM pre_processed) AS time_details ON TRUE
|
||||
INNER JOIN (SELECT session_id AS last_session_id,
|
||||
arrayMap((key, value)->(map(key, value)), error_tags_keys, error_tags_values) AS custom_tags
|
||||
FROM pre_processed
|
||||
ORDER BY datetime DESC
|
||||
LIMIT 1) AS last_session_details ON TRUE
|
||||
INNER JOIN (SELECT groupArray(details) AS browsers_partition
|
||||
FROM (SELECT COUNT(1) AS count,
|
||||
coalesce(nullIf(user_browser,''),toNullable('unknown')) AS browser,
|
||||
coalesce(nullIf(user_browser_version,''),toNullable('unknown')) AS browser_version,
|
||||
map('browser', browser,
|
||||
'browser_version', browser_version,
|
||||
'count', toString(count)) AS details
|
||||
FROM pre_processed
|
||||
GROUP BY ROLLUP(browser, browser_version)
|
||||
ORDER BY browser nulls first, browser_version nulls first, count DESC) AS mapped_browser_details
|
||||
) AS browser_details ON TRUE
|
||||
INNER JOIN (SELECT groupArray(details) AS os_partition
|
||||
FROM (SELECT COUNT(1) AS count,
|
||||
coalesce(nullIf(user_os,''),toNullable('unknown')) AS os,
|
||||
coalesce(nullIf(user_os_version,''),toNullable('unknown')) AS os_version,
|
||||
map('os', os,
|
||||
'os_version', os_version,
|
||||
'count', toString(count)) AS details
|
||||
FROM pre_processed
|
||||
GROUP BY ROLLUP(os, os_version)
|
||||
ORDER BY os nulls first, os_version nulls first, count DESC) AS mapped_os_details
|
||||
) AS os_details ON TRUE
|
||||
INNER JOIN (SELECT groupArray(details) AS device_partition
|
||||
FROM (SELECT COUNT(1) AS count,
|
||||
coalesce(nullIf(user_device,''),toNullable('unknown')) AS user_device,
|
||||
map('device_type', toString(user_device_type),
|
||||
'device', user_device,
|
||||
'count', toString(count)) AS details
|
||||
FROM pre_processed
|
||||
GROUP BY ROLLUP(user_device_type, user_device)
|
||||
ORDER BY user_device_type nulls first, user_device nulls first, count DESC
|
||||
) AS count_per_device_details
|
||||
) AS mapped_device_details ON TRUE
|
||||
INNER JOIN (SELECT groupArray(details) AS country_partition
|
||||
FROM (SELECT COUNT(1) AS count,
|
||||
map('country', toString(user_country),
|
||||
'count', toString(count)) AS details
|
||||
FROM pre_processed
|
||||
GROUP BY user_country
|
||||
ORDER BY count DESC) AS count_per_country_details
|
||||
) AS mapped_country_details ON TRUE
|
||||
INNER JOIN (SELECT groupArray(map('timestamp', timestamp, 'count', count)) AS chart24
|
||||
FROM (SELECT toUnixTimestamp(toStartOfInterval(datetime, INTERVAL 3756 second)) *
|
||||
1000 AS timestamp,
|
||||
COUNT(DISTINCT session_id) AS count
|
||||
FROM {MAIN_EVENTS_TABLE} AS errors
|
||||
WHERE {" AND ".join(ch_sub_query24)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp) AS chart_details
|
||||
) AS chart_details24 ON TRUE
|
||||
INNER JOIN (SELECT groupArray(map('timestamp', timestamp, 'count', count)) AS chart30
|
||||
FROM (SELECT toUnixTimestamp(toStartOfInterval(datetime, INTERVAL 3724 second)) *
|
||||
1000 AS timestamp,
|
||||
COUNT(DISTINCT session_id) AS count
|
||||
FROM {MAIN_EVENTS_TABLE} AS errors
|
||||
WHERE {" AND ".join(ch_sub_query30)}
|
||||
GROUP BY timestamp
|
||||
ORDER BY timestamp) AS chart_details
|
||||
) AS chart_details30 ON TRUE;"""
|
||||
|
||||
# print("--------------------")
|
||||
# print(ch.format(main_ch_query, params))
|
||||
# print("--------------------")
|
||||
row = ch.execute(query=main_ch_query, params=params)
|
||||
if len(row) == 0:
|
||||
return {"errors": ["error not found"]}
|
||||
row = row[0]
|
||||
|
||||
row["tags"] = __process_tags_map(row)
|
||||
|
||||
query = f"""SELECT session_id, toUnixTimestamp(datetime) * 1000 AS start_ts,
|
||||
user_anonymous_id,user_id, user_uuid, user_browser, user_browser_version,
|
||||
user_os, user_os_version, user_device, FALSE AS favorite, True AS viewed
|
||||
FROM {MAIN_SESSIONS_TABLE} AS sessions
|
||||
WHERE project_id = toUInt16(%(project_id)s)
|
||||
AND session_id = %(session_id)s
|
||||
ORDER BY datetime DESC
|
||||
LIMIT 1;"""
|
||||
params = {"project_id": project_id, "session_id": row["last_session_id"], "userId": user_id}
|
||||
# print("--------------------")
|
||||
# print(ch.format(query, params))
|
||||
# print("--------------------")
|
||||
status = ch.execute(query=query, params=params)
|
||||
|
||||
if status is not None:
|
||||
status = status[0]
|
||||
# row["stack"] = format_first_stack_frame(status).pop("stack")
|
||||
# row["status"] = status.pop("status")
|
||||
# row["parent_error_id"] = status.pop("parent_error_id")
|
||||
row["favorite"] = status.pop("favorite")
|
||||
row["viewed"] = status.pop("viewed")
|
||||
row["last_hydrated_session"] = status
|
||||
else:
|
||||
# row["stack"] = []
|
||||
row["last_hydrated_session"] = None
|
||||
# row["status"] = "untracked"
|
||||
# row["parent_error_id"] = None
|
||||
row["favorite"] = False
|
||||
row["viewed"] = False
|
||||
row["chart24"] = metrics.__complete_missing_steps(start_time=data["startDate24"], end_time=data["endDate24"],
|
||||
density=density24, rows=row["chart24"], neutral={"count": 0})
|
||||
row["chart30"] = metrics.__complete_missing_steps(start_time=data["startDate30"], end_time=data["endDate30"],
|
||||
density=density30, rows=row["chart30"], neutral={"count": 0})
|
||||
return {"data": helper.dict_to_camel_case(row)}
|
||||
|
||||
|
||||
|
|
@ -423,7 +697,7 @@ def get_details_chart(project_id, error_id, user_id, **data):
|
|||
INNER JOIN (SELECT user_device_type,
|
||||
groupArray([user_device, toString(count_per_device)]) AS versions_partition
|
||||
FROM (SELECT user_device_type,
|
||||
COALESCE(user_device,'unknown') AS user_device,
|
||||
coalesce(user_device,'unknown') AS user_device,
|
||||
COUNT(session_id) AS count_per_device
|
||||
FROM errors
|
||||
WHERE {" AND ".join(ch_sub_query)}
|
||||
|
|
@ -463,13 +737,17 @@ def get_details_chart(project_id, error_id, user_id, **data):
|
|||
|
||||
|
||||
def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
|
||||
endTime_arg_name="endDate", type_condition=True):
|
||||
ch_sub_query = ["project_id =toUInt32(%(project_id)s)"]
|
||||
endTime_arg_name="endDate", type_condition=True, project_key="project_id", table_name=None):
|
||||
ch_sub_query = [f"{project_key} =toUInt16(%(project_id)s)"]
|
||||
if table_name is not None:
|
||||
table_name = table_name + "."
|
||||
else:
|
||||
table_name = ""
|
||||
if type_condition:
|
||||
ch_sub_query.append("event_type='ERROR'")
|
||||
ch_sub_query.append(f"{table_name}event_type='ERROR'")
|
||||
if time_constraint:
|
||||
ch_sub_query += [f"datetime >= toDateTime(%({startTime_arg_name})s/1000)",
|
||||
f"datetime < toDateTime(%({endTime_arg_name})s/1000)"]
|
||||
ch_sub_query += [f"{table_name}datetime >= toDateTime(%({startTime_arg_name})s/1000)",
|
||||
f"{table_name}datetime < toDateTime(%({endTime_arg_name})s/1000)"]
|
||||
if platform == schemas.PlatformType.mobile:
|
||||
ch_sub_query.append("user_device_type = 'mobile'")
|
||||
elif platform == schemas.PlatformType.desktop:
|
||||
|
|
@ -879,11 +1157,11 @@ def get_sessions(start_date, end_date, project_id, user_id, error_id):
|
|||
s.pages_count,
|
||||
s.errors_count,
|
||||
s.issue_types,
|
||||
COALESCE((SELECT TRUE
|
||||
coalesce((SELECT TRUE
|
||||
FROM public.user_favorite_sessions AS fs
|
||||
WHERE s.session_id = fs.session_id
|
||||
AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS favorite,
|
||||
COALESCE((SELECT TRUE
|
||||
coalesce((SELECT TRUE
|
||||
FROM public.user_viewed_sessions AS fs
|
||||
WHERE s.session_id = fs.session_id
|
||||
AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ def __get_basic_constraints(table_name=None, time_constraint=True, round_start=F
|
|||
table_name += "."
|
||||
else:
|
||||
table_name = ""
|
||||
ch_sub_query = [f"{table_name}{identifier} =toUInt32(%({identifier})s)"]
|
||||
ch_sub_query = [f"{table_name}{identifier} =toUInt16(%({identifier})s)"]
|
||||
if time_constraint:
|
||||
if round_start:
|
||||
ch_sub_query.append(
|
||||
|
|
@ -2012,7 +2012,7 @@ def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
|
|||
ch_sub_query = __get_basic_constraints(table_name="resources", round_start=True, data=args)
|
||||
ch_sub_query.append("resources.success = 0")
|
||||
ch_sub_query.append("resources.type IN ('fetch','script')")
|
||||
sch_sub_query = ["rs.project_id =toUInt32(%(project_id)s)", "rs.type IN ('fetch','script')"]
|
||||
sch_sub_query = ["rs.project_id =toUInt16(%(project_id)s)", "rs.type IN ('fetch','script')"]
|
||||
meta_condition = __get_meta_constraint(args)
|
||||
ch_sub_query += meta_condition
|
||||
# sch_sub_query += meta_condition
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ def get_by_session_id(session_id, project_id, start_ts, duration):
|
|||
encoded_body_size,decoded_body_size,success,
|
||||
if(success, 200, 400) AS status
|
||||
FROM {exp_ch_helper.get_main_resources_table(start_ts)}
|
||||
WHERE session_id = toUInt64(%(session_id)s)
|
||||
WHERE session_id = toUInt16(%(session_id)s)
|
||||
AND project_id = toUInt16(%(project_id)s)
|
||||
AND datetime >= toDateTime(%(res_start_ts)s / 1000)
|
||||
AND datetime <= toDateTime(%(res_end_ts)s / 1000);"""
|
||||
|
|
@ -25,8 +25,8 @@ def get_by_session_id(session_id, project_id, start_ts, duration):
|
|||
encoded_body_size,decoded_body_size,success,
|
||||
coalesce(status,if(success, 200, status)) AS status
|
||||
FROM resources
|
||||
WHERE session_id = toUInt64(%(session_id)s)
|
||||
AND project_id = toUInt64(%(project_id)s)
|
||||
WHERE session_id = toUInt16(%(session_id)s)
|
||||
AND project_id = toUInt16(%(project_id)s)
|
||||
AND datetime >= toDateTime(%(res_start_ts)s / 1000)
|
||||
AND datetime <= toDateTime(%(res_end_ts)s / 1000);"""
|
||||
params = {"session_id": session_id, "project_id": project_id, "start_ts": start_ts, "duration": duration,
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ import schemas
|
|||
import schemas_ee
|
||||
from chalicelib.core import events, metadata, events_ios, \
|
||||
sessions_mobs, issues, projects, errors, resources, assist, performance_event, sessions_viewed, sessions_favorite, \
|
||||
sessions_devtool
|
||||
sessions_devtool, sessions_notes
|
||||
from chalicelib.utils import pg_client, helper, metrics_helper
|
||||
|
||||
SESSION_PROJECTION_COLS = """s.project_id,
|
||||
|
|
@ -41,7 +41,7 @@ def __group_metadata(session, project_metadata):
|
|||
return meta
|
||||
|
||||
|
||||
def get_by_id2_pg(project_id, session_id, user_id, context: schemas_ee.CurrentContext, full_data=False,
|
||||
def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False,
|
||||
include_fav_viewed=False, group_metadata=False, live=True):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
extra_query = []
|
||||
|
|
@ -59,13 +59,14 @@ def get_by_id2_pg(project_id, session_id, user_id, context: schemas_ee.CurrentCo
|
|||
SELECT
|
||||
s.*,
|
||||
s.session_id::text AS session_id,
|
||||
(SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key
|
||||
(SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key,
|
||||
encode(file_key,'hex') AS file_key
|
||||
{"," if len(extra_query) > 0 else ""}{",".join(extra_query)}
|
||||
{(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata._get_column_names()]) + ") AS project_metadata") if group_metadata else ''}
|
||||
FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""}
|
||||
WHERE s.project_id = %(project_id)s
|
||||
AND s.session_id = %(session_id)s;""",
|
||||
{"project_id": project_id, "session_id": session_id, "userId": user_id}
|
||||
{"project_id": project_id, "session_id": session_id, "userId": context.user_id}
|
||||
)
|
||||
# print("===============")
|
||||
# print(query)
|
||||
|
|
@ -96,11 +97,14 @@ def get_by_id2_pg(project_id, session_id, user_id, context: schemas_ee.CurrentCo
|
|||
data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
|
||||
session_id=session_id)
|
||||
data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id)
|
||||
data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id)
|
||||
data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
|
||||
context=context)
|
||||
data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id,
|
||||
start_ts=data["startTs"], duration=data["duration"])
|
||||
|
||||
data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id,
|
||||
session_id=session_id, user_id=context.user_id)
|
||||
data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
|
||||
data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)
|
||||
data['live'] = live and assist.is_live(project_id=project_id,
|
||||
|
|
|
|||
|
|
@ -3,7 +3,8 @@ from typing import List, Union
|
|||
import schemas
|
||||
import schemas_ee
|
||||
from chalicelib.core import events, metadata, events_ios, \
|
||||
sessions_mobs, issues, projects, errors, resources, assist, performance_event, metrics
|
||||
sessions_mobs, issues, projects, errors, resources, assist, performance_event, metrics, sessions_devtool, \
|
||||
sessions_notes
|
||||
from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
|
||||
|
||||
SESSION_PROJECTION_COLS_CH = """\
|
||||
|
|
@ -58,8 +59,8 @@ def __group_metadata(session, project_metadata):
|
|||
return meta
|
||||
|
||||
|
||||
def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_viewed=False, group_metadata=False,
|
||||
live=True):
|
||||
def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, include_fav_viewed=False,
|
||||
group_metadata=False, live=True):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
extra_query = []
|
||||
if include_fav_viewed:
|
||||
|
|
@ -82,7 +83,7 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
|
|||
FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""}
|
||||
WHERE s.project_id = %(project_id)s
|
||||
AND s.session_id = %(session_id)s;""",
|
||||
{"project_id": project_id, "session_id": session_id, "userId": user_id}
|
||||
{"project_id": project_id, "session_id": session_id, "userId": context.user_id}
|
||||
)
|
||||
# print("===============")
|
||||
# print(query)
|
||||
|
|
@ -112,11 +113,16 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
|
|||
:500] # limit the number of errors to reduce the response-body size
|
||||
data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
|
||||
session_id=session_id)
|
||||
data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id)
|
||||
data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id)
|
||||
data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id)
|
||||
data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
|
||||
context=context)
|
||||
data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id,
|
||||
start_ts=data["startTs"],
|
||||
duration=data["duration"])
|
||||
|
||||
data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id,
|
||||
session_id=session_id, user_id=context.user_id)
|
||||
data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
|
||||
data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)
|
||||
data['live'] = live and assist.is_live(project_id=project_id,
|
||||
|
|
|
|||
|
|
@ -5,36 +5,36 @@ from chalicelib.core import sessions, sessions_favorite_exp
|
|||
from chalicelib.utils import pg_client, s3_extra
|
||||
|
||||
|
||||
def add_favorite_session(project_id, user_id, session_id, context: schemas_ee.CurrentContext):
|
||||
def add_favorite_session(context: schemas_ee.CurrentContext, project_id, session_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(f"""\
|
||||
INSERT INTO public.user_favorite_sessions(user_id, session_id)
|
||||
VALUES (%(userId)s,%(sessionId)s);""",
|
||||
{"userId": user_id, "sessionId": session_id})
|
||||
{"userId": context.user_id, "sessionId": session_id})
|
||||
)
|
||||
|
||||
sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id)
|
||||
return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False,
|
||||
include_fav_viewed=True, context=context)
|
||||
sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id)
|
||||
return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id,
|
||||
full_data=False, include_fav_viewed=True, context=context)
|
||||
|
||||
|
||||
def remove_favorite_session(project_id, user_id, session_id, context: schemas_ee.CurrentContext):
|
||||
def remove_favorite_session(context: schemas_ee.CurrentContext, project_id, session_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(f"""\
|
||||
DELETE FROM public.user_favorite_sessions
|
||||
WHERE user_id = %(userId)s
|
||||
AND session_id = %(sessionId)s;""",
|
||||
{"userId": user_id, "sessionId": session_id})
|
||||
{"userId": context.user_id, "sessionId": session_id})
|
||||
)
|
||||
sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id)
|
||||
return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False,
|
||||
include_fav_viewed=True, context=context)
|
||||
sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id)
|
||||
return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id,
|
||||
full_data=False, include_fav_viewed=True, context=context)
|
||||
|
||||
|
||||
def favorite_session(project_id, user_id, session_id, context: schemas_ee.CurrentContext):
|
||||
if favorite_session_exists(user_id=user_id, session_id=session_id):
|
||||
def favorite_session(context: schemas_ee.CurrentContext, project_id, session_id):
|
||||
if favorite_session_exists(user_id=context.user_id, session_id=session_id):
|
||||
key = str(session_id)
|
||||
try:
|
||||
s3_extra.tag_file(session_id=key, tag_value=config('RETENTION_D_VALUE', default='default'))
|
||||
|
|
@ -47,7 +47,7 @@ def favorite_session(project_id, user_id, session_id, context: schemas_ee.Curren
|
|||
except Exception as e:
|
||||
print(f"!!!Error while tagging: {key} to default")
|
||||
print(str(e))
|
||||
return remove_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id)
|
||||
return remove_favorite_session(context=context, project_id=project_id, session_id=session_id)
|
||||
key = str(session_id)
|
||||
try:
|
||||
s3_extra.tag_file(session_id=key, tag_value=config('RETENTION_L_VALUE', default='vault'))
|
||||
|
|
@ -60,7 +60,7 @@ def favorite_session(project_id, user_id, session_id, context: schemas_ee.Curren
|
|||
except Exception as e:
|
||||
print(f"!!!Error while tagging: {key} to vault")
|
||||
print(str(e))
|
||||
return add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id, context=context)
|
||||
return add_favorite_session(context=context, project_id=project_id, session_id=session_id)
|
||||
|
||||
|
||||
def favorite_session_exists(user_id, session_id):
|
||||
|
|
@ -90,4 +90,4 @@ def get_start_end_timestamp(project_id, user_id):
|
|||
{"userId": user_id, "project_id": project_id})
|
||||
)
|
||||
r = cur.fetchone()
|
||||
return (0, 0) if r is None else (r["max_start_ts"], r["min_start_ts"])
|
||||
return (0, 0) if r is None else (r["min_start_ts"], r["max_start_ts"])
|
||||
|
|
|
|||
170
ee/api/chalicelib/core/sessions_notes.py
Normal file
170
ee/api/chalicelib/core/sessions_notes.py
Normal file
|
|
@ -0,0 +1,170 @@
|
|||
from urllib.parse import urljoin
|
||||
|
||||
from decouple import config
|
||||
|
||||
import schemas
|
||||
from chalicelib.core import sessions
|
||||
from chalicelib.core.collaboration_slack import Slack
|
||||
from chalicelib.utils import pg_client, helper
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
|
||||
|
||||
def get_note(tenant_id, project_id, user_id, note_id, share=None):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS creator_name
|
||||
{",(SELECT name FROM users WHERE tenant_id=%(tenant_id)s AND user_id=%(share)s) AS share_name" if share else ""}
|
||||
FROM sessions_notes INNER JOIN users USING (user_id)
|
||||
WHERE sessions_notes.project_id = %(project_id)s
|
||||
AND sessions_notes.note_id = %(note_id)s
|
||||
AND sessions_notes.deleted_at IS NULL
|
||||
AND (sessions_notes.user_id = %(user_id)s
|
||||
OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s);""",
|
||||
{"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id,
|
||||
"note_id": note_id, "share": share})
|
||||
|
||||
cur.execute(query=query)
|
||||
row = cur.fetchone()
|
||||
row = helper.dict_to_camel_case(row)
|
||||
if row:
|
||||
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
|
||||
return row
|
||||
|
||||
|
||||
def get_session_notes(tenant_id, project_id, session_id, user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""SELECT sessions_notes.*
|
||||
FROM sessions_notes
|
||||
INNER JOIN users USING (user_id)
|
||||
WHERE sessions_notes.project_id = %(project_id)s
|
||||
AND sessions_notes.deleted_at IS NULL
|
||||
AND sessions_notes.session_id = %(session_id)s
|
||||
AND (sessions_notes.user_id = %(user_id)s
|
||||
OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s)
|
||||
ORDER BY created_at DESC;""",
|
||||
{"project_id": project_id, "user_id": user_id,
|
||||
"tenant_id": tenant_id, "session_id": session_id})
|
||||
|
||||
cur.execute(query=query)
|
||||
rows = cur.fetchall()
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
for row in rows:
|
||||
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
|
||||
return rows
|
||||
|
||||
|
||||
def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
conditions = ["sessions_notes.project_id = %(project_id)s", "sessions_notes.deleted_at IS NULL"]
|
||||
extra_params = {}
|
||||
if data.tags and len(data.tags) > 0:
|
||||
k = "tag_value"
|
||||
conditions.append(
|
||||
sessions._multiple_conditions(f"%({k})s = sessions_notes.tag", data.tags, value_key=k))
|
||||
extra_params = sessions._multiple_values(data.tags, value_key=k)
|
||||
if data.shared_only:
|
||||
conditions.append("sessions_notes.is_public AND users.tenant_id = %(tenant_id)s")
|
||||
elif data.mine_only:
|
||||
conditions.append("sessions_notes.user_id = %(user_id)s")
|
||||
else:
|
||||
conditions.append(
|
||||
"(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s)")
|
||||
query = cur.mogrify(f"""SELECT sessions_notes.*
|
||||
FROM sessions_notes
|
||||
INNER JOIN users USING (user_id)
|
||||
WHERE {" AND ".join(conditions)}
|
||||
ORDER BY created_at {data.order}
|
||||
LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""",
|
||||
{"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params})
|
||||
|
||||
cur.execute(query=query)
|
||||
rows = cur.fetchall()
|
||||
rows = helper.list_to_camel_case(rows)
|
||||
for row in rows:
|
||||
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
|
||||
return rows
|
||||
|
||||
|
||||
def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNoteSchema):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public)
|
||||
VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s)
|
||||
RETURNING *;""",
|
||||
{"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict()})
|
||||
cur.execute(query)
|
||||
result = helper.dict_to_camel_case(cur.fetchone())
|
||||
if result:
|
||||
result["createdAt"] = TimeUTC.datetime_to_timestamp(result["createdAt"])
|
||||
return result
|
||||
|
||||
|
||||
def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNoteSchema):
|
||||
sub_query = []
|
||||
if data.message is not None:
|
||||
sub_query.append("message = %(message)s")
|
||||
if data.tag is not None and len(data.tag) > 0:
|
||||
sub_query.append("tag = %(tag)s")
|
||||
if data.is_public is not None:
|
||||
sub_query.append("is_public = %(is_public)s")
|
||||
if data.timestamp is not None:
|
||||
sub_query.append("timestamp = %(timestamp)s")
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(f"""UPDATE public.sessions_notes
|
||||
SET
|
||||
{" ,".join(sub_query)}
|
||||
WHERE
|
||||
project_id = %(project_id)s
|
||||
AND user_id = %(user_id)s
|
||||
AND note_id = %(note_id)s
|
||||
AND deleted_at ISNULL
|
||||
RETURNING *;""",
|
||||
{"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.dict()})
|
||||
)
|
||||
row = helper.dict_to_camel_case(cur.fetchone())
|
||||
if row:
|
||||
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
|
||||
return row
|
||||
|
||||
|
||||
def delete(tenant_id, user_id, project_id, note_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(""" UPDATE public.sessions_notes
|
||||
SET deleted_at = timezone('utc'::text, now())
|
||||
WHERE note_id = %(note_id)s
|
||||
AND project_id = %(project_id)s
|
||||
AND user_id = %(user_id)s
|
||||
AND deleted_at ISNULL;""",
|
||||
{"project_id": project_id, "user_id": user_id, "note_id": note_id})
|
||||
)
|
||||
return {"data": {"state": "success"}}
|
||||
|
||||
|
||||
def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
|
||||
note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
|
||||
if note is None:
|
||||
return {"errors": ["Note not found"]}
|
||||
session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/sessions/{note['sessionId']}")
|
||||
title = f"<{session_url}|Note for session {note['sessionId']}>"
|
||||
|
||||
blocks = [{"type": "section",
|
||||
"fields": [{"type": "mrkdwn",
|
||||
"text": title}]},
|
||||
{"type": "section",
|
||||
"fields": [{"type": "plain_text",
|
||||
"text": note["message"]}]}]
|
||||
if note["tag"]:
|
||||
blocks.append({"type": "context",
|
||||
"elements": [{"type": "plain_text",
|
||||
"text": f"Tag: *{note['tag']}*"}]})
|
||||
bottom = f"Created by {note['creatorName'].capitalize()}"
|
||||
if user_id != note["userId"]:
|
||||
bottom += f"\nSent by {note['shareName']}: "
|
||||
blocks.append({"type": "context",
|
||||
"elements": [{"type": "plain_text",
|
||||
"text": bottom}]})
|
||||
return Slack.send_raw(
|
||||
tenant_id=tenant_id,
|
||||
webhook_id=webhook_id,
|
||||
body={"blocks": blocks}
|
||||
)
|
||||
|
|
@ -1,6 +1,7 @@
|
|||
import json
|
||||
|
||||
import schemas
|
||||
import schemas_ee
|
||||
from chalicelib.core import users, telemetry, tenants
|
||||
from chalicelib.utils import captcha
|
||||
from chalicelib.utils import helper
|
||||
|
|
@ -18,60 +19,48 @@ def create_step1(data: schemas.UserSignupSchema):
|
|||
print(f"=====================> {email}")
|
||||
password = data.password
|
||||
|
||||
print("Verifying email validity")
|
||||
if email is None or len(email) < 5 or not helper.is_valid_email(email):
|
||||
if email is None or len(email) < 5:
|
||||
errors.append("Invalid email address.")
|
||||
else:
|
||||
print("Verifying email existance")
|
||||
if users.email_exists(email):
|
||||
errors.append("Email address already in use.")
|
||||
if users.get_deleted_user_by_email(email) is not None:
|
||||
errors.append("Email address previously deleted.")
|
||||
|
||||
print("Verifying captcha")
|
||||
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
|
||||
errors.append("Invalid captcha.")
|
||||
|
||||
print("Verifying password validity")
|
||||
if len(password) < 6:
|
||||
errors.append("Password is too short, it must be at least 6 characters long.")
|
||||
|
||||
print("Verifying fullname validity")
|
||||
fullname = data.fullname
|
||||
if fullname is None or len(fullname) < 1 or not helper.is_alphabet_space_dash(fullname):
|
||||
errors.append("Invalid full name.")
|
||||
|
||||
print("Verifying company's name validity")
|
||||
company_name = data.organizationName
|
||||
if company_name is None or len(company_name) < 1:
|
||||
errors.append("invalid organization's name")
|
||||
|
||||
print("Verifying project's name validity")
|
||||
project_name = data.projectName
|
||||
if project_name is None or len(project_name) < 1:
|
||||
project_name = "my first project"
|
||||
organization_name = data.organizationName
|
||||
if organization_name is None or len(organization_name) < 1:
|
||||
errors.append("Invalid organization name.")
|
||||
|
||||
if len(errors) > 0:
|
||||
print("==> error")
|
||||
print(f"==> error for email:{data.email}, fullname:{data.fullname}, organizationName:{data.organizationName}")
|
||||
print(errors)
|
||||
return {"errors": errors}
|
||||
print("No errors detected")
|
||||
print("Decomposed infos")
|
||||
|
||||
params = {"email": email, "password": password,
|
||||
"fullname": fullname, "companyName": company_name,
|
||||
"projectName": project_name,
|
||||
"data": json.dumps({"lastAnnouncementView": TimeUTC.now()})}
|
||||
query = """\
|
||||
WITH t AS (
|
||||
INSERT INTO public.tenants (name, version_number)
|
||||
VALUES (%(companyName)s, (SELECT openreplay_version()))
|
||||
project_name = "my first project"
|
||||
params = {
|
||||
"email": email, "password": password, "fullname": fullname, "projectName": project_name,
|
||||
"data": json.dumps({"lastAnnouncementView": TimeUTC.now()}), "organizationName": organization_name,
|
||||
"permissions": [p.value for p in schemas_ee.Permissions]
|
||||
}
|
||||
query = """WITH t AS (
|
||||
INSERT INTO public.tenants (name)
|
||||
VALUES (%(organizationName)s)
|
||||
RETURNING tenant_id, api_key
|
||||
),
|
||||
r AS (
|
||||
INSERT INTO public.roles(tenant_id, name, description, permissions, protected)
|
||||
VALUES ((SELECT tenant_id FROM t), 'Owner', 'Owner', '{"SESSION_REPLAY", "DEV_TOOLS", "METRICS", "ASSIST_LIVE", "ASSIST_CALL"}'::text[], TRUE),
|
||||
((SELECT tenant_id FROM t), 'Member', 'Member', '{"SESSION_REPLAY", "DEV_TOOLS", "METRICS", "ASSIST_LIVE", "ASSIST_CALL"}'::text[], FALSE)
|
||||
VALUES ((SELECT tenant_id FROM t), 'Owner', 'Owner', %(permissions)s::text[], TRUE),
|
||||
((SELECT tenant_id FROM t), 'Member', 'Member', %(permissions)s::text[], FALSE)
|
||||
RETURNING *
|
||||
),
|
||||
u AS (
|
||||
|
|
@ -109,7 +98,7 @@ def create_step1(data: schemas.UserSignupSchema):
|
|||
}
|
||||
c = {
|
||||
"tenantId": 1,
|
||||
"name": company_name,
|
||||
"name": organization_name,
|
||||
"apiKey": api_key,
|
||||
"remainingTrial": 14,
|
||||
"trialEnded": False,
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ def process_data(data):
|
|||
|
||||
|
||||
def compute():
|
||||
with pg_client.PostgresClient() as cur:
|
||||
with pg_client.PostgresClient(long_query=True) as cur:
|
||||
cur.execute(
|
||||
f"""UPDATE public.tenants
|
||||
SET t_integrations = COALESCE((SELECT COUNT(DISTINCT provider)
|
||||
|
|
@ -39,17 +39,18 @@ def compute():
|
|||
FROM public.projects
|
||||
WHERE deleted_at ISNULL
|
||||
AND projects.tenant_id = all_tenants.tenant_id), 0),
|
||||
t_sessions=COALESCE((SELECT COUNT(*)
|
||||
FROM public.sessions
|
||||
INNER JOIN public.projects USING (project_id)
|
||||
WHERE projects.tenant_id = all_tenants.tenant_id), 0),
|
||||
t_sessions=t_sessions + COALESCE((SELECT COUNT(*)
|
||||
FROM public.sessions INNER JOIN public.projects USING (project_id)
|
||||
WHERE projects.tenant_id = all_tenants.tenant_id
|
||||
AND start_ts >= (SELECT last_telemetry FROM tenants)
|
||||
AND start_ts <=CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT)), 0),
|
||||
t_users=COALESCE((SELECT COUNT(*)
|
||||
FROM public.users
|
||||
WHERE deleted_at ISNULL
|
||||
AND users.tenant_id = all_tenants.tenant_id), 0)
|
||||
FROM (
|
||||
SELECT tenant_id
|
||||
FROM public.tenants
|
||||
AND users.tenant_id = all_tenants.tenant_id), 0),
|
||||
last_telemetry=CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT)
|
||||
FROM (SELECT tenant_id
|
||||
FROM public.tenants
|
||||
) AS all_tenants
|
||||
WHERE tenants.tenant_id = all_tenants.tenant_id
|
||||
RETURNING name,t_integrations,t_projects,t_sessions,t_users,tenant_key,opt_out,
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ def get_by_tenant_key(tenant_key):
|
|||
t.api_key,
|
||||
t.created_at,
|
||||
'{license.EDITION}' AS edition,
|
||||
t.version_number,
|
||||
openreplay_version() AS version_number,
|
||||
t.opt_out
|
||||
FROM public.tenants AS t
|
||||
WHERE t.tenant_key = %(tenant_key)s AND t.deleted_at ISNULL
|
||||
|
|
@ -33,7 +33,7 @@ def get_by_tenant_id(tenant_id):
|
|||
t.api_key,
|
||||
t.created_at,
|
||||
'{license.EDITION}' AS edition,
|
||||
t.version_number,
|
||||
openreplay_version() AS version_number,
|
||||
t.opt_out,
|
||||
t.tenant_key
|
||||
FROM public.tenants AS t
|
||||
|
|
|
|||
|
|
@ -293,9 +293,8 @@ def generate_new_api_key(user_id):
|
|||
cur.mogrify(
|
||||
f"""UPDATE public.users
|
||||
SET api_key=generate_api_key(20)
|
||||
WHERE
|
||||
users.user_id = %(userId)s
|
||||
AND deleted_at IS NULL
|
||||
WHERE users.user_id = %(userId)s
|
||||
AND deleted_at IS NULL
|
||||
RETURNING api_key;""",
|
||||
{"userId": user_id})
|
||||
)
|
||||
|
|
@ -344,6 +343,47 @@ def edit(user_id_to_update, tenant_id, changes: schemas_ee.EditUserSchema, edito
|
|||
return {"data": user}
|
||||
|
||||
|
||||
def edit_member(user_id_to_update, tenant_id, changes: schemas_ee.EditUserSchema, editor_id):
|
||||
user = get_member(user_id=user_id_to_update, tenant_id=tenant_id)
|
||||
if editor_id != user_id_to_update or changes.admin is not None and changes.admin != user["admin"]:
|
||||
admin = get(tenant_id=tenant_id, user_id=editor_id)
|
||||
if not admin["superAdmin"] and not admin["admin"]:
|
||||
return {"errors": ["unauthorized"]}
|
||||
_changes = {}
|
||||
if editor_id == user_id_to_update:
|
||||
if changes.admin is not None:
|
||||
if user["superAdmin"]:
|
||||
changes.admin = None
|
||||
elif changes.admin != user["admin"]:
|
||||
return {"errors": ["cannot change your own role"]}
|
||||
if changes.roleId is not None:
|
||||
if user["superAdmin"]:
|
||||
changes.roleId = None
|
||||
elif changes.roleId != user["roleId"]:
|
||||
return {"errors": ["cannot change your own role"]}
|
||||
|
||||
if changes.email is not None and changes.email != user["email"]:
|
||||
if email_exists(changes.email):
|
||||
return {"errors": ["email already exists."]}
|
||||
if get_deleted_user_by_email(changes.email) is not None:
|
||||
return {"errors": ["email previously deleted."]}
|
||||
_changes["email"] = changes.email
|
||||
|
||||
if changes.name is not None and len(changes.name) > 0:
|
||||
_changes["name"] = changes.name
|
||||
|
||||
if changes.admin is not None:
|
||||
_changes["role"] = "admin" if changes.admin else "member"
|
||||
|
||||
if changes.roleId is not None:
|
||||
_changes["roleId"] = changes.roleId
|
||||
|
||||
if len(_changes.keys()) > 0:
|
||||
update(tenant_id=tenant_id, user_id=user_id_to_update, changes=_changes)
|
||||
return {"data": get_member(tenant_id=tenant_id, user_id=user_id_to_update)}
|
||||
return {"data": user}
|
||||
|
||||
|
||||
def get_by_email_only(email):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
|
|
@ -393,12 +433,49 @@ def get_by_email_reset(email, reset_token):
|
|||
return helper.dict_to_camel_case(r)
|
||||
|
||||
|
||||
def get_member(tenant_id, user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
f"""SELECT
|
||||
users.user_id,
|
||||
users.email,
|
||||
users.role,
|
||||
users.name,
|
||||
users.created_at,
|
||||
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
|
||||
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
|
||||
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
|
||||
DATE_PART('day',timezone('utc'::text, now()) \
|
||||
- COALESCE(basic_authentication.invited_at,'2000-01-01'::timestamp ))>=1 AS expired_invitation,
|
||||
basic_authentication.password IS NOT NULL OR users.origin IS NOT NULL AS joined,
|
||||
invitation_token,
|
||||
role_id,
|
||||
roles.name AS role_name
|
||||
FROM public.users
|
||||
LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
|
||||
LEFT JOIN public.roles USING (role_id)
|
||||
WHERE users.tenant_id = %(tenant_id)s AND users.deleted_at IS NULL AND users.user_id = %(user_id)s
|
||||
ORDER BY name, user_id""",
|
||||
{"tenant_id": tenant_id, "user_id": user_id})
|
||||
)
|
||||
u = helper.dict_to_camel_case(cur.fetchone())
|
||||
if u:
|
||||
u["createdAt"] = TimeUTC.datetime_to_timestamp(u["createdAt"])
|
||||
if u["invitationToken"]:
|
||||
u["invitationLink"] = __get_invitation_link(u.pop("invitationToken"))
|
||||
else:
|
||||
u["invitationLink"] = None
|
||||
|
||||
return u
|
||||
|
||||
|
||||
def get_members(tenant_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
f"""SELECT
|
||||
users.user_id AS id,
|
||||
users.user_id,
|
||||
users.email,
|
||||
users.role,
|
||||
users.name,
|
||||
|
|
@ -416,7 +493,7 @@ def get_members(tenant_id):
|
|||
LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
|
||||
LEFT JOIN public.roles USING (role_id)
|
||||
WHERE users.tenant_id = %(tenant_id)s AND users.deleted_at IS NULL
|
||||
ORDER BY name, id""",
|
||||
ORDER BY name, user_id""",
|
||||
{"tenant_id": tenant_id})
|
||||
)
|
||||
r = cur.fetchall()
|
||||
|
|
|
|||
|
|
@ -40,3 +40,9 @@ def get_user_viewed_sessions_table(timestamp=0):
|
|||
|
||||
def get_user_viewed_errors_table(timestamp=0):
|
||||
return "experimental.user_viewed_errors"
|
||||
|
||||
|
||||
def get_main_js_errors_sessions_table(timestamp=0):
|
||||
return "experimental.js_errors_sessions_mv" # \
|
||||
# if config("EXP_7D_MV", cast=bool, default=True) \
|
||||
# and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.events"
|
||||
|
|
|
|||
|
|
@ -1,7 +1,5 @@
|
|||
#!/bin/sh
|
||||
sh env_vars.sh
|
||||
source /tmp/.env.override
|
||||
cd sourcemap-reader
|
||||
nohup npm start &
|
||||
cd ..
|
||||
|
||||
uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload --proxy-headers
|
||||
|
|
|
|||
|
|
@ -28,7 +28,7 @@ idp_x509cert=
|
|||
invitation_link=/api/users/invitation?token=%s
|
||||
js_cache_bucket=sessions-assets
|
||||
jwt_algorithm=HS512
|
||||
JWT_EXP_DELTA_SECONDS=2592000
|
||||
JWT_EXPIRATION=2592000
|
||||
JWT_ISSUER=openreplay-ee
|
||||
jwt_secret="SET A RANDOM STRING HERE"
|
||||
ASSIST_URL=http://assist-openreplay.app.svc.cluster.local:9001/assist/%s
|
||||
|
|
@ -48,13 +48,14 @@ PG_POOL=true
|
|||
sessions_bucket=mobs
|
||||
sessions_region=us-east-1
|
||||
sourcemaps_bucket=sourcemaps
|
||||
sourcemaps_reader=http://127.0.0.1:9000/sourcemaps/%s/sourcemaps
|
||||
sourcemaps_reader=http://sourcemaps-reader-openreplay.app.svc.cluster.local:9000/sourcemaps/%s/sourcemaps
|
||||
stage=default-ee
|
||||
version_number=1.0.0
|
||||
FS_DIR=/mnt/efs
|
||||
EXP_SESSIONS_SEARCH=false
|
||||
EXP_AUTOCOMPLETE=false
|
||||
EXP_ERRORS_SEARCH=false
|
||||
EXP_ERRORS_GET=false
|
||||
EXP_METRICS=true
|
||||
EXP_7D_MV=false
|
||||
EXP_ALERTS=false
|
||||
|
|
@ -65,5 +66,7 @@ EFS_SESSION_MOB_PATTERN=%(sessionId)s/dom.mob
|
|||
EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob
|
||||
SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs
|
||||
SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe
|
||||
DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob
|
||||
PRESIGNED_URL_EXPIRATION=3600
|
||||
DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mobs
|
||||
PRESIGNED_URL_EXPIRATION=3600
|
||||
ASSIST_JWT_EXPIRATION=144000
|
||||
ASSIST_JWT_SECRET=
|
||||
|
|
@ -7,7 +7,7 @@ from starlette.responses import RedirectResponse, FileResponse
|
|||
import schemas
|
||||
import schemas_ee
|
||||
from chalicelib.core import sessions, assist, heatmaps, sessions_favorite, sessions_assignments, errors, errors_viewed, \
|
||||
errors_favorite
|
||||
errors_favorite, sessions_notes
|
||||
from chalicelib.core import sessions_viewed
|
||||
from chalicelib.core import tenants, users, projects, license
|
||||
from chalicelib.core import webhook
|
||||
|
|
@ -50,7 +50,6 @@ def get_account(context: schemas.CurrentContext = Depends(OR_context)):
|
|||
|
||||
|
||||
@app.post('/account', tags=["account"])
|
||||
@app.put('/account', tags=["account"])
|
||||
def edit_account(data: schemas_ee.EditUserSchema = Body(...),
|
||||
context: schemas.CurrentContext = Depends(OR_context)):
|
||||
return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data,
|
||||
|
|
@ -74,8 +73,8 @@ def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_con
|
|||
return {"data": data}
|
||||
|
||||
|
||||
@app.put('/integrations/slack', tags=['integrations'])
|
||||
@app.post('/integrations/slack', tags=['integrations'])
|
||||
@app.put('/integrations/slack', tags=['integrations'])
|
||||
def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentContext = Depends(OR_context)):
|
||||
n = Slack.add_channel(tenant_id=context.tenant_id, url=data.url, name=data.name)
|
||||
if n is None:
|
||||
|
|
@ -85,7 +84,6 @@ def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentConte
|
|||
return {"data": n}
|
||||
|
||||
|
||||
@app.put('/integrations/slack/{integrationId}', tags=['integrations'])
|
||||
@app.post('/integrations/slack/{integrationId}', tags=['integrations'])
|
||||
def edit_slack_integration(integrationId: int, data: schemas.EditSlackSchema = Body(...),
|
||||
        context: schemas.CurrentContext = Depends(OR_context)):

@@ -102,7 +100,6 @@ def edit_slack_integration(integrationId: int, data: schemas.EditSlackSchema = B

@app.post('/client/members', tags=["client"])
@app.put('/client/members', tags=["client"])
def add_member(background_tasks: BackgroundTasks, data: schemas_ee.CreateMemberSchema = Body(...),
               context: schemas.CurrentContext = Depends(OR_context)):
    return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(),

@@ -127,7 +124,6 @@ def process_invitation_link(token: str):

@public_app.post('/password/reset', tags=["users"])
@public_app.put('/password/reset', tags=["users"])
def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)):
    if data is None or len(data.invitation) < 64 or len(data.passphrase) < 8:
        return {"errors": ["please provide a valid invitation & pass"]}

@@ -140,12 +136,11 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema =
    return users.set_password_invitation(new_password=data.password, user_id=user["userId"], tenant_id=user["tenantId"])

@app.put('/client/members/{memberId}', tags=["client"])
@app.post('/client/members/{memberId}', tags=["client"])
def edit_member(memberId: int, data: schemas_ee.EditMemberSchema,
                context: schemas.CurrentContext = Depends(OR_context)):
    return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data,
                      user_id_to_update=memberId)
    return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data,
                             user_id_to_update=memberId)

@app.get('/metadata/session_search', tags=["metadata"])

@@ -183,7 +178,7 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba
                context: schemas.CurrentContext = Depends(OR_context)):
    if isinstance(sessionId, str):
        return {"errors": ["session not found"]}
    data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, user_id=context.user_id,
    data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True,
                                  include_fav_viewed=True, group_metadata=True, context=context)
    if data is None:
        return {"errors": ["session not found"]}

@@ -274,9 +269,8 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun
                     context: schemas_ee.CurrentContext = Depends(OR_context)):
    data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId)
    if data is None:
        data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True,
                                      user_id=context.user_id, include_fav_viewed=True, group_metadata=True, live=False,
                                      context=context)
        data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId, full_data=True,
                                      include_fav_viewed=True, group_metadata=True, live=False)
        if data is None:
            return {"errors": ["session not found"]}
        if data.get("inDB"):

@@ -285,9 +279,7 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun
    return {'data': data}

@app.get('/{projectId}/unprocessed/{sessionId}', tags=["assist"],
         dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay)])
@app.get('/{projectId}/assist/sessions/{sessionId}/replay', tags=["assist"],
@app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"],
         dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay)])
def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
                                 context: schemas.CurrentContext = Depends(OR_context)):

@@ -308,9 +300,7 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
    return FileResponse(path=path, media_type="application/octet-stream")

@app.get('/{projectId}/unprocessed/{sessionId}/devtools', tags=["assist"],
         dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools)])
@app.get('/{projectId}/assist/sessions/{sessionId}/devtools', tags=["assist"],
@app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"],
         dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools)])
def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
                                   context: schemas.CurrentContext = Depends(OR_context)):

@@ -339,13 +329,10 @@ def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema =

@app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"],
         dependencies=[OR_scope(Permissions.session_replay)])
@app.get('/{projectId}/sessions2/{sessionId}/favorite', tags=["sessions"],
         dependencies=[OR_scope(Permissions.session_replay)])
def add_remove_favorite_session2(projectId: int, sessionId: int,
                                 context: schemas_ee.CurrentContext = Depends(OR_context)):
    return {
        "data": sessions_favorite.favorite_session(project_id=projectId, user_id=context.user_id,
                                                   session_id=sessionId, context=context)}
        "data": sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId)}

@app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"],

@@ -380,12 +367,8 @@ def assign_session(projectId: int, sessionId: int, issueId: str,

@app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"],
          dependencies=[OR_scope(Permissions.session_replay)])
@app.put('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"],
         dependencies=[OR_scope(Permissions.session_replay)])
@app.post('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"],
          dependencies=[OR_scope(Permissions.session_replay)])
@app.put('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"],
         dependencies=[OR_scope(Permissions.session_replay)])
def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...),
                       context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId,

@@ -396,3 +379,68 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schem
    return {
        'data': data
    }

@app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"],
          dependencies=[OR_scope(Permissions.session_replay)])
def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...),
                context: schemas.CurrentContext = Depends(OR_context)):
    if not sessions.session_exists(project_id=projectId, session_id=sessionId):
        return {"errors": ["Session not found"]}
    data = sessions_notes.create(tenant_id=context.tenant_id, project_id=projectId,
                                 session_id=sessionId, user_id=context.user_id, data=data)
    if "errors" in data.keys():
        return data
    return {
        'data': data
    }

@app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"],
         dependencies=[OR_scope(Permissions.session_replay)])
def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId,
                                            session_id=sessionId, user_id=context.user_id)
    if "errors" in data:
        return data
    return {
        'data': data
    }

@app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"],
          dependencies=[OR_scope(Permissions.session_replay)])
def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...),
              context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
                               note_id=noteId, data=data)
    if "errors" in data.keys():
        return data
    return {
        'data': data
    }

@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"],
            dependencies=[OR_scope(Permissions.session_replay)])
def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions_notes.delete(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
                                 note_id=noteId)
    return data

@app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"])
def share_note_to_slack(projectId: int, noteId: int, webhookId: int,
                        context: schemas.CurrentContext = Depends(OR_context)):
    return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
                                         note_id=noteId, webhook_id=webhookId)

@app.post('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)])
def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
                  context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
                                                      user_id=context.user_id, data=data)
    if "errors" in data:
        return data
    return {'data': data}
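Taken together, the new routes give session notes full create/read/update/delete plus Slack sharing. A minimal client-side sketch against them (the host, bearer token, and project/session/note IDs below are placeholders; the payload fields follow the sessions_notes columns, and the exact response shape is whatever the sessions_notes service returns under "data"):

import requests  # assumes the requests package is installed

BASE = "http://localhost:8000"                # hypothetical API host
HEADERS = {"Authorization": "Bearer <JWT>"}   # placeholder credentials

# Create a note pinned 4.5s into session 123 of project 1
created = requests.post(f"{BASE}/1/sessions/123/notes", headers=HEADERS,
                        json={"message": "checkout bug starts here",
                              "timestamp": 4500, "is_public": False}).json()

# List the notes attached to that session
notes = requests.get(f"{BASE}/1/sessions/123/notes", headers=HEADERS).json()

# Delete a note by id (the id would come from the create/list responses)
requests.delete(f"{BASE}/1/notes/42", headers=HEADERS)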
59 ee/scripts/helm/db/init_dbs/clickhouse/1.8.2/1.8.2.sql Normal file
@@ -0,0 +1,59 @@
ALTER TABLE experimental.events
    ADD COLUMN IF NOT EXISTS error_tags_keys Array(String);
ALTER TABLE experimental.events
    ADD COLUMN IF NOT EXISTS error_tags_values Array(Nullable(String));

ALTER TABLE experimental.events
    ADD COLUMN IF NOT EXISTS issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19));
ALTER TABLE experimental.events
    ADD COLUMN IF NOT EXISTS issue_id Nullable(String);
ALTER TABLE experimental.events
    MODIFY COLUMN event_type Enum8('CLICK'=0, 'INPUT'=1, 'LOCATION'=2,'REQUEST'=3,'PERFORMANCE'=4,'ERROR'=5,'CUSTOM'=6, 'GRAPHQL'=7, 'STATEACTION'=8, 'ISSUE'=9);

CREATE TABLE IF NOT EXISTS experimental.issues
(
    project_id     UInt16,
    issue_id       String,
    type           Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19),
    context_string String,
    context_keys   Array(String),
    context_values Array(Nullable(String)),
    _timestamp     DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
      PARTITION BY toYYYYMM(_timestamp)
      ORDER BY (project_id, issue_id, type)
      TTL _timestamp + INTERVAL 3 MONTH;

CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.js_errors_sessions_mv
    ENGINE = ReplacingMergeTree(_timestamp)
        PARTITION BY toYYYYMM(datetime)
        ORDER BY (project_id, datetime, event_type, error_id, session_id)
        TTL _timestamp + INTERVAL 35 DAY
    POPULATE
AS
SELECT session_id,
       project_id,
       events.datetime AS datetime,
       event_type,
       assumeNotNull(error_id) AS error_id,
       source,
       name,
       message,
       error_tags_keys,
       error_tags_values,
       message_id,
       user_browser,
       user_browser_version,
       user_os,
       user_os_version,
       user_device_type,
       user_device,
       user_country,
       _timestamp
FROM experimental.events
         INNER JOIN experimental.sessions USING (session_id)
WHERE event_type = 'ERROR'
  AND source = 'js_exception';

-- TODO: find a way to update materialized views; or drop and re-create them
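A quick way to sanity-check the new view after running the migration is to query it directly; a minimal sketch using the clickhouse-driver package (the host and project id are placeholders):

from clickhouse_driver import Client  # assumes clickhouse-driver is installed

client = Client(host="localhost")  # placeholder ClickHouse host
rows = client.execute(
    """
    SELECT error_id, name, count() AS sessions
    FROM experimental.js_errors_sessions_mv
    WHERE project_id = %(pid)s
      AND datetime >= now() - INTERVAL 1 DAY
    GROUP BY error_id, name
    ORDER BY sessions DESC
    LIMIT 10
    """,
    {"pid": 1},  # hypothetical project id
)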
@@ -15,7 +15,7 @@ CREATE TABLE IF NOT EXISTS experimental.events
(
    session_id      UInt64,
    project_id      UInt16,
    event_type      Enum8('CLICK'=0, 'INPUT'=1, 'LOCATION'=2,'REQUEST'=3,'PERFORMANCE'=4,'ERROR'=5,'CUSTOM'=6, 'GRAPHQL'=7, 'STATEACTION'=8),
    event_type      Enum8('CLICK'=0, 'INPUT'=1, 'LOCATION'=2,'REQUEST'=3,'PERFORMANCE'=4,'ERROR'=5,'CUSTOM'=6, 'GRAPHQL'=7, 'STATEACTION'=8, 'ISSUE'=9),
    datetime        DateTime,
    label           Nullable(String),
    hesitation_time Nullable(UInt32),

@@ -78,6 +78,10 @@ CREATE TABLE IF NOT EXISTS experimental.events
    success           Nullable(UInt8),
    request_body      Nullable(String),
    response_body     Nullable(String),
    issue_type        Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19)),
    issue_id          Nullable(String),
    error_tags_keys   Array(String),
    error_tags_values Array(Nullable(String)),
    message_id        UInt64 DEFAULT 0,
    _timestamp        DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)

@@ -192,6 +196,20 @@ CREATE TABLE IF NOT EXISTS experimental.user_viewed_errors
    ORDER BY (project_id, user_id, error_id)
    TTL _timestamp + INTERVAL 3 MONTH;

CREATE TABLE IF NOT EXISTS experimental.issues
(
    project_id     UInt16,
    issue_id       String,
    type           Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19),
    context_string String,
    context_keys   Array(String),
    context_values Array(Nullable(String)),
    _timestamp     DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
      PARTITION BY toYYYYMM(_timestamp)
      ORDER BY (project_id, issue_id, type)
      TTL _timestamp + INTERVAL 3 MONTH;

CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.events_l7d_mv
    ENGINE = ReplacingMergeTree(_timestamp)
        PARTITION BY toYYYYMM(datetime)

@@ -256,6 +274,10 @@ SELECT session_id,
       success,
       request_body,
       response_body,
       issue_type,
       issue_id,
       error_tags_keys,
       error_tags_values,
       message_id,
       _timestamp
FROM experimental.events

@@ -338,4 +360,36 @@ SELECT session_id,
FROM experimental.sessions
WHERE datetime >= now() - INTERVAL 7 DAY
  AND isNotNull(duration)
  AND duration > 0;

CREATE MATERIALIZED VIEW IF NOT EXISTS experimental.js_errors_sessions_mv
    ENGINE = ReplacingMergeTree(_timestamp)
        PARTITION BY toYYYYMM(datetime)
        ORDER BY (project_id, datetime, event_type, error_id, session_id)
        TTL _timestamp + INTERVAL 35 DAY
    POPULATE
AS
SELECT session_id,
       project_id,
       events.datetime AS datetime,
       event_type,
       assumeNotNull(error_id) AS error_id,
       source,
       name,
       message,
       error_tags_keys,
       error_tags_values,
       message_id,
       user_id,
       user_browser,
       user_browser_version,
       user_os,
       user_os_version,
       user_device_type,
       user_device,
       user_country,
       _timestamp
FROM experimental.events
         INNER JOIN experimental.sessions USING (session_id)
WHERE event_type = 'ERROR'
  AND source = 'js_exception';
@@ -1,4 +1,64 @@
BEGIN;
CREATE OR REPLACE FUNCTION openreplay_version()
    RETURNS text AS
$$
SELECT 'v1.8.2-ee'
$$ LANGUAGE sql IMMUTABLE;

ALTER TABLE IF EXISTS public.tenants
    ADD COLUMN IF NOT EXISTS last_telemetry bigint NOT NULL DEFAULT CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT),
    DROP COLUMN IF EXISTS version_number;

CREATE TABLE IF NOT EXISTS sessions_notes
(
    note_id    integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
    message    text NOT NULL,
    created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
    user_id    integer NULL REFERENCES users (user_id) ON DELETE SET NULL,
    deleted_at timestamp without time zone NULL DEFAULT NULL,
    tag        text NULL,
    session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
    project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
    timestamp  integer NOT NULL DEFAULT -1,
    is_public  boolean NOT NULL DEFAULT FALSE
);

CREATE TABLE IF NOT EXISTS errors_tags
(
    key        text NOT NULL,
    value      text NOT NULL,
    created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
    error_id   text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE,
    session_id bigint NOT NULL,
    message_id bigint NOT NULL,
    FOREIGN KEY (session_id, message_id) REFERENCES events.errors (session_id, message_id) ON DELETE CASCADE
);

CREATE INDEX IF NOT EXISTS errors_tags_error_id_idx ON errors_tags (error_id);
CREATE INDEX IF NOT EXISTS errors_tags_session_id_idx ON errors_tags (session_id);
CREATE INDEX IF NOT EXISTS errors_tags_message_id_idx ON errors_tags (message_id);

UPDATE metrics
SET default_config=default_config || '{"col":4}'
WHERE metric_type = 'funnel';

UPDATE dashboard_widgets
SET config=config || '{"col":4}'
WHERE metric_id IN (SELECT metric_id FROM metrics WHERE metric_type = 'funnel');

CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS
$$
BEGIN
    IF NEW IS NULL THEN
        PERFORM pg_notify('integration',
                          jsonb_build_object('project_id', OLD.project_id, 'provider', OLD.provider, 'options',
                                             null)::text);
    ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN
        PERFORM pg_notify('integration', row_to_json(NEW)::text);
    END IF;
    RETURN NULL;
END;
$$ LANGUAGE plpgsql;

ALTER TABLE sessions ADD file_key BYTEA NULL;
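The notify_integration() function pushes integration changes to clients over Postgres LISTEN/NOTIFY; a minimal listener sketch using psycopg2 (the DSN is a placeholder, and the trigger that invokes the function is wired up elsewhere in the schema):

import select
import psycopg2  # assumes psycopg2 is installed

conn = psycopg2.connect("dbname=postgres user=postgres")  # placeholder DSN
conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
cur = conn.cursor()
cur.execute("LISTEN integration;")

while True:
    # Block until the server signals activity on this connection
    if select.select([conn], [], [], 5) != ([], [], []):
        conn.poll()
        while conn.notifies:
            note = conn.notifies.pop(0)
            # The payload is the jsonb_build_object / row_to_json text
            # produced by notify_integration()
            print(note.channel, note.payload)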
@@ -61,7 +61,8 @@ $$
BEGIN
    IF NEW IS NULL THEN
        PERFORM pg_notify('integration',
                          (row_to_json(OLD)::text || '{"options": null, "request_data": null}'::text));
                          jsonb_build_object('project_id', OLD.project_id, 'provider', OLD.provider, 'options',
                                             null)::text);
    ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN
        PERFORM pg_notify('integration', row_to_json(NEW)::text);
    END IF;

@@ -129,7 +130,8 @@ $$
    ('user_viewed_errors'),
    ('user_viewed_sessions'),
    ('users'),
    ('webhooks'))
    ('webhooks'),
    ('sessions_notes'))
select bool_and(exists(select *
                       from information_schema.tables t
                       where table_schema = 'public'

@@ -147,13 +149,13 @@ $$
    api_key        text UNIQUE default generate_api_key(20) not null,
    created_at     timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
    deleted_at     timestamp without time zone NULL DEFAULT NULL,
    version_number text NOT NULL,
    license        text NULL,
    opt_out        bool NOT NULL DEFAULT FALSE,
    t_projects     integer NOT NULL DEFAULT 1,
    t_sessions     bigint NOT NULL DEFAULT 0,
    t_users        integer NOT NULL DEFAULT 1,
    t_integrations integer NOT NULL DEFAULT 0
    t_integrations integer NOT NULL DEFAULT 0,
    last_telemetry bigint NOT NULL DEFAULT CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT)
);

@@ -223,7 +225,7 @@ $$
    provider_user_id text NOT NULL,
    token            text NOT NULL
);
CREATE UNIQUE INDEX IF NOT EXISTS oauth_authentication_unique_user_id_provider_idx ON oauth_authentication(user_id,provider);
CREATE UNIQUE INDEX IF NOT EXISTS oauth_authentication_unique_user_id_provider_idx ON oauth_authentication (user_id, provider);

CREATE TABLE IF NOT EXISTS projects
(

@@ -495,6 +497,21 @@ $$
CREATE INDEX IF NOT EXISTS user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id);
CREATE INDEX IF NOT EXISTS user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id);

CREATE TABLE IF NOT EXISTS errors_tags
(
    key        text NOT NULL,
    value      text NOT NULL,
    created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
    error_id   text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE,
    session_id bigint NOT NULL,
    message_id bigint NOT NULL,
    FOREIGN KEY (session_id, message_id) REFERENCES events.errors (session_id, message_id) ON DELETE CASCADE
);

CREATE INDEX IF NOT EXISTS errors_tags_error_id_idx ON errors_tags (error_id);
CREATE INDEX IF NOT EXISTS errors_tags_session_id_idx ON errors_tags (session_id);
CREATE INDEX IF NOT EXISTS errors_tags_message_id_idx ON errors_tags (message_id);

IF NOT EXISTS(SELECT *
              FROM pg_type typ
              WHERE typ.typname = 'platform') THEN

@@ -856,6 +873,20 @@ $$
FOR EACH ROW
EXECUTE PROCEDURE notify_alert();

CREATE TABLE IF NOT EXISTS sessions_notes
(
    note_id    integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
    message    text NOT NULL,
    created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
    user_id    integer NULL REFERENCES users (user_id) ON DELETE SET NULL,
    deleted_at timestamp without time zone NULL DEFAULT NULL,
    tag        text NULL,
    session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
    project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
    timestamp  integer NOT NULL DEFAULT -1,
    is_public  boolean NOT NULL DEFAULT FALSE
);

RAISE NOTICE 'Created missing public schema tables';
END IF;
END;
@@ -112,4 +112,21 @@ if (process.env.uws !== "true") {
    // process.exit(1);
});
module.exports = {uapp};
}

if (process.env.uws !== "true") {
    wsapp.get('/private/shutdown', (req, res) => {
            console.log("Requested shutdown");
            res.statusCode = 200;
            res.end("ok!");
            process.kill(1, "SIGTERM");
        }
    );
} else {
    uapp.get('/private/shutdown', (res, req) => {
            console.log("Requested shutdown");
            res.writeStatus('200 OK').end("ok!");
            process.kill(1, "SIGTERM");
        }
    );
}

@@ -1,5 +1,6 @@
const dumps = require('./utils/HeapSnapshot');
const {request_logger} = require('./utils/helper');
const assert = require('assert').strict;
const {peerRouter, peerConnection, peerDisconnect, peerError} = require('./servers/peerjs-server');
const express = require('express');
const {ExpressPeerServer} = require('peer');

@@ -40,4 +41,12 @@ process.on('uncaughtException', err => {
    console.log(`Uncaught Exception: ${err.message}`);
    debug && console.log(err.stack);
    // process.exit(1);
});

app.get('/private/shutdown', (req, res) => {
        console.log("Requested shutdown");
        res.statusCode = 200;
        res.end("ok!");
        process.kill(1, "SIGTERM");
    }
);
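The /private/shutdown hook above lets an orchestrator ask a worker to terminate itself over HTTP; a minimal sketch of triggering it, assuming the service is reachable locally on port 9000 (the port and reachability depend entirely on the deployment):

import requests  # assumes the requests package is installed

# The endpoint replies "ok!" and then sends SIGTERM to PID 1 in the container
resp = requests.get("http://localhost:9000/private/shutdown", timeout=5)
print(resp.status_code, resp.text)  # expected: 200 ok!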
63 scripts/helm/db/init_dbs/postgresql/1.8.2/1.8.2.sql Normal file
@@ -0,0 +1,63 @@
BEGIN;
CREATE OR REPLACE FUNCTION openreplay_version()
    RETURNS text AS
$$
SELECT 'v1.8.2'
$$ LANGUAGE sql IMMUTABLE;

ALTER TABLE IF EXISTS public.tenants
    ADD COLUMN IF NOT EXISTS last_telemetry bigint NOT NULL DEFAULT CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT),
    DROP COLUMN IF EXISTS version_number;

CREATE TABLE IF NOT EXISTS sessions_notes
(
    note_id    integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
    message    text NOT NULL,
    created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
    user_id    integer NULL REFERENCES users (user_id) ON DELETE SET NULL,
    deleted_at timestamp without time zone NULL DEFAULT NULL,
    tag        text NULL,
    session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
    project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
    timestamp  integer NOT NULL DEFAULT -1,
    is_public  boolean NOT NULL DEFAULT FALSE
);

CREATE TABLE IF NOT EXISTS errors_tags
(
    key        text NOT NULL,
    value      text NOT NULL,
    created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
    error_id   text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE,
    session_id bigint NOT NULL,
    message_id bigint NOT NULL,
    FOREIGN KEY (session_id, message_id) REFERENCES events.errors (session_id, message_id) ON DELETE CASCADE
);

CREATE INDEX IF NOT EXISTS errors_tags_error_id_idx ON errors_tags (error_id);
CREATE INDEX IF NOT EXISTS errors_tags_session_id_idx ON errors_tags (session_id);
CREATE INDEX IF NOT EXISTS errors_tags_message_id_idx ON errors_tags (message_id);

UPDATE metrics
SET default_config=default_config || '{"col":4}'
WHERE metric_type = 'funnel';

UPDATE dashboard_widgets
SET config=config || '{"col":4}'
WHERE metric_id IN (SELECT metric_id FROM metrics WHERE metric_type = 'funnel');

CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS
$$
BEGIN
    IF NEW IS NULL THEN
        PERFORM pg_notify('integration',
                          jsonb_build_object('project_id', OLD.project_id, 'provider', OLD.provider, 'options',
                                             null)::text);
    ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN
        PERFORM pg_notify('integration', row_to_json(NEW)::text);
    END IF;
    RETURN NULL;
END;
$$ LANGUAGE plpgsql;

COMMIT;
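As a quick illustration of the new sessions_notes table (the DSN, IDs, and values below are placeholders; in practice the API writes these rows through the sessions_notes service rather than raw SQL):

import psycopg2  # assumes psycopg2 is installed

with psycopg2.connect("dbname=postgres user=postgres") as conn:  # placeholder DSN
    with conn.cursor() as cur:
        # note_id is an identity column, so only the payload columns are supplied
        cur.execute(
            """INSERT INTO sessions_notes
                   (message, user_id, tag, session_id, project_id, timestamp, is_public)
               VALUES (%s, %s, %s, %s, %s, %s, %s)
               RETURNING note_id;""",
            ("checkout bug starts here", 1, "ISSUE", 123, 1, 4500, False),
        )
        print(cur.fetchone()[0])  # id of the freshly created note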
@@ -60,7 +60,9 @@ CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS
$$
BEGIN
    IF NEW IS NULL THEN
        PERFORM pg_notify('integration', (row_to_json(OLD)::text || '{"options": null, "request_data": null}'::text));
        PERFORM pg_notify('integration',
                          jsonb_build_object('project_id', OLD.project_id, 'provider', OLD.provider, 'options',
                                             null)::text);
    ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN
        PERFORM pg_notify('integration', row_to_json(NEW)::text);
    END IF;

@@ -121,14 +123,14 @@ $$
    name           text NOT NULL,
    api_key        text NOT NULL DEFAULT generate_api_key(20),
    created_at     timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
    version_number text NOT NULL,
    license        text NULL,
    opt_out        bool NOT NULL DEFAULT FALSE,
    t_projects     integer NOT NULL DEFAULT 1,
    t_sessions     bigint NOT NULL DEFAULT 0,
    t_users        integer NOT NULL DEFAULT 1,
    t_integrations integer NOT NULL DEFAULT 0,
    CONSTRAINT onerow_uni CHECK (tenant_id = 1)
    last_telemetry bigint NOT NULL DEFAULT CAST(EXTRACT(epoch FROM date_trunc('day', now())) * 1000 AS BIGINT)
    CONSTRAINT onerow_uni CHECK (tenant_id = 1)
);

CREATE TYPE user_role AS ENUM ('owner', 'admin', 'member');

@@ -167,7 +169,7 @@ $$
    provider_user_id text NOT NULL,
    token            text NOT NULL
);
CREATE UNIQUE INDEX oauth_authentication_unique_user_id_provider_idx ON oauth_authentication(user_id,provider);
CREATE UNIQUE INDEX oauth_authentication_unique_user_id_provider_idx ON oauth_authentication (user_id, provider);

-- --- projects.sql ---

@@ -403,6 +405,20 @@ $$
CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id);
CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id);

CREATE TABLE errors_tags
(
    key        text NOT NULL,
    value      text NOT NULL,
    created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
    error_id   text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE,
    session_id bigint NOT NULL,
    message_id bigint NOT NULL,
    FOREIGN KEY (session_id, message_id) REFERENCES events.errors (session_id, message_id) ON DELETE CASCADE
);

CREATE INDEX errors_tags_error_id_idx ON errors_tags (error_id);
CREATE INDEX errors_tags_session_id_idx ON errors_tags (session_id);
CREATE INDEX errors_tags_message_id_idx ON errors_tags (message_id);

-- --- sessions.sql ---
CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other');

@@ -999,6 +1015,20 @@ $$
FOR EACH ROW
EXECUTE PROCEDURE notify_alert();

CREATE TABLE sessions_notes
(
    note_id    integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
    message    text NOT NULL,
    created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
    user_id    integer NULL REFERENCES users (user_id) ON DELETE SET NULL,
    deleted_at timestamp without time zone NULL DEFAULT NULL,
    tag        text NULL,
    session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
    project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
    timestamp  integer NOT NULL DEFAULT -1,
    is_public  boolean NOT NULL DEFAULT FALSE
);

raise notice 'DB created';
END IF;
END;
23 sourcemap-reader/Dockerfile Normal file
@@ -0,0 +1,23 @@
FROM node:18-alpine
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
RUN apk add --no-cache tini

ARG envarg
ENV SOURCE_MAP_VERSION=0.7.4 \
    APP_NAME=sourcemaps-reader \
    LISTEN_PORT=9000 \
    MAPPING_WASM=/work/mappings.wasm \
    ENTERPRISE_BUILD=${envarg}

ADD https://unpkg.com/source-map@${SOURCE_MAP_VERSION}/lib/mappings.wasm ${MAPPING_WASM}
WORKDIR /work
COPY *.json ./
RUN npm install && chmod 644 ${MAPPING_WASM}

COPY . .

RUN adduser -u 1001 openreplay -D
USER 1001

ENTRYPOINT ["/sbin/tini", "--"]
CMD ./entrypoint.sh
44 sourcemap-reader/build.sh Normal file
@@ -0,0 +1,44 @@
#!/bin/bash

# Script to build api module
# flags to accept:
# envarg: build for enterprise edition.
# Default will be OSS build.

# Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh <ee>

git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)}
envarg="default-foss"
check_prereq() {
    which docker || {
        echo "Docker not installed, please install docker."
        exit 1
    }
    return
}

function build_api(){
    cp -R ../sourcemap-reader ../_smr
    cd ../_smr
    cp -R ../utilities/utils .
    tag=""
    # Copy enterprise code
    [[ $1 == "ee" ]] && {
        cp -rf ../ee/sourcemap-reader/* ./
        envarg="default-ee"
        tag="ee-"
    }
    docker build -f ./Dockerfile --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/souremaps-reader:${git_sha1} .
    cd ../sourcemap-reader
    rm -rf ../_smr
    [[ $PUSH_IMAGE -eq 1 ]] && {
        docker push ${DOCKER_REPO:-'local'}/souremaps-reader:${git_sha1}
        docker tag ${DOCKER_REPO:-'local'}/souremaps-reader:${git_sha1} ${DOCKER_REPO:-'local'}/souremaps-reader:${tag}latest
        docker push ${DOCKER_REPO:-'local'}/souremaps-reader:${tag}latest
    }
    echo "sourcemaps-reader docker build completed"
}

check_prereq
build_api $1
echo buil_complete
2 sourcemap-reader/entrypoint.sh Executable file
@@ -0,0 +1,2 @@
#!/bin/sh
npm start
712 sourcemap-reader/package-lock.json generated
@@ -11,6 +11,7 @@
      "dependencies": {
        "aws-sdk": "^2.1172.0",
        "express": "^4.18.1",
        "request": "^2.88.2",
        "source-map": "^0.7.4"
      }
    },

@@ -26,11 +27,47 @@
        "node": ">= 0.6"
      }
    },
    "node_modules/ajv": {
      "version": "6.12.6",
      "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
      "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
      "dependencies": {
        "fast-deep-equal": "^3.1.1",
        "fast-json-stable-stringify": "^2.0.0",
        "json-schema-traverse": "^0.4.1",
        "uri-js": "^4.2.2"
      },
      "funding": {
        "type": "github",
        "url": "https://github.com/sponsors/epoberezkin"
      }
    },
    "node_modules/array-flatten": {
      "version": "1.1.1",
      "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
      "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI="
    },
    "node_modules/asn1": {
      "version": "0.2.6",
      "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz",
      "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==",
      "dependencies": {
        "safer-buffer": "~2.1.0"
      }
    },
    "node_modules/assert-plus": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
      "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==",
      "engines": {
        "node": ">=0.8"
      }
    },
    "node_modules/asynckit": {
      "version": "0.4.0",
      "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
      "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
    },
    "node_modules/aws-sdk": {
      "version": "2.1172.0",
      "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1172.0.tgz",

@@ -50,6 +87,19 @@
        "node": ">= 10.0.0"
      }
    },
    "node_modules/aws-sign2": {
      "version": "0.7.0",
      "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
      "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==",
      "engines": {
        "node": "*"
      }
    },
    "node_modules/aws4": {
      "version": "1.11.0",
      "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz",
      "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA=="
    },
    "node_modules/base64-js": {
      "version": "1.5.1",
      "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",

@@ -69,6 +119,14 @@
        }
      ]
    },
    "node_modules/bcrypt-pbkdf": {
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
      "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==",
      "dependencies": {
        "tweetnacl": "^0.14.3"
      }
    },
    "node_modules/body-parser": {
      "version": "1.20.0",
      "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.0.tgz",

@@ -122,6 +180,22 @@
        "url": "https://github.com/sponsors/ljharb"
      }
    },
    "node_modules/caseless": {
      "version": "0.12.0",
      "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
      "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw=="
    },
    "node_modules/combined-stream": {
      "version": "1.0.8",
      "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
      "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
      "dependencies": {
        "delayed-stream": "~1.0.0"
      },
      "engines": {
        "node": ">= 0.8"
      }
    },
    "node_modules/content-disposition": {
      "version": "0.5.4",
      "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz",

@@ -154,6 +228,22 @@
      "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
      "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw="
    },
    "node_modules/core-util-is": {
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
      "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ=="
    },
    "node_modules/dashdash": {
      "version": "1.14.1",
      "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
      "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==",
      "dependencies": {
        "assert-plus": "^1.0.0"
      },
      "engines": {
        "node": ">=0.10"
      }
    },
    "node_modules/debug": {
      "version": "2.6.9",
      "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",

@@ -162,6 +252,14 @@
        "ms": "2.0.0"
      }
    },
    "node_modules/delayed-stream": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
      "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
      "engines": {
        "node": ">=0.4.0"
      }
    },
    "node_modules/depd": {
      "version": "2.0.0",
      "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",

@@ -179,6 +277,15 @@
        "npm": "1.2.8000 || >= 1.4.16"
      }
    },
    "node_modules/ecc-jsbn": {
      "version": "0.1.2",
      "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz",
      "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==",
      "dependencies": {
        "jsbn": "~0.1.0",
        "safer-buffer": "^2.1.0"
      }
    },
    "node_modules/ee-first": {
      "version": "1.1.1",
      "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",

@@ -254,6 +361,29 @@
        "node": ">= 0.10.0"
      }
    },
    "node_modules/extend": {
      "version": "3.0.2",
      "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
      "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
    },
    "node_modules/extsprintf": {
      "version": "1.3.0",
      "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz",
      "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==",
      "engines": [
        "node >=0.6.0"
      ]
    },
    "node_modules/fast-deep-equal": {
      "version": "3.1.3",
      "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
      "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
    },
    "node_modules/fast-json-stable-stringify": {
      "version": "2.1.0",
      "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
      "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="
    },
    "node_modules/finalhandler": {
      "version": "1.2.0",
      "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz",

@@ -271,6 +401,27 @@
        "node": ">= 0.8"
      }
    },
    "node_modules/forever-agent": {
      "version": "0.6.1",
      "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
      "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==",
      "engines": {
        "node": "*"
      }
    },
    "node_modules/form-data": {
      "version": "2.3.3",
      "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz",
      "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==",
      "dependencies": {
        "asynckit": "^0.4.0",
        "combined-stream": "^1.0.6",
        "mime-types": "^2.1.12"
      },
      "engines": {
        "node": ">= 0.12"
      }
    },
    "node_modules/forwarded": {
      "version": "0.2.0",
      "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",

@@ -305,6 +456,35 @@
        "url": "https://github.com/sponsors/ljharb"
      }
    },
    "node_modules/getpass": {
      "version": "0.1.7",
      "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
      "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==",
      "dependencies": {
        "assert-plus": "^1.0.0"
      }
    },
    "node_modules/har-schema": {
      "version": "2.0.0",
      "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz",
      "integrity": "sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==",
      "engines": {
        "node": ">=4"
      }
    },
    "node_modules/har-validator": {
      "version": "5.1.5",
      "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz",
      "integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==",
      "deprecated": "this library is no longer supported",
      "dependencies": {
        "ajv": "^6.12.3",
        "har-schema": "^2.0.0"
      },
      "engines": {
        "node": ">=6"
      }
    },
    "node_modules/has": {
      "version": "1.0.3",
      "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",

@@ -342,6 +522,20 @@
        "node": ">= 0.8"
      }
    },
    "node_modules/http-signature": {
      "version": "1.2.0",
      "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
      "integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==",
      "dependencies": {
        "assert-plus": "^1.0.0",
        "jsprim": "^1.2.2",
        "sshpk": "^1.7.0"
      },
      "engines": {
        "node": ">=0.8",
        "npm": ">=1.3.7"
      }
    },
    "node_modules/iconv-lite": {
      "version": "0.4.24",
      "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",

@@ -371,11 +565,21 @@
        "node": ">= 0.10"
      }
    },
    "node_modules/is-typedarray": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
      "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA=="
    },
    "node_modules/isarray": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
      "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE="
    },
    "node_modules/isstream": {
      "version": "0.1.2",
      "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
      "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g=="
    },
    "node_modules/jmespath": {
      "version": "0.16.0",
      "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz",

@@ -384,6 +588,40 @@
        "node": ">= 0.6.0"
      }
    },
    "node_modules/jsbn": {
      "version": "0.1.1",
      "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
      "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg=="
    },
    "node_modules/json-schema": {
      "version": "0.4.0",
      "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz",
      "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA=="
    },
    "node_modules/json-schema-traverse": {
      "version": "0.4.1",
      "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
      "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="
    },
    "node_modules/json-stringify-safe": {
      "version": "5.0.1",
      "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
      "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA=="
    },
    "node_modules/jsprim": {
      "version": "1.4.2",
      "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz",
      "integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==",
      "dependencies": {
        "assert-plus": "1.0.0",
        "extsprintf": "1.3.0",
        "json-schema": "0.4.0",
        "verror": "1.10.0"
      },
      "engines": {
        "node": ">=0.6.0"
      }
    },
    "node_modules/media-typer": {
      "version": "0.3.0",
      "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",

@@ -448,6 +686,14 @@
        "node": ">= 0.6"
      }
    },
    "node_modules/oauth-sign": {
      "version": "0.9.0",
      "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz",
      "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==",
      "engines": {
        "node": "*"
      }
    },
    "node_modules/object-inspect": {
      "version": "1.12.2",
      "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz",

@@ -480,6 +726,11 @@
      "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
      "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w="
    },
    "node_modules/performance-now": {
      "version": "2.1.0",
      "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
      "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow=="
    },
    "node_modules/proxy-addr": {
      "version": "2.0.7",
      "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",

@@ -492,6 +743,11 @@
        "node": ">= 0.10"
      }
    },
    "node_modules/psl": {
      "version": "1.9.0",
      "resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz",
      "integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag=="
    },
    "node_modules/punycode": {
      "version": "1.3.2",
      "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",

@@ -542,6 +798,54 @@
        "node": ">= 0.8"
      }
    },
    "node_modules/request": {
      "version": "2.88.2",
      "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
      "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
      "deprecated": "request has been deprecated, see https://github.com/request/request/issues/3142",
      "dependencies": {
        "aws-sign2": "~0.7.0",
        "aws4": "^1.8.0",
        "caseless": "~0.12.0",
        "combined-stream": "~1.0.6",
        "extend": "~3.0.2",
        "forever-agent": "~0.6.1",
        "form-data": "~2.3.2",
        "har-validator": "~5.1.3",
        "http-signature": "~1.2.0",
        "is-typedarray": "~1.0.0",
        "isstream": "~0.1.2",
        "json-stringify-safe": "~5.0.1",
        "mime-types": "~2.1.19",
        "oauth-sign": "~0.9.0",
        "performance-now": "^2.1.0",
        "qs": "~6.5.2",
        "safe-buffer": "^5.1.2",
        "tough-cookie": "~2.5.0",
        "tunnel-agent": "^0.6.0",
        "uuid": "^3.3.2"
      },
      "engines": {
        "node": ">= 6"
      }
    },
    "node_modules/request/node_modules/qs": {
      "version": "6.5.3",
      "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz",
      "integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==",
      "engines": {
        "node": ">=0.6"
      }
    },
    "node_modules/request/node_modules/uuid": {
      "version": "3.4.0",
      "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
      "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==",
      "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.",
      "bin": {
        "uuid": "bin/uuid"
      }
    },
    "node_modules/safe-buffer": {
      "version": "5.2.1",
      "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",

@@ -639,6 +943,30 @@
        "node": ">= 8"
      }
    },
    "node_modules/sshpk": {
      "version": "1.17.0",
      "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz",
      "integrity": "sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==",
      "dependencies": {
        "asn1": "~0.2.3",
        "assert-plus": "^1.0.0",
        "bcrypt-pbkdf": "^1.0.0",
        "dashdash": "^1.12.0",
        "ecc-jsbn": "~0.1.1",
        "getpass": "^0.1.1",
        "jsbn": "~0.1.0",
        "safer-buffer": "^2.0.2",
        "tweetnacl": "~0.14.0"
      },
      "bin": {
        "sshpk-conv": "bin/sshpk-conv",
        "sshpk-sign": "bin/sshpk-sign",
        "sshpk-verify": "bin/sshpk-verify"
      },
      "engines": {
        "node": ">=0.10.0"
      }
    },
    "node_modules/statuses": {
      "version": "2.0.1",
      "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",

@@ -655,6 +983,42 @@
        "node": ">=0.6"
      }
    },
    "node_modules/tough-cookie": {
      "version": "2.5.0",
      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
      "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
      "dependencies": {
        "psl": "^1.1.28",
        "punycode": "^2.1.1"
      },
      "engines": {
        "node": ">=0.8"
      }
    },
    "node_modules/tough-cookie/node_modules/punycode": {
      "version": "2.1.1",
      "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
      "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
      "engines": {
        "node": ">=6"
      }
    },
    "node_modules/tunnel-agent": {
      "version": "0.6.0",
      "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
      "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==",
      "dependencies": {
        "safe-buffer": "^5.0.1"
      },
      "engines": {
        "node": "*"
      }
    },
    "node_modules/tweetnacl": {
      "version": "0.14.5",
      "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
      "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="
    },
    "node_modules/type-is": {
      "version": "1.6.18",
      "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",

@@ -675,6 +1039,22 @@
        "node": ">= 0.8"
      }
    },
    "node_modules/uri-js": {
      "version": "4.4.1",
      "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
      "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
      "dependencies": {
        "punycode": "^2.1.0"
      }
    },
    "node_modules/uri-js/node_modules/punycode": {
      "version": "2.1.1",
      "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
      "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
      "engines": {
        "node": ">=6"
      }
    },
    "node_modules/url": {
      "version": "0.10.3",
      "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz",

@@ -708,6 +1088,19 @@
        "node": ">= 0.8"
      }
    },
    "node_modules/verror": {
      "version": "1.10.0",
      "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
      "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==",
      "engines": [
        "node >=0.6.0"
      ],
      "dependencies": {
        "assert-plus": "^1.0.0",
        "core-util-is": "1.0.2",
        "extsprintf": "^1.2.0"
      }
    },
    "node_modules/xml2js": {
      "version": "0.4.19",
      "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz",

@@ -736,11 +1129,40 @@
        "negotiator": "0.6.3"
      }
    },
    "ajv": {
      "version": "6.12.6",
      "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
      "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
      "requires": {
        "fast-deep-equal": "^3.1.1",
        "fast-json-stable-stringify": "^2.0.0",
        "json-schema-traverse": "^0.4.1",
        "uri-js": "^4.2.2"
      }
    },
    "array-flatten": {
      "version": "1.1.1",
      "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
      "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI="
    },
    "asn1": {
      "version": "0.2.6",
      "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz",
      "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==",
      "requires": {
        "safer-buffer": "~2.1.0"
      }
    },
    "assert-plus": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
      "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw=="
    },
    "asynckit": {
      "version": "0.4.0",
      "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
      "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
    },
    "aws-sdk": {
      "version": "2.1172.0",
      "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1172.0.tgz",

@@ -757,11 +1179,29 @@
        "xml2js": "0.4.19"
      }
    },
    "aws-sign2": {
      "version": "0.7.0",
      "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
      "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA=="
    },
    "aws4": {
      "version": "1.11.0",
      "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz",
      "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA=="
    },
    "base64-js": {
      "version": "1.5.1",
      "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
      "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="
    },
    "bcrypt-pbkdf": {
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
      "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==",
      "requires": {
        "tweetnacl": "^0.14.3"
      }
    },
    "body-parser": {
      "version": "1.20.0",
      "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.0.tgz",

@@ -805,6 +1245,19 @@
        "get-intrinsic": "^1.0.2"
      }
    },
    "caseless": {
      "version": "0.12.0",
      "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
      "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw=="
    },
    "combined-stream": {
      "version": "1.0.8",
      "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
      "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
      "requires": {
        "delayed-stream": "~1.0.0"
      }
    },
    "content-disposition": {
      "version": "0.5.4",
      "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz",

@@ -828,6 +1281,19 @@
      "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
      "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw="
    },
    "core-util-is": {
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
      "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ=="
    },
    "dashdash": {
      "version": "1.14.1",
      "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
      "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==",
      "requires": {
        "assert-plus": "^1.0.0"
      }
    },
    "debug": {
      "version": "2.6.9",
      "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",

@@ -836,6 +1302,11 @@
        "ms": "2.0.0"
      }
    },
    "delayed-stream": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
      "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="
    },
    "depd": {
      "version": "2.0.0",
      "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",

@@ -846,6 +1317,15 @@
      "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz",
      "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg=="
    },
    "ecc-jsbn": {
      "version": "0.1.2",
      "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz",
      "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==",
      "requires": {
        "jsbn": "~0.1.0",
        "safer-buffer": "^2.1.0"
      }
    },
    "ee-first": {
      "version": "1.1.1",
      "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",

@@ -909,6 +1389,26 @@
        "vary": "~1.1.2"
      }
    },
    "extend": {
      "version": "3.0.2",
      "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
      "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
    },
    "extsprintf": {
      "version": "1.3.0",
      "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz",
      "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g=="
    },
    "fast-deep-equal": {
      "version": "3.1.3",
      "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
      "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
    },
    "fast-json-stable-stringify": {
      "version": "2.1.0",
      "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
      "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="
    },
    "finalhandler": {
      "version": "1.2.0",
      "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz",

@@ -923,6 +1423,21 @@
        "unpipe": "~1.0.0"
      }
    },
    "forever-agent": {
      "version": "0.6.1",
      "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
      "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw=="
    },
    "form-data": {
      "version": "2.3.3",
      "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz",
      "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==",
      "requires": {
        "asynckit": "^0.4.0",
        "combined-stream": "^1.0.6",
        "mime-types": "^2.1.12"
      }
    },
    "forwarded": {
      "version": "0.2.0",
      "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",

@@ -948,6 +1463,28 @@
        "has-symbols": "^1.0.3"
      }
    },
    "getpass": {
      "version": "0.1.7",
      "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
      "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==",
|
||||
"requires": {
|
||||
"assert-plus": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"har-schema": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz",
|
||||
"integrity": "sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q=="
|
||||
},
|
||||
"har-validator": {
|
||||
"version": "5.1.5",
|
||||
"resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz",
|
||||
"integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==",
|
||||
"requires": {
|
||||
"ajv": "^6.12.3",
|
||||
"har-schema": "^2.0.0"
|
||||
}
|
||||
},
|
||||
"has": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
|
||||
|
|
@ -973,6 +1510,16 @@
|
|||
"toidentifier": "1.0.1"
|
||||
}
|
||||
},
|
||||
"http-signature": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
|
||||
"integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==",
|
||||
"requires": {
|
||||
"assert-plus": "^1.0.0",
|
||||
"jsprim": "^1.2.2",
|
||||
"sshpk": "^1.7.0"
|
||||
}
|
||||
},
|
||||
"iconv-lite": {
|
||||
"version": "0.4.24",
|
||||
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
|
||||
|
|
@ -996,16 +1543,57 @@
|
|||
"resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
|
||||
"integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="
|
||||
},
|
||||
"is-typedarray": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
|
||||
"integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA=="
|
||||
},
|
||||
"isarray": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
|
||||
"integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE="
|
||||
},
|
||||
"isstream": {
|
||||
"version": "0.1.2",
|
||||
"resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
|
||||
"integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g=="
|
||||
},
|
||||
"jmespath": {
|
||||
"version": "0.16.0",
|
||||
"resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz",
|
||||
"integrity": "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw=="
|
||||
},
|
||||
"jsbn": {
|
||||
"version": "0.1.1",
|
||||
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
|
||||
"integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg=="
|
||||
},
|
||||
"json-schema": {
|
||||
"version": "0.4.0",
|
||||
"resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz",
|
||||
"integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA=="
|
||||
},
|
||||
"json-schema-traverse": {
|
||||
"version": "0.4.1",
|
||||
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
|
||||
"integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="
|
||||
},
|
||||
"json-stringify-safe": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
|
||||
"integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA=="
|
||||
},
|
||||
"jsprim": {
|
||||
"version": "1.4.2",
|
||||
"resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz",
|
||||
"integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==",
|
||||
"requires": {
|
||||
"assert-plus": "1.0.0",
|
||||
"extsprintf": "1.3.0",
|
||||
"json-schema": "0.4.0",
|
||||
"verror": "1.10.0"
|
||||
}
|
||||
},
|
||||
"media-typer": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
|
||||
|
|
@ -1049,6 +1637,11 @@
|
|||
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz",
|
||||
"integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="
|
||||
},
|
||||
"oauth-sign": {
|
||||
"version": "0.9.0",
|
||||
"resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz",
|
||||
"integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ=="
|
||||
},
|
||||
"object-inspect": {
|
||||
"version": "1.12.2",
|
||||
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz",
|
||||
|
|
@ -1072,6 +1665,11 @@
|
|||
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
|
||||
"integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w="
|
||||
},
|
||||
"performance-now": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
|
||||
"integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow=="
|
||||
},
|
||||
"proxy-addr": {
|
||||
"version": "2.0.7",
|
||||
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz",
|
||||
|
|
@ -1081,6 +1679,11 @@
|
|||
"ipaddr.js": "1.9.1"
|
||||
}
|
||||
},
|
||||
"psl": {
|
||||
"version": "1.9.0",
|
||||
"resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz",
|
||||
"integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag=="
|
||||
},
|
||||
"punycode": {
|
||||
"version": "1.3.2",
|
||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",
|
||||
|
|
@ -1115,6 +1718,45 @@
|
|||
"unpipe": "1.0.0"
|
||||
}
|
||||
},
|
||||
"request": {
|
||||
"version": "2.88.2",
|
||||
"resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
|
||||
"integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
|
||||
"requires": {
|
||||
"aws-sign2": "~0.7.0",
|
||||
"aws4": "^1.8.0",
|
||||
"caseless": "~0.12.0",
|
||||
"combined-stream": "~1.0.6",
|
||||
"extend": "~3.0.2",
|
||||
"forever-agent": "~0.6.1",
|
||||
"form-data": "~2.3.2",
|
||||
"har-validator": "~5.1.3",
|
||||
"http-signature": "~1.2.0",
|
||||
"is-typedarray": "~1.0.0",
|
||||
"isstream": "~0.1.2",
|
||||
"json-stringify-safe": "~5.0.1",
|
||||
"mime-types": "~2.1.19",
|
||||
"oauth-sign": "~0.9.0",
|
||||
"performance-now": "^2.1.0",
|
||||
"qs": "~6.5.2",
|
||||
"safe-buffer": "^5.1.2",
|
||||
"tough-cookie": "~2.5.0",
|
||||
"tunnel-agent": "^0.6.0",
|
||||
"uuid": "^3.3.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"qs": {
|
||||
"version": "6.5.3",
|
||||
"resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz",
|
||||
"integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA=="
|
||||
},
|
||||
"uuid": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
|
||||
"integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"safe-buffer": {
|
||||
"version": "5.2.1",
|
||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
||||
|
|
@ -1188,6 +1830,22 @@
|
|||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz",
|
||||
"integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA=="
|
||||
},
|
||||
"sshpk": {
|
||||
"version": "1.17.0",
|
||||
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz",
|
||||
"integrity": "sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==",
|
||||
"requires": {
|
||||
"asn1": "~0.2.3",
|
||||
"assert-plus": "^1.0.0",
|
||||
"bcrypt-pbkdf": "^1.0.0",
|
||||
"dashdash": "^1.12.0",
|
||||
"ecc-jsbn": "~0.1.1",
|
||||
"getpass": "^0.1.1",
|
||||
"jsbn": "~0.1.0",
|
||||
"safer-buffer": "^2.0.2",
|
||||
"tweetnacl": "~0.14.0"
|
||||
}
|
||||
},
|
||||
"statuses": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
|
||||
|
|
@ -1198,6 +1856,35 @@
|
|||
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
|
||||
"integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="
|
||||
},
|
||||
"tough-cookie": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
|
||||
"integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
|
||||
"requires": {
|
||||
"psl": "^1.1.28",
|
||||
"punycode": "^2.1.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"punycode": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
|
||||
"integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"tunnel-agent": {
|
||||
"version": "0.6.0",
|
||||
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
|
||||
"integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==",
|
||||
"requires": {
|
||||
"safe-buffer": "^5.0.1"
|
||||
}
|
||||
},
|
||||
"tweetnacl": {
|
||||
"version": "0.14.5",
|
||||
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
|
||||
"integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="
|
||||
},
|
||||
"type-is": {
|
||||
"version": "1.6.18",
|
||||
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
|
||||
|
|
@ -1212,6 +1899,21 @@
|
|||
"resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
|
||||
"integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="
|
||||
},
|
||||
"uri-js": {
|
||||
"version": "4.4.1",
|
||||
"resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
|
||||
"integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
|
||||
"requires": {
|
||||
"punycode": "^2.1.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"punycode": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
|
||||
"integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"url": {
|
||||
"version": "0.10.3",
|
||||
"resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz",
|
||||
|
|
@ -1236,6 +1938,16 @@
|
|||
"resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
|
||||
"integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw="
|
||||
},
|
||||
"verror": {
|
||||
"version": "1.10.0",
|
||||
"resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
|
||||
"integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==",
|
||||
"requires": {
|
||||
"assert-plus": "^1.0.0",
|
||||
"core-util-is": "1.0.2",
|
||||
"extsprintf": "^1.2.0"
|
||||
}
|
||||
},
|
||||
"xml2js": {
|
||||
"version": "0.4.19",
|
||||
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz",
|
||||
|
|
|
|||
|
|
@@ -20,6 +20,7 @@
  "dependencies": {
    "aws-sdk": "^2.1172.0",
    "express": "^4.18.1",
    "request": "^2.88.2",
    "source-map": "^0.7.4"
  }
}
@@ -1,3 +1,3 @@
#!/bin/zsh

MAPPING_WASM=./mappings.wasm npm start
MAPPING_WASM=./mappings.wasm PREFIX=/abc npm start
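The PREFIX variable added to the start script suggests the HTTP routes can be mounted under a configurable sub-path. A minimal sketch of how such a prefix could be wired into the Express app; the router here is a stand-in and the actual server wiring is not part of this diff:

const express = require('express');
const app = express();
// PREFIX comes from the environment, as in the start script above; an empty
// string preserves the old root-mounted behaviour.
const PREFIX = process.env.PREFIX || '';
const router = express.Router(); // stand-in for the project's sourcemaps router
app.use(`${PREFIX}/sourcemaps`, router);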
@@ -23,4 +23,12 @@ const server = app.listen(PORT, HOST, () => {
    console.log(`SR App listening on http://${HOST}:${PORT}`);
    console.log('Press Ctrl+C to quit.');
});
module.exports = {server};
module.exports = {server};

app.get('/private/shutdown', (req, res) => {
        console.log("Requested shutdown");
        res.statusCode = 200;
        res.end("ok!");
        process.kill(1, "SIGTERM");
    }
);
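The shutdown endpoint above signals PID 1 directly, which works when the process runs under an init such as tini that forwards SIGTERM to the node process. A gentler variant could drain open connections first — a sketch only, not what this commit ships:

app.get('/private/shutdown', (req, res) => {
    console.log("Requested shutdown");
    res.statusCode = 200;
    res.end("ok!");
    // Assumption: stop accepting new connections, then exit once in-flight
    // requests have finished, instead of signalling the init process.
    server.close(() => process.exit(0));
});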
@@ -3,6 +3,7 @@ const fs = require('fs');
const sourceMap = require('source-map');
const AWS = require('aws-sdk');
const URL = require('url');
const request = require('request');
const wasm = fs.readFileSync(process.env.MAPPING_WASM || '/mappings.wasm');
sourceMap.SourceMapConsumer.initialize({
    "lib/mappings.wasm": wasm
@@ -10,102 +11,141 @@ sourceMap.SourceMapConsumer.initialize({

console.log(`>sourceMap initialised using ${process.env.MAPPING_WASM || '/mappings.wasm'}`);

module.exports.sourcemapReader = async event => {
    let s3;

    if (process.env.S3_HOST) {
        s3 = new AWS.S3({
            endpoint: process.env.S3_HOST,
            accessKeyId: process.env.S3_KEY,
            secretAccessKey: process.env.S3_SECRET,
            s3ForcePathStyle: true, // needed with minio?
            signatureVersion: 'v4'
        });
    } else {
        s3 = new AWS.S3({
            'AccessKeyID': process.env.aws_access_key_id,
            'SecretAccessKey': process.env.aws_secret_access_key,
            'Region': process.env.aws_region
        });
    }

    var options = {
        Bucket: event.bucket,
        Key: event.key
    };
    return new Promise(function (resolve, reject) {
        const getObjectStart = Date.now();
        s3.getObject(options, (err, data) => {
            if (err) {
                console.error("[SR] Get S3 object failed");
                console.error(err);
                return reject(err);
            }
            const getObjectEnd = Date.now();
            const fileSize = (data.ContentLength / 1024) / 1024;
            options.fileSize = `${fileSize} Mb`;
            const downloadTime = (getObjectEnd - getObjectStart) / 1000;
            options.downloadTime = `${downloadTime} s`;
            if (fileSize >= 3) {
                console.log("[SR] large file:" + JSON.stringify(options));
            }
            let sourcemap = data.Body.toString();

            return new sourceMap.SourceMapConsumer(sourcemap)
                .then(consumer => {
                    let results = [];
                    for (let i = 0; i < event.positions.length; i++) {
                        let original = consumer.originalPositionFor({
                            line: event.positions[i].line,
                            column: event.positions[i].column
                        });
                        let url = URL.parse("");
                        let preview = [];
                        if (original.source) {
                            preview = consumer.sourceContentFor(original.source, true);
                            if (preview !== null) {
                                preview = preview.split("\n")
                                    .map((line, i) => [i + 1, line]);
                                if (event.padding) {
                                    let start = original.line < event.padding ? 0 : original.line - event.padding;
                                    preview = preview.slice(start, original.line + event.padding);
                                }
                            } else {
                                console.log(`[SR] source not found, null preview for: ${original.source}`);
                                preview = []
                            }
                            url = URL.parse(original.source);
                        } else {
                            console.log("[SR] couldn't find original position of: " + JSON.stringify({
                                line: event.positions[i].line,
                                column: event.positions[i].column
                            }));
                        }
                        let result = {
                            "absPath": url.href,
                            "filename": url.pathname,
                            "lineNo": original.line,
                            "colNo": original.column,
                            "function": original.name,
                            "context": preview
                        };
                        // console.log(result);
                        results.push(result);
                    }
                    consumer = undefined;

                    const sourcemapProcessingTime = (Date.now() - getObjectEnd) / 1000;
                    options.sourcemapProcessingTime = `${sourcemapProcessingTime} s`
                    if (fileSize >= 3 || sourcemapProcessingTime > 2) {
                        console.log("[SR] " + JSON.stringify(options));
                    }
                    // Use this code if you don't use the http event with the LAMBDA-PROXY integration
                    return resolve(results);
                })
                .finally(() => {
                    sourcemap = undefined;
                })
        });
    });
};

function parseSourcemap(sourcemap, event, options, resolve, reject) {
    const getObjectEnd = Date.now();
    try {
        return new sourceMap.SourceMapConsumer(sourcemap)
            .then(consumer => {
                let results = [];
                for (let i = 0; i < event.positions.length; i++) {
                    let original = consumer.originalPositionFor({
                        line: event.positions[i].line,
                        column: event.positions[i].column
                    });
                    let url = URL.parse("");
                    let preview = [];
                    if (original.source) {
                        preview = consumer.sourceContentFor(original.source, true);
                        if (preview !== null) {
                            preview = preview.split("\n")
                                .map((line, i) => [i + 1, line]);
                            if (event.padding) {
                                let start = original.line < event.padding ? 0 : original.line - event.padding;
                                preview = preview.slice(start, original.line + event.padding);
                            }
                        } else {
                            console.log(`[SR] source not found, null preview for: ${original.source}`);
                            preview = []
                        }
                        url = URL.parse(original.source);
                    } else {
                        console.log("[SR] couldn't find original position of: " + JSON.stringify({
                            line: event.positions[i].line,
                            column: event.positions[i].column
                        }));
                    }
                    let result = {
                        "absPath": url.href,
                        "filename": url.pathname,
                        "lineNo": original.line,
                        "colNo": original.column,
                        "function": original.name,
                        "context": preview
                    };
                    // console.log(result);
                    results.push(result);
                }
                consumer = undefined;

                options.sourcemapProcessingTime = (Date.now() - getObjectEnd) / 1000;
                options.sourcemapProcessingTimeUnit = 's';
                if (options.fileSize >= 3 || options.sourcemapProcessingTime > 2) {
                    console.log("[SR] " + JSON.stringify(options));
                }
                // Use this code if you don't use the http event with the LAMBDA-PROXY integration
                return resolve(results);
            })
            .catch(err => {
                return reject(err);
            })
            .finally(() => {
                sourcemap = undefined;
            });
    } catch (err) {
        reject(err);
    }
}

module.exports.sourcemapReader = async event => {
    if (event.isURL) {
        let options = {
            URL: event.key
        };
        return new Promise(function (resolve, reject) {
            const getObjectStart = Date.now();
            return request.get(options.URL, (err, response, sourcemap) => {
                if (err || response.statusCode !== 200) {
                    console.error("[SR] Getting file from URL failed");
                    console.error("err:");
                    console.error(err);
                    console.error("response:");
                    if (err) {
                        return reject(err);
                    }
                    return reject(response);
                }
                const getObjectEnd = Date.now();
                options.fileSize = (response.headers['content-length'] / 1024) / 1024;
                options.fileSizeUnit = 'Mb';
                options.downloadTime = (getObjectEnd - getObjectStart) / 1000;
                options.downloadTimeUnit = 's';
                if (options.fileSize >= 3) {
                    console.log("[SR] large file:" + JSON.stringify(options));
                }
                return parseSourcemap(sourcemap, event, options, resolve, reject);
            });
        });
    } else {
        let s3;
        if (process.env.S3_HOST) {
            s3 = new AWS.S3({
                endpoint: process.env.S3_HOST,
                accessKeyId: process.env.S3_KEY,
                secretAccessKey: process.env.S3_SECRET,
                s3ForcePathStyle: true, // needed with minio?
                signatureVersion: 'v4'
            });
        } else {
            s3 = new AWS.S3({
                'AccessKeyID': process.env.aws_access_key_id,
                'SecretAccessKey': process.env.aws_secret_access_key,
                'Region': process.env.aws_region
            });
        }

        let options = {
            Bucket: event.bucket,
            Key: event.key
        };
        return new Promise(function (resolve, reject) {
            const getObjectStart = Date.now();
            s3.getObject(options, (err, data) => {
                if (err) {
                    console.error("[SR] Get S3 object failed");
                    console.error(err);
                    return reject(err);
                }
                const getObjectEnd = Date.now();
                options.fileSize = (data.ContentLength / 1024) / 1024;
                options.fileSizeUnit = 'Mb';
                options.downloadTime = (getObjectEnd - getObjectStart) / 1000;
                options.downloadTimeUnit = 's';
                if (options.fileSize >= 3) {
                    console.log("[SR] large file:" + JSON.stringify(options));
                }
                let sourcemap = data.Body.toString();
                return parseSourcemap(sourcemap, event, options, resolve, reject);
            });
        });
    }
};
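For reference, a hypothetical invocation of the reworked handler. The field names follow the code above; the module path and the values are invented:

const handler = require('./handler'); // path assumed
handler.sourcemapReader({
    isURL: true,                              // take the HTTP branch instead of S3
    key: 'https://example.com/bundle.js.map', // URL, or the S3 object key when isURL is false
    bucket: undefined,                        // only read by the S3 branch
    positions: [{line: 1, column: 1045}],     // minified positions to resolve
    padding: 5                                // lines of source context kept around each hit
}).then(results => console.log(results))
    .catch(err => console.error(err));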
@@ -9,7 +9,7 @@ router.post('/', (req, res) => {
    });
    req.on('end', function () {
        data = JSON.parse(data);
        console.log("[SR] Starting parser for: " + data.key);
        console.log(`[SR] Starting parser for ${data.isURL ? "URL: " : "file: "}${data.key}`);
        // process.env = {...process.env, ...data.bucket_config};
        handler.sourcemapReader(data)
            .then((results) => {
@@ -20,8 +20,8 @@ router.post('/', (req, res) => {
            .catch((e) => {
                console.error("[SR] Something went wrong");
                console.error(e);
                res.statusCode(500);
                res.end(e);
                res.statusCode = 500;
                res.end(e.toString());
            });
    })
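A hypothetical client call against this route; the port and mount path are assumptions, not taken from the diff:

const http = require('http');
const body = JSON.stringify({
    isURL: true,
    key: 'https://example.com/bundle.js.map',
    positions: [{line: 1, column: 1045}],
    padding: 5
});
const req = http.request({
    host: 'localhost',
    port: 9000,          // assumed
    path: '/sourcemaps', // assumed mount point of the router above
    method: 'POST',
    headers: {'Content-Type': 'application/json', 'Content-Length': Buffer.byteLength(body)}
}, res => res.pipe(process.stdout)); // print the resolved positions
req.on('error', err => console.error(err));
req.end(body);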
@@ -31,4 +31,12 @@ const wsserver = wsapp.listen(PORT, HOST, () => {
});
wsapp.enable('trust proxy');
socket.start(wsserver);
module.exports = {wsserver};
module.exports = {wsserver};

wsapp.get('/private/shutdown', (req, res) => {
        console.log("Requested shutdown");
        res.statusCode = 200;
        res.end("ok!");
        process.kill(1, "SIGTERM");
    }
);
@@ -134,7 +134,7 @@ function check(socket, next) {
        debug && console.error(`projectKey:${projectKey}, sessionId:${sessionId}`);
        return next(new Error('Authentication error'));
    }
    if (projectKey !== decoded.projectKey || sessionId !== decoded.sessionId) {
    if (String(projectKey) !== String(decoded.projectKey) || String(sessionId) !== String(decoded.sessionId)) {
        debug && console.error(`Trying to access projectKey:${projectKey} instead of ${decoded.projectKey}\nor`);
        debug && console.error(`Trying to access sessionId:${sessionId} instead of ${decoded.sessionId}`);
        return next(new Error('Authorization error'));
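One way to read the String(...) change: the values in the verified token payload survive a JSON round-trip and may come back as numbers, while the values parsed from the connection are strings, so the old strict comparison could reject a valid session. A small illustration with invented values:

const projectKey = "42";          // parsed from the connection: always a string
const decoded = {projectKey: 42}; // from the token payload: may be a number
console.log(projectKey !== decoded.projectKey);                 // true  -> spurious 'Authorization error'
console.log(String(projectKey) !== String(decoded.projectKey)); // false -> request accepted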