Merge pull request #733 from openreplay/api-v1.8.0_patch1

Api v1.8.0 patch1
Kraiem Taha Yassine 2022-09-14 16:08:30 +01:00 committed by GitHub
commit a9ad3c47bc
13 changed files with 63 additions and 33 deletions

View file

@@ -17,6 +17,7 @@ from routers.subs import dashboard, insights, metrics, v1_api
app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""))
app.add_middleware(GZipMiddleware, minimum_size=1000)
@app.middleware('http')
async def or_middleware(request: Request, call_next):
global OR_SESSION_TOKEN
@@ -28,7 +29,9 @@ async def or_middleware(request: Request, call_next):
now = int(time.time() * 1000)
response: StreamingResponse = await call_next(request)
if helper.TRACK_TIME:
print(f"Execution time: {int(time.time() * 1000) - now} ms")
now = int(time.time() * 1000) - now
if now > 500:
print(f"Execution time: {now} ms")
except Exception as e:
pg_client.close()
raise e
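The hunk above replaces an unconditional per-request timing print with one that only fires past a threshold. A minimal, self-contained sketch of the same FastAPI middleware pattern (the 500 ms cutoff mirrors the patch; `SLOW_MS` and the bare app object are illustrative):

```python
import time

from fastapi import FastAPI, Request

app = FastAPI()
SLOW_MS = 500  # threshold from the patch; anything faster stays quiet


@app.middleware("http")
async def timing_middleware(request: Request, call_next):
    start = int(time.time() * 1000)
    response = await call_next(request)
    elapsed = int(time.time() * 1000) - start
    if elapsed > SLOW_MS:
        # Log only slow requests so routine traffic doesn't flood stdout
        print(f"Execution time: {elapsed} ms")
    return response
```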

View file

@@ -99,6 +99,9 @@ def get_traces_group(project_id, payload):
if payloads[key] is None:
continue
key_results = sourcemaps_parser.get_original_trace(key=key, positions=[o["position"] for o in payloads[key]])
+ if key_results is None:
+ all_exists = False
+ continue
for i, r in enumerate(key_results):
res_index = payloads[key][i]["resultIndex"]
# function name search by frontend lib is better than sourcemaps' one in most cases
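get_original_trace (see the sourcemaps parser diff below) now returns None on failure instead of an empty dict, so this caller can tell "lookup failed" apart from "no matches" and clear all_exists. A minimal caller-side sketch, with payloads shaped as in the hunk and a stubbed fetch (resolve_traces and the merge step are illustrative, not from the patch):

```python
from typing import Optional


def get_original_trace(key: str, positions: list) -> Optional[list]:
    """Stub standing in for the sourcemaps-reader call shown below."""
    return None


def resolve_traces(payloads: dict) -> bool:
    all_exists = True
    for key in payloads:
        if payloads[key] is None:
            continue
        key_results = get_original_trace(
            key=key, positions=[o["position"] for o in payloads[key]])
        if key_results is None:  # fetch failed: flag it, keep processing other keys
            all_exists = False
            continue
        for i, r in enumerate(key_results):
            payloads[key][i]["result"] = r  # hypothetical merge of resolved frames
    return all_exists
```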

View file

@@ -19,11 +19,12 @@ def get_original_trace(key, positions):
r = requests.post(config("sourcemaps_reader"), json=payload,
timeout=config("sourcemapTimeout", cast=int, default=5))
if r.status_code != 200:
- return {}
+ print(f"Issue getting sourcemap status_code:{r.status_code}")
+ return None
return r.json()
except requests.exceptions.Timeout:
print("Timeout getting sourcemap")
- return {}
+ return None
except Exception as e:
print("issue getting sourcemap")
return {}
print("Issue getting sourcemap")
return None
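Returning None instead of {} on every failure path is what makes the caller-side `if key_results is None` check above meaningful: an empty dict is a valid (empty) answer, while None unambiguously signals that the reader call failed. A self-contained sketch of the function after the patch (the reader URL is illustrative; the real code pulls it from `config("sourcemaps_reader")`):

```python
from typing import Optional

import requests

# Illustrative endpoint; configured via decouple in the real code
SOURCEMAPS_READER_URL = "http://127.0.0.1:9000/sourcemaps"


def get_original_trace(key: str, positions: list) -> Optional[dict]:
    payload = {"key": key, "positions": positions}
    try:
        r = requests.post(SOURCEMAPS_READER_URL, json=payload, timeout=5)
        if r.status_code != 200:
            print(f"Issue getting sourcemap status_code:{r.status_code}")
            return None  # failure sentinel, distinct from an empty result
        return r.json()
    except requests.exceptions.Timeout:
        print("Timeout getting sourcemap")
        return None
    except Exception:
        print("Issue getting sourcemap")
        return None
```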

View file

@@ -20,6 +20,8 @@ PG_CONFIG = dict(_PG_CONFIG)
if config("pg_timeout", cast=int, default=0) > 0:
PG_CONFIG["options"] = f"-c statement_timeout={config('pg_timeout', cast=int) * 1000}"
+ logging.info(f">PG_POOL:{config('PG_POOL', cast=bool, default=None)}")
class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool):
def __init__(self, minconn, maxconn, *args, **kwargs):
@@ -36,8 +38,15 @@ class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool):
raise e
def putconn(self, *args, **kwargs):
- super().putconn(*args, **kwargs)
- self._semaphore.release()
+ try:
+ super().putconn(*args, **kwargs)
+ self._semaphore.release()
+ except psycopg2.pool.PoolError as e:
+ if str(e) == "trying to put unkeyed connection":
+ print("!!! trying to put unkeyed connection")
+ print(f"env-PG_POOL:{config('PG_POOL', cast=bool, default=None)}")
+ return
+ raise e
postgreSQL_pool: ORThreadedConnectionPool = None
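The patch makes putconn() tolerant of psycopg2's "trying to put unkeyed connection" PoolError, which a pool raises when it is handed a connection it no longer tracks (for example after the pool was closed and rebuilt). A sketch of the whole class under that reading; the getconn()/semaphore wiring is not shown in the hunk, so that part is an assumption:

```python
import threading

import psycopg2.pool


class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool):
    def __init__(self, minconn, maxconn, *args, **kwargs):
        # Bound concurrent checkouts so callers block instead of erroring out
        self._semaphore = threading.Semaphore(maxconn)
        super().__init__(minconn, maxconn, *args, **kwargs)

    def getconn(self, *args, **kwargs):  # assumed shape; the hunk only shows its trailing raise
        self._semaphore.acquire()
        try:
            return super().getconn(*args, **kwargs)
        except Exception:
            self._semaphore.release()
            raise

    def putconn(self, *args, **kwargs):
        try:
            super().putconn(*args, **kwargs)
            self._semaphore.release()
        except psycopg2.pool.PoolError as e:
            if str(e) == "trying to put unkeyed connection":
                # Swallow the stale/double return instead of crashing the request
                print("!!! trying to put unkeyed connection")
                return
            raise
```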

View file

@@ -63,9 +63,6 @@ def get_presigned_url_for_upload(bucket, expires_in, key):
def get_file(source_bucket, source_key):
print("******************************")
print(f"looking for: {source_key} in {source_bucket}")
print("******************************")
try:
result = client.get_object(
Bucket=source_bucket,
@@ -73,7 +70,7 @@ def get_file(source_bucket, source_key):
)
except ClientError as ex:
if ex.response['Error']['Code'] == 'NoSuchKey':
- print(f'======> No object found - returning None for {source_bucket}/{source_key}')
+ print(f'======> No object found - returning None for \nbucket:{source_bucket}\nkey:{source_key}')
return None
else:
raise ex
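Dropping the banner prints leaves one NoSuchKey branch that reports the miss and returns None rather than raising. A minimal boto3 sketch of the same lookup (client creation and the final Body read are assumptions; the hunk ends before the return):

```python
import boto3
from botocore.exceptions import ClientError

client = boto3.client("s3")  # assumed; the real module builds its client from config


def get_file(source_bucket: str, source_key: str):
    try:
        result = client.get_object(Bucket=source_bucket, Key=source_key)
    except ClientError as ex:
        if ex.response["Error"]["Code"] == "NoSuchKey":
            print(f"======> No object found - returning None for \nbucket:{source_bucket}\nkey:{source_key}")
            return None
        raise
    return result["Body"].read()  # assumed return; callers get raw bytes
```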

View file

@@ -1,5 +1,5 @@
#!/bin/sh
cd sourcemap-reader
- nohup npm start &> /tmp/sourcemap-reader.log &
+ nohup npm start &
cd ..
uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload --proxy-headers

View file

@@ -1,28 +1,33 @@
print("============= CRONS =============")
- import sys
+ import asyncio
+ import sys
from routers.crons import core_dynamic_crons
+ ACTIONS = {
+ "TELEMETRY": core_dynamic_crons.telemetry_cron,
+ "JOB": core_dynamic_crons.run_scheduled_jobs,
+ "REPORT": core_dynamic_crons.weekly_report
+ }
def default_action(action):
async def _func():
print(f"{action} not found in crons-definitions")
print("possible actions:")
print(ACTIONS.keys())
return _func
async def process(action):
- await {
- "TELEMETRY": core_dynamic_crons.telemetry_cron,
- "JOB": core_dynamic_crons.run_scheduled_jobs,
- "REPORT": core_dynamic_crons.weekly_report2
- }.get(action.upper(), default_action(action))()
+ await ACTIONS.get(action.upper(), default_action(action))()
if __name__ == '__main__':
if len(sys.argv) < 2 or len(sys.argv[1]) < 1:
print("please provide actions as argument")
print("please provide actions as argument\npossible actions:")
print(ACTIONS.keys())
else:
print(f"action: {sys.argv[1]}")
asyncio.run(process(sys.argv[1]))
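Hoisting the action table to a module-level ACTIONS dict removes the duplicated inline mapping and lets the dispatcher, the fallback handler, and the usage message share one source of truth. A runnable sketch of the pattern, with a stub coroutine in place of the real crons:

```python
import asyncio
import sys


async def telemetry_cron() -> None:
    print("telemetry computed")  # stub for core_dynamic_crons.telemetry_cron

ACTIONS = {"TELEMETRY": telemetry_cron}


def default_action(action: str):
    async def _func():
        print(f"{action} not found in crons-definitions")
        print("possible actions:")
        print(ACTIONS.keys())
    return _func


async def process(action: str) -> None:
    # .get() falls back to a coroutine factory, so the await always has a target
    await ACTIONS.get(action.upper(), default_action(action))()


if __name__ == '__main__':
    if len(sys.argv) < 2 or len(sys.argv[1]) < 1:
        print("please provide actions as argument\npossible actions:")
        print(ACTIONS.keys())
    else:
        asyncio.run(process(sys.argv[1]))
```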

View file

@@ -2,6 +2,6 @@
sh env_vars.sh
source /tmp/.env.override
cd sourcemap-reader
- nohup npm start &> /tmp/sourcemap-reader.log &
+ nohup npm start &
cd ..
uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload --proxy-headers

View file

@@ -6,9 +6,8 @@ from starlette.responses import RedirectResponse
import schemas
import schemas_ee
from chalicelib.core import integrations_manager
- from chalicelib.core import sessions
- from chalicelib.core import tenants, users, metadata, projects, license
+ from chalicelib.core import tenants, users, projects, license
from chalicelib.core import webhook
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import SAML2_helper

View file

@@ -1,5 +1,6 @@
from chalicelib.core import telemetry, unlock
- from chalicelib.core import weekly_report, jobs
+ from chalicelib.core import jobs
+ from chalicelib.core import weekly_report as weekly_report_script
from decouple import config
@@ -7,15 +8,14 @@ async def run_scheduled_jobs() -> None:
jobs.execute_jobs()
- async def weekly_report2() -> None:
- weekly_report.cron()
+ async def weekly_report() -> None:
+ weekly_report_script.cron()
async def telemetry_cron() -> None:
telemetry.compute()
# @app.schedule(Cron('0/60', '*', '*', '*', '?', '*'))
def unlock_cron() -> None:
print("validating license")
unlock.check()
@@ -28,7 +28,7 @@ cron_jobs = [
SINGLE_CRONS = [{"func": telemetry_cron, "trigger": "cron", "day_of_week": "*"},
{"func": run_scheduled_jobs, "trigger": "interval", "seconds": 60, "misfire_grace_time": 20},
{"func": weekly_report2, "trigger": "cron", "day_of_week": "mon", "hour": 5,
{"func": weekly_report, "trigger": "cron", "day_of_week": "mon", "hour": 5,
"misfire_grace_time": 60 * 60}]
if config("LOCAL_CRONS", default=False, cast=bool):

View file

@@ -1,7 +1,6 @@
from chalicelib.core import roles, traces
from chalicelib.core import unlock
from chalicelib.utils import assist_helper
from chalicelib.utils.TimeUTC import TimeUTC
unlock.check()

View file

@@ -4,3 +4,4 @@ npm-debug.log
.cache
test.html
/utils/
+ mappings.wasm

View file

@@ -40,12 +40,21 @@ module.exports.sourcemapReader = async event => {
Key: event.key
};
return new Promise(function (resolve, reject) {
+ const getObjectStart = Date.now();
s3.getObject(options, (err, data) => {
if (err) {
console.error("[SR] Get S3 object failed");
console.error(err);
return reject(err);
}
+ const getObjectEnd = Date.now();
+ const fileSize = (data.ContentLength / 1024) / 1024;
+ options.fileSize = `${fileSize} Mb`;
+ const downloadTime = (getObjectEnd - getObjectStart) / 1000;
+ options.downloadTime = `${downloadTime} s`;
+ if (fileSize >= 3) {
+ console.log("[SR] large file:" + JSON.stringify(options));
+ }
let sourcemap = data.Body.toString();
return new sourceMap.SourceMapConsumer(sourcemap)
@@ -68,17 +77,15 @@ module.exports.sourcemapReader = async event => {
preview = preview.slice(start, original.line + event.padding);
}
} else {
console.log("[SR] source not found, null preview for:");
console.log(original.source);
console.log(`[SR] source not found, null preview for: ${original.source}`);
preview = []
}
url = URL.parse(original.source);
} else {
console.log("[SR] couldn't find original position of:");
console.log({
console.log("[SR] couldn't find original position of: " + JSON.stringify({
line: event.positions[i].line,
column: event.positions[i].column
- });
+ }));
}
let result = {
"absPath": url.href,
@@ -92,6 +99,12 @@ module.exports.sourcemapReader = async event => {
results.push(result);
}
consumer = undefined;
+ const sourcemapProcessingTime = (Date.now() - getObjectEnd) / 1000;
+ options.sourcemapProcessingTime = `${sourcemapProcessingTime} s`
+ if (fileSize >= 3 || sourcemapProcessingTime > 2) {
+ console.log("[SR] " + JSON.stringify(options));
+ }
// Use this code if you don't use the http event with the LAMBDA-PROXY integration
return resolve(results);
})
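The JS changes instrument two phases (S3 download, sourcemap processing) and only log when a job is an outlier: file size of 3 Mb or more, or processing over 2 s. The same measure-everything, log-outliers idea sketched in Python for contrast (thresholds mirror the patch; the data stand-ins are illustrative):

```python
import json
import time

timings = {"key": "app.js.map"}  # illustrative job metadata, like the JS `options`

start = time.time()
data = b"..."  # stand-in for the S3 getObject body
timings["downloadTime"] = f"{time.time() - start} s"

file_size_mb = len(data) / 1024 / 1024
timings["fileSize"] = f"{file_size_mb} Mb"

start = time.time()
# ... stand-in for the SourceMapConsumer work ...
processing = time.time() - start
timings["sourcemapProcessingTime"] = f"{processing} s"

if file_size_mb >= 3 or processing > 2:  # only surface the outliers
    print("[SR] " + json.dumps(timings))
```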