Changes:
- changed Sentry tags
- changed asayer_session_id to openReplaySessionToken
- EE full merge

parent bd394a7141
commit 37dd7067c6

19 changed files with 258 additions and 121 deletions

api/app.py (26 changed lines)
@@ -23,13 +23,13 @@ import traceback
 old_tb = traceback.print_exception
 old_f = sys.stdout
 old_e = sys.stderr
-ASAYER_SESSION_ID = None
+OR_SESSION_TOKEN = None


 class F:
     def write(self, x):
-        if ASAYER_SESSION_ID is not None and x != '\n' and not helper.is_local():
-            old_f.write(f"[asayer_session_id={ASAYER_SESSION_ID}] {x}")
+        if OR_SESSION_TOKEN is not None and x != '\n' and not helper.is_local():
+            old_f.write(f"[or_session_token={OR_SESSION_TOKEN}] {x}")
         else:
             old_f.write(x)

@@ -38,8 +38,8 @@ class F:


 def tb_print_exception(etype, value, tb, limit=None, file=None, chain=True):
-    if ASAYER_SESSION_ID is not None and not helper.is_local():
-        value = type(value)(f"[asayer_session_id={ASAYER_SESSION_ID}] " + str(value))
+    if OR_SESSION_TOKEN is not None and not helper.is_local():
+        value = type(value)(f"[or_session_token={OR_SESSION_TOKEN}] " + str(value))

     old_tb(etype, value, tb, limit, file, chain)

@@ -54,11 +54,11 @@ sys.stderr = F()

 _overrides.chalice_app(app)

 # v0905

 @app.middleware('http')
-def asayer_middleware(event, get_response):
-    global ASAYER_SESSION_ID
-    ASAYER_SESSION_ID = app.current_request.headers.get('vnd.openreplay.com.sid',
+def or_middleware(event, get_response):
+    global OR_SESSION_TOKEN
+    OR_SESSION_TOKEN = app.current_request.headers.get('vnd.openreplay.com.sid',
                                                         app.current_request.headers.get('vnd.asayer.io.sid'))
     if "authorizer" in event.context and event.context["authorizer"] is None:
         print("Deleted user!!")

@@ -70,19 +70,19 @@ def asayer_middleware(event, get_response):
         import time
         now = int(time.time() * 1000)
         response = get_response(event)
-        if response.status_code == 500 and helper.allow_sentry() and ASAYER_SESSION_ID is not None and not helper.is_local():
+        if response.status_code == 500 and helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local():
             with configure_scope() as scope:
                 scope.set_tag('stage', environ["stage"])
-                scope.set_tag('asayer_session_id', ASAYER_SESSION_ID)
+                scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN)
                 scope.set_extra("context", event.context)
                 sentry_sdk.capture_exception(Exception(response.body))
         if helper.TRACK_TIME:
             print(f"Execution time: {int(time.time() * 1000) - now} ms")
     except Exception as e:
-        if helper.allow_sentry() and ASAYER_SESSION_ID is not None and not helper.is_local():
+        if helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local():
             with configure_scope() as scope:
                 scope.set_tag('stage', environ["stage"])
-                scope.set_tag('openReplaySessionToken', ASAYER_SESSION_ID)
+                scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN)
                 scope.set_extra("context", event.context)
                 sentry_sdk.capture_exception(e)
         response = Response(body={"Code": "InternalServerError",

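The api/app.py change is mechanical: the module-level ASAYER_SESSION_ID becomes OR_SESSION_TOKEN and the Sentry tag becomes openReplaySessionToken. A minimal standalone sketch of the resulting tagging path (capture_with_session_token is an illustrative name, not part of the commit):

import sentry_sdk
from sentry_sdk import configure_scope

OR_SESSION_TOKEN = None  # set per request by the middleware above


def capture_with_session_token(exc):
    # Mirrors the middleware's error path: tag the Sentry event with the
    # OpenReplay session token so backend errors can be joined to replays.
    with configure_scope() as scope:
        scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN)
        sentry_sdk.capture_exception(exc)
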
@@ -31,14 +31,12 @@
     "assign_link": "http://127.0.0.1:8000/async/email_assignment",
     "captcha_server": "",
     "captcha_key": "",
-    "sessions_bucket": "asayer-mobs",
+    "sessions_bucket": "mobs",
     "sessions_region": "us-east-1",
     "put_S3_TTL": "20",
-    "sourcemaps_bucket": "asayer-sourcemaps",
-    "sourcemaps_bucket_key": "",
-    "sourcemaps_bucket_secret": "",
-    "sourcemaps_bucket_region": "us-east-1",
-    "js_cache_bucket": "asayer-sessions-assets",
+    "sourcemaps_reader": "http://127.0.0.1:3000/",
+    "sourcemaps_bucket": "sourcemaps",
+    "js_cache_bucket": "sessions-assets",
     "async_Token": "",
     "EMAIL_HOST": "",
     "EMAIL_PORT": "587",

ee/api/.gitignore (vendored, 1 changed line)
@@ -170,7 +170,6 @@ logs*.txt
 *.csv

 *.p
-*.js
 SUBNETS.json

 chalicelib/.config

@@ -25,13 +25,13 @@ import traceback
 old_tb = traceback.print_exception
 old_f = sys.stdout
 old_e = sys.stderr
-ASAYER_SESSION_ID = None
+OR_SESSION_TOKEN = None


 class F:
     def write(self, x):
-        if ASAYER_SESSION_ID is not None and x != '\n' and not helper.is_local():
-            old_f.write(f"[asayer_session_id={ASAYER_SESSION_ID}] {x}")
+        if OR_SESSION_TOKEN is not None and x != '\n' and not helper.is_local():
+            old_f.write(f"[or_session_token={OR_SESSION_TOKEN}] {x}")
         else:
             old_f.write(x)

@@ -40,9 +40,8 @@ class F:


 def tb_print_exception(etype, value, tb, limit=None, file=None, chain=True):
-    if ASAYER_SESSION_ID is not None and not helper.is_local():
-        # bugsnag.notify(Exception(str(value)), meta_data={"special_info": {"asayerSessionId": ASAYER_SESSION_ID}})
-        value = type(value)(f"[asayer_session_id={ASAYER_SESSION_ID}] " + str(value))
+    if OR_SESSION_TOKEN is not None and not helper.is_local():
+        value = type(value)(f"[or_session_token={OR_SESSION_TOKEN}] " + str(value))

     old_tb(etype, value, tb, limit, file, chain)

@@ -59,7 +58,7 @@ _overrides.chalice_app(app)


 @app.middleware('http')
-def asayer_middleware(event, get_response):
+def or_middleware(event, get_response):
     from chalicelib.ee import unlock
     if not unlock.is_valid():
         return Response(body={"errors": ["expired license"]}, status_code=403)

@@ -68,12 +67,11 @@ def asayer_middleware(event, get_response):
     if not projects.is_authorized(project_id=event.uri_params["projectId"],
                                   tenant_id=event.context["authorizer"]["tenantId"]):
         print("unauthorized project")
-        # return {"errors": ["unauthorized project"]}
         pg_client.close()
         return Response(body={"errors": ["unauthorized project"]}, status_code=401)
-    global ASAYER_SESSION_ID
-    ASAYER_SESSION_ID = app.current_request.headers.get('vnd.openreplay.com.sid',
-                                                        app.current_request.headers.get('vnd.asayer.io.sid'))
+    global OR_SESSION_TOKEN
+    OR_SESSION_TOKEN = app.current_request.headers.get('vnd.openreplay.com.sid',
+                                                       app.current_request.headers.get('vnd.asayer.io.sid'))
     if "authorizer" in event.context and event.context["authorizer"] is None:
         print("Deleted user!!")
         pg_client.close()

@@ -84,19 +82,24 @@ def asayer_middleware(event, get_response):
         import time
         now = int(time.time() * 1000)
         response = get_response(event)
+        if response.status_code == 500 and helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local():
+            with configure_scope() as scope:
+                scope.set_tag('stage', environ["stage"])
+                scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN)
+                scope.set_extra("context", event.context)
+                sentry_sdk.capture_exception(Exception(response.body))
         if helper.TRACK_TIME:
             print(f"Execution time: {int(time.time() * 1000) - now} ms")
     except Exception as e:
         print("middleware exception handling")
         print(e)
         pg_client.close()
-        if helper.allow_sentry() and ASAYER_SESSION_ID is not None and not helper.is_local():
+        if helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local():
             with configure_scope() as scope:
                 scope.set_tag('stage', environ["stage"])
-                scope.set_tag('openReplaySessionToken', ASAYER_SESSION_ID)
+                scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN)
                 scope.set_extra("context", event.context)
                 sentry_sdk.capture_exception(e)
-        raise e
         response = Response(body={"Code": "InternalServerError",
                                   "Message": "An internal server error occurred [level=Fatal]."},
                             status_code=500)
     pg_client.close()
     return response

@@ -881,5 +881,5 @@ def all_issue_types(context):

-@app.route('/flows', methods=['GET', 'PUT', 'POST', 'DELETE'])
-def removed_endpoints(context):
+@app.route('/{projectId}/flows', methods=['GET', 'PUT', 'POST', 'DELETE'])
+def removed_endpoints(projectId=None, context=None):
     return Response(body={"errors": ["Endpoint no longer available"]}, status_code=410)

@@ -73,10 +73,12 @@ def get_account(context):
             "projects": -1,
             "metadata": metadata.get_remaining_metadata_with_count(context['tenantId'])
         },
-        **license.get_status(context["tenantId"])
+        **license.get_status(context["tenantId"]),
+        "smtp": environ["EMAIL_HOST"] is not None and len(environ["EMAIL_HOST"]) > 0
+    }
     }


 @app.route('/projects', methods=['GET'])
 def get_projects(context):
     return {"data": projects.get_projects(tenant_id=context["tenantId"], recording_state=True, gdpr=True, recorded=True,

@@ -157,14 +159,29 @@ def add_slack_client(context):
     data = app.current_request.json_body
     if "url" not in data or "name" not in data:
         return {"errors": ["please provide a url and a name"]}
-    if Slack.add_integration(tenant_id=context["tenantId"], url=data["url"], name=data["name"]):
+    if Slack.add_channel(tenant_id=context["tenantId"], url=data["url"], name=data["name"]):
         return {"data": {"status": "success"}}
     else:
         return {
-            "errors": ["failed URL verification, if you received a message on slack, please notify our dev-team"]
+            "errors": ["We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
         }


+@app.route('/integrations/slack/{integrationId}', methods=['POST', 'PUT'])
+def edit_slack_integration(integrationId, context):
+    data = app.current_request.json_body
+    if data.get("url") and len(data["url"]) > 0:
+        old = webhook.get(tenant_id=context["tenantId"], webhook_id=integrationId)
+        if old["endpoint"] != data["url"]:
+            if not Slack.say_hello(data["url"]):
+                return {
+                    "errors": [
+                        "We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
+                }
+    return {"data": webhook.update(tenant_id=context["tenantId"], webhook_id=integrationId,
+                                   changes={"name": data.get("name", ""), "endpoint": data["url"]})}
+
+
 @app.route('/{projectId}/errors/search', methods=['POST'])
 def errors_search(projectId, context):
     data = app.current_request.json_body

@@ -391,6 +408,7 @@ def search_sessions_by_metadata(context):
                                                  m_key=key,
                                                  project_id=project_id)}

+
 @app.route('/plans', methods=['GET'])
 def get_current_plan(context):
     return {

@@ -6,10 +6,10 @@ from chalicelib.core import webhook

 class Slack:
     @classmethod
-    def add_integration(cls, tenant_id, **args):
+    def add_channel(cls, tenant_id, **args):
         url = args["url"]
         name = args["name"]
-        if cls.__say_hello(url):
+        if cls.say_hello(url):
             webhook.add(tenant_id=tenant_id,
                         endpoint=url,
                         webhook_type="slack",

@@ -18,7 +18,7 @@ class Slack:
         return False

     @classmethod
-    def __say_hello(cls, url):
+    def say_hello(cls, url):
         r = requests.post(
             url=url,
             json={

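Slack.say_hello (formerly the name-mangled __say_hello) is now also called from the routing layer, hence the rename to a public method. A hedged sketch of what such a webhook check looks like; the actual json={...} payload is truncated in the diff above, so the message body here is an assumption:

import requests


def say_hello(url):
    # Post a throwaway test message; Slack answers 200 for a valid
    # incoming-webhook URL, anything else means the channel is misconfigured.
    r = requests.post(url=url, json={"text": "Hello from OpenReplay"})  # assumed payload
    return r.status_code == 200
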
@@ -365,7 +365,7 @@ def __get_merged_queries(queries, value, project_id):
 def __get_autocomplete_table(value, project_id):
     with pg_client.PostgresClient() as cur:
         cur.execute(cur.mogrify("""SELECT DISTINCT ON(value,type) project_id, value, type
                                    FROM (SELECT *
                                          FROM (SELECT project_id, type, value
                                                FROM (SELECT *,
                                                             ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID
                                                      FROM public.autocomplete

@@ -1,6 +1,6 @@
 from chalicelib.utils import pg_client, helper
 from chalicelib.utils import dev
-from chalicelib.core import events, sessions_metas, socket_ios, metadata, events_ios, sessions_mobs
+from chalicelib.core import events, sessions_metas, socket_ios, metadata, events_ios, sessions_mobs, issues

 from chalicelib.ee import projects, errors

@@ -24,7 +24,7 @@ SESSION_PROJECTION_COLS = """s.project_id,
     s.user_anonymous_id,
     s.platform,
     s.issue_score,
-    s.issue_types::text[] AS issue_types,
+    to_jsonb(s.issue_types) AS issue_types,
     favorite_sessions.session_id NOTNULL AS favorite,
     COALESCE((SELECT TRUE
               FROM public.user_viewed_sessions AS fs

@@ -83,7 +83,6 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
             data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id,
                                                                      session_id=session_id)
             data['mobsUrl'] = sessions_mobs.get_ios(sessionId=session_id)
-            data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
             data["socket"] = socket_ios.start_replay(project_id=project_id, session_id=session_id,
                                                      device=data["userDevice"],
                                                      os_version=data["userOsVersion"],

@@ -100,9 +99,11 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
             data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
                                                                      session_id=session_id)
             data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id)
-            data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
             data['resources'] = resources.get_by_session_id(session_id=session_id)
+
+            data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
+            data['issues'] = issues.get_by_session_id(session_id=session_id)

         return data
     return None

@@ -1,11 +1,11 @@
 from chalicelib.utils import helper
 from chalicelib.utils.helper import environ

-import boto3
+from chalicelib.utils.s3 import client


 def get_web(sessionId):
-    return boto3.client('s3', region_name=environ["sessions_region"]).generate_presigned_url(
+    return client.generate_presigned_url(
         'get_object',
         Params={
             'Bucket': environ["sessions_bucket"],

@@ -16,7 +16,7 @@ def get_web(sessionId):


 def get_ios(sessionId):
-    return boto3.client('s3', region_name=environ["ios_region"]).generate_presigned_url(
+    return client.generate_presigned_url(
         'get_object',
         Params={
             'Bucket': environ["ios_bucket"],

@@ -79,7 +79,12 @@ def get_traces_group(project_id, payload):
     payloads = {}
     all_exists = True
     for i, u in enumerate(frames):
+        print("===============================")
+        print(u["absPath"])
+        print("converted to:")
         key = __get_key(project_id, u["absPath"])  # use filename instead?
+        print(key)
+        print("===============================")
         if key not in payloads:
             file_exists = s3.exists(environ['sourcemaps_bucket'], key)
             all_exists = all_exists and file_exists

@@ -8,14 +8,9 @@ def get_original_trace(key, positions):
         "key": key,
         "positions": positions,
         "padding": 5,
-        "bucket": environ['sourcemaps_bucket'],
-        "bucket_config": {
-            "aws_access_key_id": environ["sourcemaps_bucket_key"],
-            "aws_secret_access_key": environ["sourcemaps_bucket_secret"],
-            "aws_region": environ["sourcemaps_bucket_region"]
-        }
+        "bucket": environ['sourcemaps_bucket']
     }
-    r = requests.post(environ["sourcemaps"], json=payload)
+    r = requests.post(environ["sourcemaps_reader"], json=payload)
     if r.status_code != 200:
         return {}

@@ -40,7 +40,7 @@ def get_by_type(tenant_id, webhook_type):
         cur.execute(
             cur.mogrify("""\
                 SELECT
-                       w.webhook_id AS id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
+                       w.webhook_id AS integration_id, w.webhook_id AS id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
                 FROM public.webhooks AS w
                 where
                     w.tenant_id =%(tenant_id)s

@@ -9,11 +9,26 @@ PG_CONFIG = {"host": environ["pg_host"],
              "port": int(environ["pg_port"])}

+# connection pool for FOS & EE
+
+from psycopg2 import pool
+from threading import Semaphore
+
+
+class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool):
+    def __init__(self, minconn, maxconn, *args, **kwargs):
+        self._semaphore = Semaphore(maxconn)
+        super().__init__(minconn, maxconn, *args, **kwargs)
+
+    def getconn(self, *args, **kwargs):
+        self._semaphore.acquire()
+        return super().getconn(*args, **kwargs)
+
+    def putconn(self, *args, **kwargs):
+        super().putconn(*args, **kwargs)
+        self._semaphore.release()
+
+
 try:
-    postgreSQL_pool = psycopg2.pool.ThreadedConnectionPool(6, 20, **PG_CONFIG)
+    postgreSQL_pool = ORThreadedConnectionPool(20, 100, **PG_CONFIG)
     if (postgreSQL_pool):
         print("Connection pool created successfully")
 except (Exception, psycopg2.DatabaseError) as error:

@@ -21,13 +36,6 @@ except (Exception, psycopg2.DatabaseError) as error:
     raise error


-# finally:
-#     # closing database connection.
-#     # use closeall method to close all the active connection if you want to turn of the application
-#     if (postgreSQL_pool):
-#         postgreSQL_pool.closeall
-#         print("PostgreSQL connection pool is closed")
-
 class PostgresClient:
     connection = None
     cursor = None

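The point of wrapping ThreadedConnectionPool in a Semaphore is back-pressure: stock psycopg2 raises PoolError when maxconn is exceeded, whereas ORThreadedConnectionPool makes the caller block until putconn releases a slot. A small usage sketch under that assumption (use_conn is an illustrative name):

from concurrent.futures import ThreadPoolExecutor


def use_conn(pool):
    conn = pool.getconn()  # blocks instead of raising when all 100 connections are out
    try:
        with conn.cursor() as cur:
            cur.execute("SELECT 1")
            return cur.fetchone()
    finally:
        pool.putconn(conn)  # returns the connection and releases the semaphore


# e.g. 150 workers against maxconn=100: the extra 50 wait instead of crashing
# with ThreadPoolExecutor(150) as ex:
#     list(ex.map(lambda _: use_conn(postgreSQL_pool), range(1000)))
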
@@ -3,6 +3,7 @@ from chalicelib.utils.helper import environ

 import boto3

+import botocore
 from botocore.client import Config

 client = boto3.client('s3', endpoint_url=environ["S3_HOST"],

@@ -13,51 +14,17 @@ client = boto3.client('s3', endpoint_url=environ["S3_HOST"],


 def exists(bucket, key):
-    response = client.list_objects_v2(
-        Bucket=bucket,
-        Prefix=key,
-    )
-    for obj in response.get('Contents', []):
-        if obj['Key'] == key:
-            return True
-    return False
-
-
-def get_presigned_url_for_sharing(bucket, expires_in, key, check_exists=False):
-    if check_exists and not exists(bucket, key):
-        return None
-
-    return client.generate_presigned_url(
-        'get_object',
-        Params={
-            'Bucket': bucket,
-            'Key': key
-        },
-        ExpiresIn=expires_in
-    )
-
-
-def get_presigned_url_for_upload(bucket, expires_in, key):
-    return client.generate_presigned_url(
-        'put_object',
-        Params={
-            'Bucket': bucket,
-            'Key': key
-        },
-        ExpiresIn=expires_in
-    )
-
-
-def get_file(source_bucket, source_key):
-    try:
-        result = client.get_object(
-            Bucket=source_bucket,
-            Key=source_key
-        )
-    except ClientError as ex:
-        if ex.response['Error']['Code'] == 'NoSuchKey':
-            print(f'======> No object found - returning None for {source_bucket}/{source_key}')
-            return None
-        else:
-            raise ex
-    return result["Body"].read().decode()
+    try:
+        boto3.resource('s3', endpoint_url=environ["S3_HOST"],
+                       aws_access_key_id=environ["S3_KEY"],
+                       aws_secret_access_key=environ["S3_SECRET"],
+                       config=Config(signature_version='s3v4'),
+                       region_name='us-east-1') \
+            .Object(bucket, key).load()
+    except botocore.exceptions.ClientError as e:
+        if e.response['Error']['Code'] == "404":
+            return False
+        # Something else has gone wrong.
+        raise
+    return True

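Both exists() variants carry trade-offs: list_objects_v2 needs s3:ListBucket and pays for a listing, while loading an Object through a throwaway boto3.resource rebuilds a resource per call. A common third option, sketched here purely as an assumption (this helper is not part of the commit), is a HEAD request on the shared client:

import botocore


def exists_head(client, bucket, key):
    # head_object transfers no body; a 404 error code means the key is absent.
    try:
        client.head_object(Bucket=bucket, Key=key)
        return True
    except botocore.exceptions.ClientError as e:
        if e.response['Error']['Code'] in ("404", "NoSuchKey"):
            return False
        raise  # credential or permission problems should surface
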
@@ -5,9 +5,6 @@ pyjwt==1.7.1
 psycopg2-binary==2.8.6
 pytz==2020.1
 sentry-sdk==0.19.1
-rollbar==0.15.1
-bugsnag==4.0.1
-kubernetes==12.0.0
 elasticsearch==7.9.1
 jira==2.0.0
 schedule==1.1.0

ee/api/sourcemaps_reader/handler.js (new file, 111 lines)
@@ -0,0 +1,111 @@
+'use strict';
+const sourceMap = require('source-map');
+const AWS = require('aws-sdk');
+const sourceMapVersion = require('./package.json').dependencies["source-map"];
+const URL = require('url');
+const getVersion = version => version.replace(/[\^\$\=\~]/, "");
+
+module.exports.sourcemapReader = async event => {
+    sourceMap.SourceMapConsumer.initialize({
+        "lib/mappings.wasm": `https://unpkg.com/source-map@${getVersion(sourceMapVersion)}/lib/mappings.wasm`
+    });
+    let s3;
+    if (process.env.S3_HOST) {
+        s3 = new AWS.S3({
+            endpoint: process.env.S3_HOST,
+            accessKeyId: process.env.S3_KEY,
+            secretAccessKey: process.env.S3_SECRET,
+            s3ForcePathStyle: true, // needed with minio?
+            signatureVersion: 'v4'
+        });
+    } else {
+        s3 = new AWS.S3({
+            'AccessKeyID': process.env.aws_access_key_id,
+            'SecretAccessKey': process.env.aws_secret_access_key,
+            'Region': process.env.aws_region
+        });
+    }
+
+    var options = {
+        Bucket: event.bucket,
+        Key: event.key
+    };
+    return new Promise(function (resolve, reject) {
+        s3.getObject(options, (err, data) => {
+            if (err) {
+                console.log("Get S3 object failed");
+                console.log(err);
+                return reject(err);
+            }
+            const sourcemap = data.Body.toString();
+
+            return new sourceMap.SourceMapConsumer(sourcemap)
+                .then(consumer => {
+                    let results = [];
+                    for (let i = 0; i < event.positions.length; i++) {
+                        let original = consumer.originalPositionFor({
+                            line: event.positions[i].line,
+                            column: event.positions[i].column
+                        });
+                        let url = URL.parse("");
+                        let preview = [];
+                        if (original.source) {
+                            preview = consumer.sourceContentFor(original.source, true);
+                            if (preview !== null) {
+                                preview = preview.split("\n")
+                                    .map((line, i) => [i + 1, line]);
+                                if (event.padding) {
+                                    let start = original.line < event.padding ? 0 : original.line - event.padding;
+                                    preview = preview.slice(start, original.line + event.padding);
+                                }
+                            } else {
+                                console.log("source not found, null preview for:");
+                                console.log(original.source);
+                                preview = []
+                            }
+                            url = URL.parse(original.source);
+                        } else {
+                            console.log("couldn't find original position of:");
+                            console.log({
+                                line: event.positions[i].line,
+                                column: event.positions[i].column
+                            });
+                        }
+                        let result = {
+                            "absPath": url.href,
+                            "filename": url.pathname,
+                            "lineNo": original.line,
+                            "colNo": original.column,
+                            "function": original.name,
+                            "context": preview
+                        };
+                        // console.log(result);
+                        results.push(result);
+                    }
+
+                    // Use this code if you don't use the http event with the LAMBDA-PROXY integration
+                    return resolve(results);
+                });
+        });
+    });
+};
+
+
+// let v = {
+//     'key': '1725/99f96f044fa7e941dbb15d7d68b20549',
+//     'positions': [{'line': 1, 'column': 943}],
+//     'padding': 5,
+//     'bucket': 'asayer-sourcemaps'
+// };
+// let v = {
+//     'key': '1/65d8d3866bb8c92f3db612cb330f270c',
+//     'positions': [{'line': 1, 'column': 0}],
+//     'padding': 5,
+//     'bucket': 'asayer-sourcemaps-staging'
+// };
+// module.exports.sourcemapReader(v).then((r) => {
+//     // console.log(r);
+//     const fs = require('fs');
+//     let data = JSON.stringify(r);
+//     fs.writeFileSync('results.json', data);
+// });

ee/api/sourcemaps_reader/server.js (new file, 38 lines)
@@ -0,0 +1,38 @@
+const http = require('http');
+const handler = require('./handler');
+const hostname = '127.0.0.1';
+const port = 3000;
+
+const server = http.createServer((req, res) => {
+    if (req.method === 'POST') {
+        let data = '';
+        req.on('data', chunk => {
+            data += chunk;
+        });
+        req.on('end', function () {
+            data = JSON.parse(data);
+            console.log("Starting parser for: " + data.key);
+            // process.env = {...process.env, ...data.bucket_config};
+            handler.sourcemapReader(data)
+                .then((results) => {
+                    res.statusCode = 200;
+                    res.setHeader('Content-Type', 'application/json');
+                    res.end(JSON.stringify(results));
+                })
+                .catch((e) => {
+                    console.error("Something went wrong");
+                    console.error(e);
+                    res.statusCode = 500;
+                    res.end(String(e));
+                });
+        })
+    } else {
+        res.statusCode = 405;
+        res.setHeader('Content-Type', 'text/plain');
+        res.end('Method Not Allowed');
+    }
+});
+
+server.listen(port, hostname, () => {
+    console.log(`Server running at http://${hostname}:${port}/`);
+});

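server.js turns the old Lambda-style handler into a plain HTTP service, which is what the new sourcemaps_reader config key points at. A hedged smoke test in Python, mirroring the payload built by get_original_trace above (key and bucket values are taken from the commented example in handler.js and are placeholders):

import requests

payload = {
    "key": "1/65d8d3866bb8c92f3db612cb330f270c",  # placeholder object key
    "positions": [{"line": 1, "column": 0}],
    "padding": 5,
    "bucket": "sourcemaps",
}
# The reader listens on 127.0.0.1:3000 per server.js above.
r = requests.post("http://127.0.0.1:3000/", json=payload)
print(r.json() if r.status_code == 200 else r.text)
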
|
@ -40,9 +40,6 @@ env:
|
|||
sessions_region: us-east-1
|
||||
put_S3_TTL: '20'
|
||||
sourcemaps_bucket: sourcemaps
|
||||
sourcemaps_bucket_key: minios3AccessKeyS3cr3t
|
||||
sourcemaps_bucket_secret: m1n10s3CretK3yPassw0rd
|
||||
sourcemaps_bucket_region: us-east-1
|
||||
js_cache_bucket: sessions-assets
|
||||
async_Token: ''
|
||||
EMAIL_HOST: ''
|
||||
|
|
|
|||