Merge branch 'dev' into codeql

This commit is contained in:
Mehdi Osman 2021-09-22 11:27:25 +02:00 committed by GitHub
commit 702ca812a4
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
40 changed files with 588 additions and 3089 deletions

View file

@ -33,7 +33,7 @@
"sourcemaps_reader": "http://utilities-openreplay.app.svc.cluster.local:9000/sourcemaps",
"sourcemaps_bucket": "sourcemaps",
"js_cache_bucket": "sessions-assets",
"peers": "http://utilities-openreplay.app.svc.cluster.local:9000/assist/peers",
"peers": "http://utilities-openreplay.app.svc.cluster.local:9000/assist/%s/peers",
"async_Token": "",
"EMAIL_HOST": "",
"EMAIL_PORT": "587",

View file

@ -7,7 +7,7 @@ from chalicelib.blueprints import bp_authorizers
from chalicelib.blueprints import bp_core, bp_core_crons
from chalicelib.blueprints.app import v1_api
from chalicelib.blueprints import bp_core_dynamic, bp_core_dynamic_crons
from chalicelib.blueprints.subs import bp_dashboard
from chalicelib.blueprints.subs import bp_dashboard,bp_insights
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.helper import environ
@ -106,4 +106,5 @@ app.register_blueprint(bp_core_crons.app)
app.register_blueprint(bp_core_dynamic.app)
app.register_blueprint(bp_core_dynamic_crons.app)
app.register_blueprint(bp_dashboard.app)
app.register_blueprint(bp_insights.app)
app.register_blueprint(v1_api.app)

View file

@ -0,0 +1,69 @@
from chalice import Blueprint
from chalicelib.utils import helper
from chalicelib import _overrides
from chalicelib.core import dashboard, insights
from chalicelib.core import metadata
# Chalice sub-application exposing the /insights endpoints; it is registered
# onto the main application elsewhere via app.register_blueprint(bp_insights.app).
app = Blueprint(__name__)
_overrides.chalice_app(app)
#
# @app.route('/{projectId}/dashboard/metadata', methods=['GET'])
# def get_metadata_map(projectId, context):
#     metamap = []
#     for m in metadata.get(project_id=projectId):
#         metamap.append({"name": m["key"], "key": f"metadata{m['index']}"})
#     return {"data": metamap}
#
#
@app.route('/{projectId}/insights/journey', methods=['GET', 'POST'])
def get_insights_journey(projectId, context):
    """Return the user-journey graph for a project.

    Merges the optional JSON body with parsed query-string arguments
    (query args win on key collisions) and forwards them to the core layer.
    """
    payload = app.current_request.json_body
    if payload is None:
        payload = {}
    query_args = dashboard.dashboard_args(app.current_request.query_params)
    return {"data": insights.get_journey(project_id=projectId, **{**payload, **query_args})}
@app.route('/{projectId}/insights/users_retention', methods=['GET', 'POST'])
def get_users_retention(projectId, context):
    """Return the weekly cohort-retention matrix for a project.

    Merges the optional JSON body with parsed query-string arguments
    (query args win on key collisions) and forwards them to the core layer.
    """
    payload = app.current_request.json_body
    if payload is None:
        payload = {}
    query_args = dashboard.dashboard_args(app.current_request.query_params)
    return {"data": insights.get_retention(project_id=projectId, **{**payload, **query_args})}
#
#
# @app.route('/{projectId}/dashboard/{widget}/search', methods=['GET'])
# def get_dashboard_autocomplete(projectId, widget, context):
# params = app.current_request.query_params
# if params is None or params.get('q') is None or len(params.get('q')) == 0:
# return {"data": []}
# params['q'] = '^' + params['q']
#
# if widget in ['performance']:
# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
# platform=params.get('platform', None), performance=True)
# elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render',
# 'impacted_sessions_by_slow_pages', 'pages_response_time']:
# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
# platform=params.get('platform', None), pages_only=True)
# elif widget in ['resources_loading_time']:
# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
# platform=params.get('platform', None), performance=False)
# elif widget in ['time_between_events', 'events']:
# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
# platform=params.get('platform', None), performance=False, events_only=True)
# elif widget in ['metadata']:
# data = dashboard.search(params.get('q', ''), None, project_id=projectId,
# platform=params.get('platform', None), metadata=True, key=params.get("key"))
# else:
# return {"errors": [f"unsupported widget: {widget}"]}
# return {'data': data}

View file

@ -21,7 +21,7 @@ SESSION_PROJECTION_COLS = """s.project_id,
def get_live_sessions(project_id, filters=None):
project_key = projects.get_project_key(project_id)
connected_peers = requests.get(environ["peers"] + f"/{project_key}")
connected_peers = requests.get(environ["peers"] % environ["S3_KEY"] + f"/{project_key}")
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
@ -65,7 +65,7 @@ def get_live_sessions(project_id, filters=None):
def is_live(project_id, session_id, project_key=None):
if project_key is None:
project_key = projects.get_project_key(project_id)
connected_peers = requests.get(environ["peers"] + f"/{project_key}")
connected_peers = requests.get(environ["peers"] % environ["S3_KEY"] + f"/{project_key}")
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)

View file

@ -0,0 +1,211 @@
from chalicelib.core import sessions_metas
from chalicelib.utils import args_transformer
from chalicelib.utils import helper, dev
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
import math
from chalicelib.core.dashboard import __get_constraints, __get_constraint_values
def __transform_journey(rows):
    """Convert raw (source_event, target_event, value) rows into a Sankey
    chart payload: {"nodes": [labels], "links": [{source, target, value}]}.

    Event strings are formatted as "<rank>_<label>"; the rank prefix is
    stripped (the label keeps its leading underscore, matching the query's
    concatenation format). Links are returned sorted by value, descending.
    """
    nodes = []
    links = []
    for row in rows:
        src = row["source_event"][row["source_event"].index("_"):]
        dst = row["target_event"][row["target_event"].index("_"):]
        for label in (src, dst):
            if label not in nodes:
                nodes.append(label)
        links.append({"source": nodes.index(src),
                      "target": nodes.index(dst),
                      "value": row["value"]})
    links.sort(key=lambda link: link["value"], reverse=True)
    return {"nodes": nodes, "links": links}
# Maximum number of ordered events per session considered when building journeys.
JOURNEY_DEPTH = 5
# Maps a selectable event type to its source table, the column whose value
# becomes the journey node label, and the column that identifies an event
# row within that table.
JOURNEY_TYPES = {
    "PAGES": {"table": "events.pages", "column": "base_path", "table_id": "message_id"},
    "CLICK": {"table": "events.clicks", "column": "label", "table_id": "message_id"},
    "VIEW": {"table": "events_ios.views", "column": "name", "table_id": "seq_index"},
    "EVENT": {"table": "events_common.customs", "column": "name", "table_id": "seq_index"}
}
@dev.timed
def get_journey(project_id, startTimestamp=None, endTimestamp=None, filters=None, **args):
    """Top event-to-event transitions ("journey") for a project.

    Sessions are replayed in event order (pages by default); consecutive
    event pairs are counted and the 20 most frequent transitions are
    returned in the {"nodes": [...], "links": [...]} shape produced by
    __transform_journey.

    :param project_id: project to query.
    :param startTimestamp: window start (ms since epoch); defaults to now - 1 day.
    :param endTimestamp: window end (ms since epoch); defaults to now.
    :param filters: list of {"type": ..., "value": ...} dicts. Supported types:
        START_POINT (only count events from the first occurrence of value),
        EVENT_TYPE (a JOURNEY_TYPES key), and user-id session metas.
    :param args: forwarded to __get_constraints / __get_constraint_values.
    """
    # Resolve defaults at call time. The previous defaults (TimeUTC.now(...)
    # and a shared []) were evaluated once at import time, which froze the
    # time window for the process lifetime and shared mutable filter state
    # between calls (mutable-default pitfall).
    if startTimestamp is None:
        startTimestamp = TimeUTC.now(delta_days=-1)
    if endTimestamp is None:
        endTimestamp = TimeUTC.now()
    if filters is None:
        filters = []
    pg_sub_query_subset = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
                                            time_constraint=True)
    event_start = None
    event_table = JOURNEY_TYPES["PAGES"]["table"]
    event_column = JOURNEY_TYPES["PAGES"]["column"]
    event_table_id = JOURNEY_TYPES["PAGES"]["table_id"]
    extra_values = {}
    for f in filters:
        if f["type"] == "START_POINT":
            event_start = f["value"]
        elif f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
            event_table = JOURNEY_TYPES[f["value"]]["table"]
            event_column = JOURNEY_TYPES[f["value"]]["column"]
            # Fix: keep the id column in sync with the chosen event type.
            # iOS views and custom events are keyed by seq_index, not
            # message_id; previously the PAGES id column was always used.
            event_table_id = JOURNEY_TYPES[f["value"]]["table_id"]
        elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
            pg_sub_query_subset.append(f"sessions.user_id = %(user_id)s")
            extra_values["user_id"] = f["value"]
    with pg_client.PostgresClient() as cur:
        pg_query = f"""SELECT source_event,
                              target_event,
                              MAX(target_id) max_target_id,
                              MAX(source_id) max_source_id,
                              count(*) AS value
                       FROM (SELECT event_number || '_' || value as target_event,
                                    message_id AS target_id,
                                    LAG(event_number || '_' || value, 1) OVER ( PARTITION BY session_rank ) AS source_event,
                                    LAG(message_id, 1) OVER ( PARTITION BY session_rank ) AS source_id
                             FROM (SELECT value,
                                          session_rank,
                                          message_id,
                                          ROW_NUMBER() OVER ( PARTITION BY session_rank ORDER BY timestamp ) AS event_number
                                   {f"FROM (SELECT * FROM (SELECT *, MIN(mark) OVER ( PARTITION BY session_id , session_rank ORDER BY timestamp ) AS max FROM (SELECT *, CASE WHEN value = %(event_start)s THEN timestamp ELSE NULL END as mark"
        if event_start else ""}
                                   FROM (SELECT session_id,
                                                message_id,
                                                timestamp,
                                                value,
                                                SUM(new_session) OVER (ORDER BY session_id, timestamp) AS session_rank
                                         FROM (SELECT *,
                                                      CASE
                                                          WHEN source_timestamp IS NULL THEN 1
                                                          ELSE 0 END AS new_session
                                               FROM (SELECT session_id,
                                                            {event_table_id} AS message_id,
                                                            timestamp,
                                                            {event_column} AS value,
                                                            LAG(timestamp)
                                                            OVER (PARTITION BY session_id ORDER BY timestamp) AS source_timestamp
                                                     FROM {event_table} INNER JOIN public.sessions USING (session_id)
                                                     WHERE {" AND ".join(pg_sub_query_subset)}
                                                    ) AS related_events) AS ranked_events) AS processed
                                   {") AS marked) AS maxed WHERE timestamp >= max) AS filtered" if event_start else ""}
                                  ) AS sorted_events
                             WHERE event_number <= %(JOURNEY_DEPTH)s) AS final
                       WHERE source_event IS NOT NULL
                         and target_event IS NOT NULL
                       GROUP BY source_event, target_event
                       ORDER BY value DESC
                       LIMIT 20;"""
        params = {"project_id": project_id, "startTimestamp": startTimestamp,
                  "endTimestamp": endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH,
                  **__get_constraint_values(args), **extra_values}
        # print(cur.mogrify(pg_query, params))
        cur.execute(cur.mogrify(pg_query, params))
        rows = cur.fetchall()
    return __transform_journey(rows)
def __compute_retention_percentage(rows):
    """Annotate each retention row with its share of the week-0 cohort size.

    Mutates `rows` in place (adds a "percentage" key) and returns it.
    Rows are expected ordered by (cohort, week) so that a cohort's week-0
    row is seen before its later weeks.
    """
    if not rows:
        return rows
    cohort_size = -1
    for row in rows:
        if row["week"] == 0:
            cohort_size = row["usersCount"]
        row["percentage"] = row["usersCount"] / cohort_size
    return rows
def __complete_retention(rows, start_date, end_date=None):
    """Pad the weekly retention matrix with zero-value cells.

    The SQL only returns (cohort-week, week) pairs that had activity; this
    walks a fixed 10x10 triangular window starting at `start_date` and
    inserts neutral rows (usersCount 0) wherever a cohort week or a
    retention week is missing, so the client gets a dense matrix.

    Mutates and returns `rows`; expects them ordered by
    (firstConnexionWeek, week), as produced by the retention query.
    """
    if rows is None or len(rows) == 0:
        return rows
    # Fixed window: 10 cohort weeks, each with up to `max_week` retention weeks.
    max_week = 10
    # `week` accumulates insert offsets from previous cohorts;
    # `delta_date` counts cohort weeks already processed.
    week = 0
    delta_date = 0
    while max_week > 0:
        start_date += TimeUTC.MS_WEEK
        if end_date is not None and start_date >= end_date:
            break
        delta = 0
        if delta_date + week >= len(rows) \
                or delta_date + week < len(rows) and rows[delta_date + week]["firstConnexionWeek"] > start_date:
            # This cohort week is entirely absent from the result set:
            # insert a full run of neutral rows for it.
            for i in range(max_week):
                if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date:
                    break
                neutral = {
                    "firstConnexionWeek": start_date,
                    "week": i,
                    "usersCount": 0,
                    "connectedUsers": [],
                    "percentage": 0
                }
                rows.insert(delta_date + week + i, neutral)
                delta = i
        else:
            # Cohort exists but may be missing individual weeks: insert a
            # neutral row wherever the expected week index doesn't match,
            # and append when we run past the end of the list.
            for i in range(max_week):
                if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date:
                    break
                neutral = {
                    "firstConnexionWeek": start_date,
                    "week": i,
                    "usersCount": 0,
                    "connectedUsers": [],
                    "percentage": 0
                }
                if delta_date + week + i < len(rows) \
                        and i != rows[delta_date + week + i]["week"]:
                    rows.insert(delta_date + week + i, neutral)
                elif delta_date + week + i >= len(rows):
                    rows.append(neutral)
                delta = i
        week += delta
        max_week -= 1
        delta_date += 1
    return rows
@dev.timed
def get_retention(project_id, startTimestamp=None, endTimestamp=None, filters=None,
                  **args):
    """Weekly cohort retention for a project over a fixed 10-week window.

    For each user whose first-ever session falls inside the window, counts
    in which subsequent weeks they came back; rows are then converted to
    percentages of the cohort size and padded to a dense matrix.

    :param project_id: project to query.
    :param startTimestamp: cohort window start (ms); defaults to now - 70 days,
        then truncated to the start of its week.
    :param endTimestamp: ignored — the window is always startTimestamp + 10 weeks
        (kept for signature compatibility).
    :param filters: accepted for signature compatibility; currently unused.
    :param args: forwarded to __get_constraints / __get_constraint_values.
    """
    # Resolve defaults at call time: the previous TimeUTC.now(...) defaults
    # were evaluated once at import, freezing the window for the process
    # lifetime, and [] was a shared mutable default.
    if startTimestamp is None:
        startTimestamp = TimeUTC.now(delta_days=-70)
    if filters is None:
        filters = []
    startTimestamp = TimeUTC.trunc_week(startTimestamp)
    endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK
    pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
                                     time_constraint=True)
    with pg_client.PostgresClient() as cur:
        # Fix: the lateral sub-query previously filtered on the hard-coded
        # `sessions.project_id = 1` (debug leftover), mixing in another
        # project's sessions; it now uses the bound %(project_id)s.
        pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week,
                              FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week,
                              COUNT(DISTINCT connexions_list.user_id)                                     AS users_count,
                              ARRAY_AGG(DISTINCT connexions_list.user_id)                                 AS connected_users
                       FROM (SELECT DISTINCT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week
                             FROM sessions
                             WHERE {" AND ".join(pg_sub_query)}
                               AND user_id IS NOT NULL
                               AND NOT EXISTS((SELECT 1
                                               FROM sessions AS bsess
                                               WHERE bsess.start_ts<EXTRACT('EPOCH' FROM DATE_TRUNC('week', to_timestamp(%(startTimestamp)s / 1000))) * 1000
                                                 AND project_id = %(project_id)s
                                                 AND bsess.user_id = sessions.user_id
                                               LIMIT 1))
                             GROUP BY user_id) AS users_list
                                LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week,
                                                          user_id
                                                   FROM sessions
                                                   WHERE users_list.user_id = sessions.user_id
                                                     AND first_connexion_week <=
                                                         DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp)
                                                     AND sessions.project_id = %(project_id)s
                                                     AND sessions.start_ts < (%(endTimestamp)s - 1)
                                                   GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE)
                       GROUP BY first_connexion_week, week
                       ORDER BY first_connexion_week, week;"""
        params = {"project_id": project_id, "startTimestamp": startTimestamp,
                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
        # print(cur.mogrify(pg_query, params))
        cur.execute(cur.mogrify(pg_query, params))
        rows = cur.fetchall()
        rows = __compute_retention_percentage(helper.list_to_camel_case(rows))
    return __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now())

View file

@ -426,8 +426,27 @@ def change_password(tenant_id, user_id, email, old_password, new_password):
if auth is None:
return {"errors": ["wrong password"]}
changes = {"password": new_password, "generatedPassword": False}
return {"data": update(tenant_id=tenant_id, user_id=user_id, changes=changes),
"jwt": authenticate(email, new_password)["jwt"]}
user = update(tenant_id=tenant_id, user_id=user_id, changes=changes)
r = authenticate(user['email'], new_password)
tenant_id = r.pop("tenantId")
r["limits"] = {
"teamMember": -1,
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(tenant_id)}
c = tenants.get_by_tenant_id(tenant_id)
c.pop("createdAt")
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
stack_integrations=True)
c["smtp"] = helper.has_smtp()
return {
'jwt': r.pop('jwt'),
'data': {
"user": r,
"client": c
}
}
def set_password_invitation(user_id, new_password):

View file

@ -7,6 +7,7 @@ class TimeUTC:
MS_MINUTE = 60 * 1000
MS_HOUR = MS_MINUTE * 60
MS_DAY = MS_HOUR * 24
MS_WEEK = MS_DAY * 7
MS_MONTH = MS_DAY * 30
MS_MONTH_TRUE = monthrange(datetime.now(pytz.utc).astimezone(pytz.utc).year,
datetime.now(pytz.utc).astimezone(pytz.utc).month)[1] * MS_DAY
@ -113,3 +114,11 @@ class TimeUTC:
@staticmethod
def get_utc_offset():
return int((datetime.now(pytz.utc).now() - datetime.now(pytz.utc).replace(tzinfo=None)).total_seconds() * 1000)
@staticmethod
def trunc_week(timestamp):
    """Return the ms-epoch timestamp truncated to the start of its week (Monday 00:00, UTC)."""
    dt = TimeUTC.from_ms_timestamp(timestamp)
    # weekday(): Monday == 0, so subtracting it lands on the week's Monday.
    # NOTE(review): relies on `timedelta` and `pytz` being imported at module
    # level — confirm against the file header (not visible in this hunk).
    start = dt - timedelta(days=dt.weekday())
    return TimeUTC.datetime_to_timestamp(start
                                         .replace(hour=0, minute=0, second=0, microsecond=0)
                                         .astimezone(pytz.utc))

View file

@ -51,7 +51,7 @@ class PostgresClient:
try:
self.connection.commit()
self.cursor.close()
except:
except Exception as error:
print("Error while committing/closing PG-connection", error)
raise error
finally:

View file

@ -25,7 +25,7 @@ ENV TZ=UTC \
MAXMINDDB_FILE=/root/geoip.mmdb \
UAPARSER_FILE=/root/regexes.yaml \
HTTP_PORT=80 \
BEACON_SIZE_LIMIT=1000000 \
BEACON_SIZE_LIMIT=7000000 \
KAFKA_USE_SSL=true \
REDIS_STREAMS_MAX_LEN=3000 \
TOPIC_RAW=raw \

View file

@ -66,13 +66,12 @@ func ResolveCSS(baseURL string, css string) string {
css = rewriteLinks(css, func(rawurl string) string {
return ResolveURL(baseURL, rawurl)
})
return strings.Replace(css, ":hover", ".-asayer-hover", -1)
return strings.Replace(css, ":hover", ".-openreplay-hover", -1)
}
func (r *Rewriter) RewriteCSS(sessionID uint64, baseurl string, css string) string {
css = rewriteLinks(css, func(rawurl string) string {
url , _ := r.RewriteURL(sessionID, baseurl, rawurl)
return url
return r.RewriteURL(sessionID, baseurl, rawurl)
})
return strings.Replace(css, ":hover", ".-asayer-hover", -1)
return strings.Replace(css, ":hover", ".-openreplay-hover", -1)
}

View file

@ -50,23 +50,15 @@ func GetFullCachableURL(baseURL string, relativeURL string) (string, bool) {
if !isRelativeCachable(relativeURL) {
return "", false
}
return ResolveURL(baseURL, relativeURL), true
fullURL := ResolveURL(baseURL, relativeURL)
if !isCachable(fullURL) {
return "", false
}
return fullURL, true
}
const OPENREPLAY_QUERY_START = "OPENREPLAY_QUERY"
func getCachePath(rawurl string) string {
return "/" + strings.ReplaceAll(url.QueryEscape(rawurl), "%", "!") // s3 keys are ok with "!"
// u, _ := url.Parse(rawurl)
// s := "/" + u.Scheme + "/" + u.Hostname() + u.Path
// if u.RawQuery != "" {
// if (s[len(s) - 1] != '/') {
// s += "/"
// }
// s += OPENREPLAY_QUERY_START + url.PathEscape(u.RawQuery)
// }
// return s
}
func getCachePathWithKey(sessionID uint64, rawurl string) string {
@ -82,14 +74,10 @@ func GetCachePathForAssets(sessionID uint64, rawurl string) string {
}
func (r *Rewriter) RewriteURL(sessionID uint64, baseURL string, relativeURL string) (string, bool) {
// TODO: put it in one check within GetFullCachableURL
if !isRelativeCachable(relativeURL) {
return relativeURL, false
}
fullURL := ResolveURL(baseURL, relativeURL)
if !isCachable(fullURL) {
return relativeURL, false
func (r *Rewriter) RewriteURL(sessionID uint64, baseURL string, relativeURL string) string {
fullURL, cachable := GetFullCachableURL(baseURL, relativeURL)
if !cachable {
return relativeURL
}
u := url.URL{
@ -98,6 +86,6 @@ func (r *Rewriter) RewriteURL(sessionID uint64, baseURL string, relativeURL stri
Scheme: r.assetsURL.Scheme,
}
return u.String(), true
return u.String()
}

View file

@ -21,11 +21,8 @@ func sendAssetsForCacheFromCSS(sessionID uint64, baseURL string, css string) {
func handleURL(sessionID uint64, baseURL string, url string) string {
if CACHE_ASSESTS {
rewrittenURL, isCachable := rewriter.RewriteURL(sessionID, baseURL, url)
if isCachable {
sendAssetForCache(sessionID, baseURL, url)
}
return rewrittenURL
sendAssetForCache(sessionID, baseURL, url)
return rewriter.RewriteURL(sessionID, baseURL, url)
}
return assets.ResolveURL(baseURL, url)
}

View file

@ -34,11 +34,12 @@ func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) {
Reset bool `json:"reset"`
}
type response struct {
Timestamp int64 `json:"timestamp"`
Delay int64 `json:"delay"`
Token string `json:"token"`
UserUUID string `json:"userUUID"`
SessionID string `json:"sessionID"`
Timestamp int64 `json:"timestamp"`
Delay int64 `json:"delay"`
Token string `json:"token"`
UserUUID string `json:"userUUID"`
SessionID string `json:"sessionID"`
BeaconSizeLimit int64 `json:"beaconSizeLimit"`
}
startTime := time.Now()
@ -115,6 +116,7 @@ func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) {
Token: tokenizer.Compose(*tokenData),
UserUUID: userUUID,
SessionID: strconv.FormatUint(tokenData.ID, 10),
BeaconSizeLimit: BEACON_SIZE_LIMIT,
})
}

View file

@ -8,6 +8,8 @@ import (
"os/signal"
"syscall"
"golang.org/x/net/http2"
"openreplay/backend/pkg/env"
"openreplay/backend/pkg/flakeid"
@ -131,6 +133,7 @@ func main() {
}
}),
}
http2.ConfigureServer(server, nil)
go func() {
if err := server.ListenAndServe(); err != nil {
log.Fatalf("Server error: %v\n", err)

View file

@ -35,7 +35,7 @@
"put_S3_TTL": "20",
"sourcemaps_reader": "http://utilities-openreplay.app.svc.cluster.local:9000/sourcemaps",
"sourcemaps_bucket": "sourcemaps",
"peers": "http://utilities-openreplay.app.svc.cluster.local:9000/assist/peers",
"peers": "http://utilities-openreplay.app.svc.cluster.local:9000/assist/%s/peers",
"js_cache_bucket": "sessions-assets",
"async_Token": "",
"EMAIL_HOST": "",

View file

@ -436,8 +436,27 @@ def change_password(tenant_id, user_id, email, old_password, new_password):
if auth is None:
return {"errors": ["wrong password"]}
changes = {"password": new_password, "generatedPassword": False}
return {"data": update(tenant_id=tenant_id, user_id=user_id, changes=changes),
"jwt": authenticate(email, new_password)["jwt"]}
user = update(tenant_id=tenant_id, user_id=user_id, changes=changes)
r = authenticate(user['email'], new_password)
tenant_id = r.pop("tenantId")
r["limits"] = {
"teamMember": -1,
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(tenant_id)}
c = tenants.get_by_tenant_id(tenant_id)
c.pop("createdAt")
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
stack_integrations=True)
c["smtp"] = helper.has_smtp()
return {
'jwt': r.pop('jwt'),
'data': {
"user": r,
"client": c,
}
}
def set_password_invitation(tenant_id, user_id, new_password):
@ -457,6 +476,7 @@ def set_password_invitation(tenant_id, user_id, new_password):
c.pop("createdAt")
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
stack_integrations=True)
c["smtp"] = helper.has_smtp()
return {
'jwt': r.pop('jwt'),
'data': {

View file

@ -7,6 +7,7 @@ import ListWalker from './ListWalker';
type MouseMoveTimed = MouseMove & Timed;
const HOVER_CLASS = "-openreplay-hover";
const HOVER_CLASS_DEPR = "-asayer-hover";
export default class MouseManager extends ListWalker<MouseMoveTimed> {
private hoverElements: Array<Element> = [];
@ -19,8 +20,14 @@ export default class MouseManager extends ListWalker<MouseMoveTimed> {
const diffAdd = curHoverElements.filter(elem => !this.hoverElements.includes(elem));
const diffRemove = this.hoverElements.filter(elem => !curHoverElements.includes(elem));
this.hoverElements = curHoverElements;
diffAdd.forEach(elem => elem.classList.add(HOVER_CLASS));
diffRemove.forEach(elem => elem.classList.remove(HOVER_CLASS));
diffAdd.forEach(elem => {
elem.classList.add(HOVER_CLASS)
elem.classList.add(HOVER_CLASS_DEPR)
});
diffRemove.forEach(elem => {
elem.classList.remove(HOVER_CLASS)
elem.classList.remove(HOVER_CLASS_DEPR)
});
}
reset(): void {

View file

@ -0,0 +1,10 @@
BEGIN;
-- Composite index backing queries that scan a session's pages ordered by time
-- (used by the journey/insights queries).
CREATE INDEX pages_session_id_timestamp_idx ON events.pages (session_id, timestamp);
-- Lookup indexes for common per-tenant / per-project filter columns.
CREATE INDEX projects_tenant_id_idx ON projects(tenant_id);
CREATE INDEX webhooks_tenant_id_idx ON webhooks(tenant_id);
CREATE INDEX issues_project_id_idx ON issues(project_id);
CREATE INDEX jobs_project_id_idx ON jobs(project_id);
COMMIT;

View file

@ -172,6 +172,7 @@ CREATE TABLE projects
"defaultInputMode": "plain"
}'::jsonb -- ??????
);
CREATE INDEX projects_tenant_id_idx ON projects(tenant_id);
CREATE OR REPLACE FUNCTION notify_project() RETURNS trigger AS
$$
@ -247,7 +248,7 @@ create table webhooks
index integer default 0 not null,
name varchar(100)
);
CREATE INDEX webhooks_tenant_id_idx ON webhooks(tenant_id);
-- --- notifications.sql ---
@ -387,6 +388,7 @@ CREATE TABLE issues
);
CREATE INDEX ON issues (issue_id, type);
CREATE INDEX issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops);
CREATE INDEX issues_project_id_idx ON issues(project_id);
-- --- errors.sql ---
@ -870,5 +872,6 @@ CREATE TABLE jobs
);
CREATE INDEX ON jobs (status);
CREATE INDEX ON jobs (start_at);
CREATE INDEX jobs_project_id_idx ON jobs(project_id);
COMMIT;

View file

@ -10,28 +10,31 @@ npm i -D @openreplay/sourcemap-uploader
## CLI
Upload sourcemap for one file:
### Upload a sourcemap for one file:
```
sourcemap-uploader -s https://openreplay.mycompany.com/api -k API_KEY -p PROJECT_KEY file -m ./dist/index.js.map -u https://myapp.com/index.js
```
Upload all sourcemaps in a given directory. The URL must correspond to the root where you upload JS files from the directory. In other words, if you have your `app-42.js` along with the `app-42.js.map` in the `./build` folder and then want to upload it to your OpenReplay instance so it can be reachable through the link `https://myapp.com/static/app-42.js`, then the command should be like:
### Upload all sourcemaps in a given directory.
The URL must correspond to the root where you upload JS files from the directory. In other words, if you have your `app-42.js` along with the `app-42.js.map` in the `./build` folder and then want to upload it to your OpenReplay instance so it can be reachable through the link `https://myapp.com/static/app-42.js`, then the command should be like:
```
sourcemap-uploader -s https://openreplay.mycompany.com/api -k API_KEY -p PROJECT_KEY dir -m ./build -u https://myapp.com/static
```
- Use `-s` (`--server`) to specify the URL of your OpenReplay instance (make to append it with /api)
- Use `-s` (`--server`) to specify the URL of your OpenReplay instance (append it with /api).
**Do not use this parameter if you use the SaaS version of OpenReplay.**
- Use `-v` (`--verbose`) to see the logs.
## NPM
There are two functions inside `index.js` of the package:
There are two functions you can export from the package:
```
uploadFile(api_key, project_key, sourcemap_file_path, js_file_url)
uploadDir(api_key, project_key, sourcemap_dir_path, js_dir_url)
uploadFile(api_key, project_key, sourcemap_file_path, js_file_url, [server])
uploadDir(api_key, project_key, sourcemap_dir_path, js_dir_url, [server])
```
Both functions return Promise.
Both functions return a Promise that resolves to the list of files for which sourcemaps were uploaded.

View file

@ -58,15 +58,9 @@ const { command, api_key, project_key, server, verbose, ...args } = parser.parse
global._VERBOSE = !!verbose;
try {
global.SERVER = new URL(server || "https://api.openreplay.com");
} catch (e) {
console.error(`Sourcemap Uploader: server URL parse error. ${e}`)
}
(command === 'file'
? uploadFile(api_key, project_key, args.sourcemap_file_path, args.js_file_url)
: uploadDir(api_key, project_key, args.sourcemap_dir_path, args.js_dir_url)
? uploadFile(api_key, project_key, args.sourcemap_file_path, args.js_file_url, server)
: uploadDir(api_key, project_key, args.sourcemap_dir_path, args.js_dir_url, server)
)
.then((sourceFiles) =>
sourceFiles.length > 0

View file

@ -3,12 +3,12 @@ const readFile = require('./lib/readFile.js'),
uploadSourcemaps = require('./lib/uploadSourcemaps.js');
module.exports = {
async uploadFile(api_key, project_key, sourcemap_file_path, js_file_url) {
async uploadFile(api_key, project_key, sourcemap_file_path, js_file_url, server) {
const sourcemap = await readFile(sourcemap_file_path, js_file_url);
return uploadSourcemaps(api_key, project_key, [sourcemap]);
return uploadSourcemaps(api_key, project_key, [sourcemap], server);
},
async uploadDir(api_key, project_key, sourcemap_dir_path, js_dir_url) {
async uploadDir(api_key, project_key, sourcemap_dir_path, js_dir_url, server) {
const sourcemaps = await readDir(sourcemap_dir_path, js_dir_url);
return uploadSourcemaps(api_key, project_key, sourcemaps);
return uploadSourcemaps(api_key, project_key, sourcemaps, server);
},
};

View file

@ -1,15 +1,22 @@
const https = require('https');
const getUploadURLs = (api_key, project_key, js_file_urls) =>
const getUploadURLs = (api_key, project_key, js_file_urls, server) =>
new Promise((resolve, reject) => {
if (js_file_urls.length === 0) {
resolve([]);
}
const pathPrefix = (global.SERVER.pathname + "/").replace(/\/+/g, '/');
let serverURL;
try {
serverURL = new URL(server);
} catch(e) {
return reject(`Failed to parse server URL "${server}".`)
}
const pathPrefix = (serverURL.pathname + "/").replace(/\/+/g, '/');
const options = {
method: 'PUT',
hostname: global.SERVER.host,
hostname: serverURL.host,
path: pathPrefix + `${project_key}/sourcemaps/`,
headers: { Authorization: api_key, 'Content-Type': 'application/json' },
}
@ -74,11 +81,12 @@ const uploadSourcemap = (upload_url, body) =>
req.end();
});
module.exports = (api_key, project_key, sourcemaps) =>
module.exports = (api_key, project_key, sourcemaps, server) =>
getUploadURLs(
api_key,
project_key,
sourcemaps.map(({ js_file_url }) => js_file_url),
server || "https://api.openreplay.com",
).then(upload_urls =>
Promise.all(
upload_urls.map((upload_url, i) =>

View file

@ -1,6 +1,6 @@
{
"name": "@openreplay/sourcemap-uploader",
"version": "3.0.5",
"version": "3.0.6",
"description": "NPM module to upload your JS sourcemaps files to OpenReplay",
"bin": "cli.js",
"main": "index.js",

View file

@ -1,6 +1,6 @@
{
"name": "@openreplay/tracker-assist",
"version": "3.0.4",
"version": "3.1.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@ -30,6 +30,12 @@
"js-tokens": "^4.0.0"
}
},
"@medv/finder": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@medv/finder/-/finder-2.1.0.tgz",
"integrity": "sha512-Egrg5XO4kLol24b1Kv50HDfi5hW0yQ6aWSsO0Hea1eJ4rogKElIN0M86FdVnGF4XIGYyA7QWx0MgbOzVPA0qkA==",
"dev": true
},
"@nodelib/fs.scandir": {
"version": "2.1.5",
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
@ -57,11 +63,12 @@
}
},
"@openreplay/tracker": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.0.5.tgz",
"integrity": "sha512-hIY7DnQmm7bCe6v+e257WD7OdNuBOWUZ15Q3yUEdyxu7xDNG7brbak9pS97qCt3VY9xGK0RvW/j3ANlRPk8aVg==",
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/@openreplay/tracker/-/tracker-3.3.0.tgz",
"integrity": "sha512-g9sOG01VaiRLw4TcUbux8j3moa7gsGs8rjZPEVJ5SJqxjje9R7tpUD5UId9ne7QdHSoiHfrWFk3TNRLpXyvImg==",
"dev": true,
"requires": {
"@medv/finder": "^2.0.0",
"error-stack-parser": "^2.0.6"
}
},

View file

@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker-assist",
"description": "Tracker plugin for screen assistance through the WebRTC",
"version": "3.0.4",
"version": "3.1.1",
"keywords": [
"WebRTC",
"assistance",
@ -24,10 +24,10 @@
"peerjs": "^1.3.2"
},
"peerDependencies": {
"@openreplay/tracker": "^3.1.0"
"@openreplay/tracker": "^3.3.0"
},
"devDependencies": {
"@openreplay/tracker": "^3.0.5",
"@openreplay/tracker": "^3.3.0",
"prettier": "^1.18.2",
"replace-in-files-cli": "^1.0.0",
"typescript": "^3.6.4"

View file

@ -1,7 +1,7 @@
const declineIcon = `<svg xmlns="http://www.w3.org/2000/svg" height="22" width="22" viewBox="0 0 128 128" ><g id="Circle_Grid" data-name="Circle Grid"><circle cx="64" cy="64" fill="#ef5261" r="64"/></g><g id="icon"><path d="m57.831 70.1c8.79 8.79 17.405 12.356 20.508 9.253l4.261-4.26a7.516 7.516 0 0 1 10.629 0l9.566 9.566a7.516 7.516 0 0 1 0 10.629l-7.453 7.453c-7.042 7.042-27.87-2.358-47.832-22.319-9.976-9.981-16.519-19.382-20.748-28.222s-5.086-16.091-1.567-19.61l7.453-7.453a7.516 7.516 0 0 1 10.629 0l9.566 9.563a7.516 7.516 0 0 1 0 10.629l-4.264 4.271c-3.103 3.1.462 11.714 9.252 20.5z" fill="#eeefee"/></g></svg>`;
export default class Confirm {
export default class ConfirmWindow {
private wrapper: HTMLDivElement;
constructor(text: string, styles?: Object) {

View file

@ -5,7 +5,7 @@ import type Message from '@openreplay/tracker';
import Mouse from './Mouse';
import CallWindow from './CallWindow';
import Confirm from './Confirm';
import ConfirmWindow from './ConfirmWindow';
export interface Options {
@ -34,20 +34,28 @@ export default function(opts: Partial<Options> = {}) {
return;
}
let assistDemandedRestart = false;
let peer : Peer | null = null;
app.attachStopCallback(function() {
if (assistDemandedRestart) { return; }
peer && peer.destroy();
});
app.attachStartCallback(function() {
// @ts-ignore
if (assistDemandedRestart) { return; }
const peerID = `${app.projectKey}-${app.getSessionID()}`
const peer = new Peer(peerID, {
peer = new Peer(peerID, {
// @ts-ignore
host: app.getHost(),
path: '/assist',
port: location.protocol === 'http:' && appOptions.__DISABLE_SECURE_MODE ? 80 : 443,
});
console.log('OpenReplay tracker-assist peerID:', peerID)
peer.on('error', e => console.log("OpenReplay tracker-assist peer error: ", e.type, e))
peer.on('connection', function(conn) {
window.addEventListener("beforeunload", () => conn.open && conn.send("unload"));
peer.on('error', e => console.log("OpenReplay tracker-assist peer error: ", e.type, e))
console.log('OpenReplay tracker-assist: Connecting...')
conn.on('open', function() {
@ -66,9 +74,12 @@ export default function(opts: Partial<Options> = {}) {
buffering = false;
}
}
assistDemandedRestart = true;
app.stop();
//@ts-ignore (should update tracker dependency)
app.addCommitCallback((messages: Array<Message>): void => {
if (!conn.open) { return; } // TODO: clear commit callbacks on connection close
let i = 0;
while (i < messages.length) {
buffer.push(messages.slice(i, i+=1000));
@ -78,30 +89,31 @@ export default function(opts: Partial<Options> = {}) {
sendNext();
}
});
app.start();
app.start().then(() => { assistDemandedRestart = false; });
});
});
let calling: CallingState = CallingState.False;
let callingState: CallingState = CallingState.False;
peer.on('call', function(call) {
if (!peer) { return; }
const dataConn: DataConnection | undefined = peer
.connections[call.peer].find(c => c.type === 'data');
if (calling !== CallingState.False || !dataConn) {
if (callingState !== CallingState.False || !dataConn) {
call.close();
return;
}
calling = CallingState.Requesting;
const notifyCallEnd = () => {
dataConn.open && dataConn.send("call_end");
}
const confirm = new Confirm(options.confirmText, options.confirmStyle);
callingState = CallingState.Requesting;
const confirm = new ConfirmWindow(options.confirmText, options.confirmStyle);
dataConn.on('data', (data) => { // if call closed by a caller before confirm
if (data === "call_end") {
//console.log('OpenReplay tracker-assist: receiving callend onconfirm')
calling = CallingState.False;
callingState = CallingState.False;
confirm.remove();
}
});
@ -110,7 +122,7 @@ export default function(opts: Partial<Options> = {}) {
if (!agreed || !dataConn.open) {
call.close();
notifyCallEnd();
calling = CallingState.False;
callingState = CallingState.False;
return;
}
@ -123,7 +135,7 @@ export default function(opts: Partial<Options> = {}) {
mouse.remove();
callUI?.remove();
lStream.getTracks().forEach(t => t.stop());
calling = CallingState.False;
callingState = CallingState.False;
}
const initiateCallEnd = () => {
//console.log("callend initiated")

File diff suppressed because it is too large Load diff

View file

@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker",
"description": "The OpenReplay tracker main package",
"version": "3.2.1",
"version": "3.3.0",
"keywords": [
"logging",
"replay"
@ -30,9 +30,6 @@
"@typescript-eslint/parser": "^2.34.0",
"eslint": "^6.8.0",
"eslint-plugin-prettier": "^3.1.4",
"gulp": "^4.0.2",
"gulp-typescript": "^6.0.0-alpha.1",
"merge2": "^1.4.1",
"prettier": "^2.0.0",
"replace-in-files": "^2.0.3",
"rollup": "^2.17.0",

View file

@ -8,5 +8,5 @@ export default {
file: 'build/webworker.js',
format: 'cjs',
},
plugins: [resolve(), babel({ babelHelpers: 'bundled' }), terser()],
plugins: [resolve(), babel({ babelHelpers: 'bundled' }), terser({ mangle: { reserved: ['$'] } })],
};

View file

@ -11,6 +11,12 @@ import type { Options as ObserverOptions } from './observer';
import type { Options as WebworkerOptions, WorkerMessageData } from '../../messages/webworker';
interface OnStartInfo {
sessionID: string,
sessionToken: string,
userUUID: string,
}
export type Options = {
revID: string;
node_id: string;
@ -19,13 +25,16 @@ export type Options = {
local_uuid_key: string;
ingestPoint: string;
__is_snippet: boolean;
onStart?: (info: { sessionID: string, sessionToken: string, userUUID: string }) => void;
__debug_report_edp: string | null;
onStart?: (info: OnStartInfo) => void;
} & ObserverOptions & WebworkerOptions;
type Callback = () => void;
type CommitCallback = (messages: Array<Message>) => void;
export const DEFAULT_INGEST_POINT = 'https://ingest.openreplay.com';
// TODO: use backendHost only
export const DEFAULT_INGEST_POINT = 'https://api.openreplay.com/ingest';
export default class App {
readonly nodes: Nodes;
@ -57,6 +66,7 @@ export default class App {
local_uuid_key: '__openreplay_uuid',
ingestPoint: DEFAULT_INGEST_POINT,
__is_snippet: false,
__debug_report_edp: null,
obscureTextEmails: true,
obscureTextNumbers: false,
},
@ -99,8 +109,23 @@ export default class App {
this.attachEventListener(window, 'beforeunload', alertWorker, false);
this.attachEventListener(document, 'mouseleave', alertWorker, false, false);
this.attachEventListener(document, 'visibilitychange', alertWorker, false);
} catch (e) { /* TODO: send report */}
} catch (e) {
this.sendDebugReport("worker_start", e);
}
}
private sendDebugReport(context: string, e: any) {
if(this.options.__debug_report_edp !== null) {
fetch(this.options.__debug_report_edp, {
method: 'POST',
body: JSON.stringify({
context,
error: `${e}`
})
});
}
}
send(message: Message, urgent = false): void {
if (!this.isActive) {
return;
@ -184,7 +209,7 @@ export default class App {
active(): boolean {
return this.isActive;
}
_start(reset: boolean): void { // TODO: return a promise instead of onStart handling
private _start(reset: boolean): Promise<OnStartInfo> {
if (!this.isActive) {
this.isActive = true;
if (!this.worker) {
@ -208,7 +233,7 @@ export default class App {
connAttemptGap: this.options.connAttemptGap,
}
this.worker.postMessage(messageData); // brings delay of 10th ms?
window.fetch(this.options.ingestPoint + '/v1/web/start', {
return window.fetch(this.options.ingestPoint + '/v1/web/start', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
@ -230,14 +255,17 @@ export default class App {
if (r.status === 200) {
return r.json()
} else { // TODO: handle canceling && 403
throw new Error("Server error");
return r.text().then(text => {
throw new Error(`Server error: ${r.status}. ${text}`);
});
}
})
.then(r => {
const { token, userUUID, sessionID } = r;
const { token, userUUID, sessionID, beaconSizeLimit } = r;
if (typeof token !== 'string' ||
typeof userUUID !== 'string') {
throw new Error("Incorrect server response");
typeof userUUID !== 'string' ||
(typeof beaconSizeLimit !== 'number' && typeof beaconSizeLimit !== 'undefined')) {
throw new Error(`Incorrect server response: ${ JSON.stringify(r) }`);
}
sessionStorage.setItem(this.options.session_token_key, token);
localStorage.setItem(this.options.local_uuid_key, userUUID);
@ -247,34 +275,40 @@ export default class App {
if (!this.worker) {
throw new Error("Stranger things: no worker found after start request");
}
this.worker.postMessage({ token });
this.worker.postMessage({ token, beaconSizeLimit });
this.startCallbacks.forEach((cb) => cb());
this.observer.observe();
this.ticker.start();
log("OpenReplay tracking started.");
const onStartInfo = { sessionToken: token, userUUID, sessionID };
if (typeof this.options.onStart === 'function') {
this.options.onStart({ sessionToken: token, userUUID, sessionID });
this.options.onStart(onStartInfo);
}
return onStartInfo;
})
.catch(e => {
this.stop();
/* TODO: send report */
this.sendDebugReport("session_start", e);
throw e;
})
}
return Promise.reject("Player is active");
}
start(reset: boolean = false): void {
start(reset: boolean = false): Promise<OnStartInfo> {
if (!document.hidden) {
this._start(reset);
return this._start(reset);
} else {
const onVisibilityChange = () => {
if (!document.hidden) {
document.removeEventListener("visibilitychange", onVisibilityChange);
this._start(reset);
return new Promise((resolve) => {
const onVisibilityChange = () => {
if (!document.hidden) {
document.removeEventListener("visibilitychange", onVisibilityChange);
resolve(this._start(reset));
}
}
}
document.addEventListener("visibilitychange", onVisibilityChange);
document.addEventListener("visibilitychange", onVisibilityChange);
});
}
}
stop(): void {

View file

@ -20,12 +20,14 @@ import CSSRules from './modules/cssrules';
import { IN_BROWSER, deprecationWarn } from './utils';
import { Options as AppOptions } from './app';
import { Options as ExceptionOptions } from './modules/exception';
import { Options as ConsoleOptions } from './modules/console';
import { Options as ExceptionOptions } from './modules/exception';
import { Options as InputOptions } from './modules/input';
import { Options as MouseOptions } from './modules/mouse';
import { Options as PerformanceOptions } from './modules/performance';
import { Options as TimingOptions } from './modules/timing';
export type Options = Partial<
AppOptions & ConsoleOptions & ExceptionOptions & InputOptions & TimingOptions
AppOptions & ConsoleOptions & ExceptionOptions & InputOptions & MouseOptions & PerformanceOptions & TimingOptions
> & {
projectID?: number; // For the back compatibility only (deprecated)
projectKey: string;
@ -92,9 +94,9 @@ export default class API {
Exception(this.app, options);
Img(this.app);
Input(this.app, options);
Mouse(this.app);
Mouse(this.app, options);
Timing(this.app, options);
Performance(this.app);
Performance(this.app, options);
Scroll(this.app);
Longtasks(this.app);
(window as any).__OPENREPLAY__ = (window as any).__OPENREPLAY__ || this;
@ -223,18 +225,12 @@ export default class API {
}
}
handleError = (e: Error) => {
if (e instanceof Error && this.app !== null) {
handleError = (e: Error | ErrorEvent | PromiseRejectionEvent) => {
if (this.app === null) { return; }
if (e instanceof Error) {
this.app.send(getExceptionMessage(e, []));
}
}
handleErrorEvent = (e: ErrorEvent | PromiseRejectionEvent) => {
if (
(e instanceof ErrorEvent ||
('PromiseRejectionEvent' in window && e instanceof PromiseRejectionEvent)
) &&
this.app !== null
} else if (e instanceof ErrorEvent ||
('PromiseRejectionEvent' in window && e instanceof PromiseRejectionEvent)
) {
const msg = getExceptionMessageFromEvent(e);
if (msg != null) {

View file

@ -1,14 +1,10 @@
import type { Options as FinderOptions } from '../vendors/finder/finder';
import { finder } from '../vendors/finder/finder';
import { normSpaces, hasOpenreplayAttribute, getLabelAttribute } from '../utils';
import App from '../app';
import { MouseMove, MouseClick } from '../../messages';
import { getInputLabel } from './input';
const selectorMap: {[id:number]: string} = {};
function getSelector(id: number, target: Element): string {
return selectorMap[id] = selectorMap[id] || finder(target);
}
function getTarget(target: EventTarget | null): Element | null {
if (target instanceof Element) {
return _getTarget(target);
@ -76,7 +72,18 @@ function getTargetLabel(target: Element): string {
return '';
}
export default function (app: App): void {
export interface Options {
selectorFinder: boolean | FinderOptions;
}
export default function (app: App, opts: Partial<Options>): void {
const options: Options = Object.assign(
{
selectorFinder: true,
},
opts,
);
let mousePositionX = -1;
let mousePositionY = -1;
let mousePositionChanged = false;
@ -97,6 +104,13 @@ export default function (app: App): void {
}
};
const selectorMap: {[id:number]: string} = {};
function getSelector(id: number, target: Element): string {
if (options.selectorFinder === false) { return '' }
return selectorMap[id] = selectorMap[id] ||
finder(target, options.selectorFinder === true ? undefined : options.selectorFinder);
}
app.attachEventListener(
<HTMLElement>document.documentElement,
'mouseover',

View file

@ -11,7 +11,7 @@ type Perf = {
}
}
const perf: Perf = IN_BROWSER && 'memory' in performance // works in Chrome only
const perf: Perf = IN_BROWSER && 'performance' in window && 'memory' in performance // works in Chrome only
? performance as any
: { memory: {} }
@ -19,7 +19,19 @@ const perf: Perf = IN_BROWSER && 'memory' in performance // works in Chrome only
export const deviceMemory = IN_BROWSER ? ((navigator as any).deviceMemory || 0) * 1024 : 0;
export const jsHeapSizeLimit = perf.memory.jsHeapSizeLimit || 0;
export default function (app: App): void {
export interface Options {
capturePerformance: boolean;
}
export default function (app: App, opts: Partial<Options>): void {
const options: Options = Object.assign(
{
capturePerformance: true,
},
opts,
);
if (!options.capturePerformance) { return; }
let frames: number | undefined;
let ticks: number | undefined;

View file

@ -1,6 +1,7 @@
import { isURL } from '../utils';
import App from '../app';
import { ResourceTiming, PageLoadTiming, PageRenderTiming } from '../../messages';
import type Message from '../../messages/message';
// Inspired by https://github.com/WPO-Foundation/RUM-SpeedIndex/blob/master/src/rum-speedindex.js
@ -104,21 +105,28 @@ export default function (app: App, opts: Partial<Options>): void {
if (!('PerformanceObserver' in window)) {
options.captureResourceTimings = false;
}
if (!options.captureResourceTimings) {
options.capturePageLoadTimings = false;
options.capturePageRenderTimings = false;
}
if (!options.captureResourceTimings) { return } // Resources are necessary for all timings
let resources: ResourcesTimeMap | null = options.captureResourceTimings
? {}
: null;
const mQueue: Message[] = []
function sendOnStart(m: Message) {
if (app.active()) {
app.send(m)
} else {
mQueue.push(m)
}
}
app.attachStartCallback(function() {
mQueue.forEach(m => app.send(m))
})
let resources: ResourcesTimeMap | null = {}
function resourceTiming(entry: PerformanceResourceTiming): void {
if (entry.duration <= 0 || !isURL(entry.name) || app.isServiceURL(entry.name)) return;
if (resources !== null) {
resources[entry.name] = entry.startTime + entry.duration;
}
app.send(new
sendOnStart(new
ResourceTiming(
entry.startTime + performance.timing.navigationStart,
entry.duration,
@ -136,20 +144,17 @@ export default function (app: App, opts: Partial<Options>): void {
);
}
const observer: PerformanceObserver | null = options.captureResourceTimings
? new PerformanceObserver((list) =>
list.getEntries().forEach(resourceTiming),
)
: null;
if (observer !== null) {
performance.getEntriesByType('resource').forEach(resourceTiming);
observer.observe({ entryTypes: ['resource'] });
}
const observer: PerformanceObserver = new PerformanceObserver(
(list) => list.getEntries().forEach(resourceTiming),
)
performance.getEntriesByType('resource').forEach(resourceTiming)
observer.observe({ entryTypes: ['resource'] })
let firstPaint = 0,
firstContentfulPaint = 0;
if (options.capturePageLoadTimings && observer !== null) {
if (options.capturePageLoadTimings) {
let pageLoadTimingSent: boolean = false;
app.ticker.attach(() => {
@ -200,7 +205,7 @@ export default function (app: App, opts: Partial<Options>): void {
}, 30);
}
if (options.capturePageRenderTimings && observer !== null) {
if (options.capturePageRenderTimings) {
let visuallyComplete = 0,
interactiveWindowStartTime = 0,
interactiveWindowTickTime: number | null = 0,

View file

@ -11,6 +11,7 @@ type Settings = {
pageNo?: number;
startTimestamp?: number;
timeAdjustment?: number;
beaconSizeLimit?: number;
} & Partial<Options>;
export type WorkerMessageData = null | "stop" | Settings | Array<{ _id: number }>;

View file

@ -6,7 +6,7 @@ import type { WorkerMessageData } from '../messages/webworker';
const SEND_INTERVAL = 20 * 1000;
const BEACON_SIZE_LIMIT = 1e6 // Limit is set in the backend/services/http
let BEACON_SIZE_LIMIT = 1e6 // Limit is set in the backend/services/http
let beaconSize = 4 * 1e5; // Default 400kB
@ -123,6 +123,7 @@ self.onmessage = ({ data }: MessageEvent<WorkerMessageData>) => {
timeAdjustment = data.timeAdjustment || timeAdjustment;
MAX_ATTEMPTS_COUNT = data.connAttemptCount || MAX_ATTEMPTS_COUNT;
ATTEMPT_TIMEOUT = data.connAttemptGap || ATTEMPT_TIMEOUT;
BEACON_SIZE_LIMIT = data.beaconSizeLimit || BEACON_SIZE_LIMIT;
beaconSize = Math.min(BEACON_SIZE_LIMIT, data.beaconSize || beaconSize);
if (writer.isEmpty()) {
writeBatchMeta();

View file

@ -24,7 +24,7 @@ const peerServer = ExpressPeerServer(server, {
debug: true,
path: '/',
proxied: true,
allow_discovery: true
allow_discovery: false
});
peerServer.on('connection', peerConnection);
peerServer.on('disconnect', peerDisconnect);

View file

@ -48,13 +48,13 @@ const peerError = (error) => {
}
peerRouter.get('/peers', function (req, res) {
peerRouter.get(`/${process.env.S3_KEY}/peers`, function (req, res) {
console.log("looking for all available sessions");
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');
res.end(JSON.stringify({"data": connectedPeers}));
});
peerRouter.get('/peers/:projectKey', function (req, res) {
peerRouter.get(`/${process.env.S3_KEY}/peers/:projectKey`, function (req, res) {
console.log(`looking for available sessions for ${req.params.projectKey}`);
res.statusCode = 200;
res.setHeader('Content-Type', 'application/json');