Merge remote-tracking branch 'origin/dev' into api_insights

# Conflicts:
#	scripts/helm/db/init_dbs/postgresql/init_schema.sql
This commit is contained in:
Taha Yassine Kraiem 2021-10-28 18:26:24 +02:00
commit ec0bb60443
27 changed files with 1275 additions and 1125 deletions

View file

@ -897,3 +897,8 @@ def sessions_live_search(projectId, context):
def get_heatmaps_by_url(projectId, context):
data = app.current_request.json_body
return {"data": heatmaps.get_by_url(project_id=projectId, data=data)}
@app.route('/general_stats', methods=['GET'], authorizer=None)
def get_general_stats():
    """Unauthenticated endpoint exposing global usage statistics.

    Registered with ``authorizer=None``; external access is expected to be
    blocked at the ingress level (the nginx config in this commit adds
    ``location ~* /general_stats { deny all; }``).

    Returns:
        dict: ``{"data": {"sessions": <total session count>}}``.
    """
    # BUGFIX: the key was "sessions:" (stray trailing colon inside the
    # string literal) — almost certainly a typo; consumers would have had
    # to look up a key named "sessions:".
    return {"data": {"sessions": sessions.count_all()}}

View file

@ -162,6 +162,7 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False
"projectId": project_id,
"userId": user_id}
with pg_client.PostgresClient() as cur:
ss_constraints = []
extra_constraints = [
cur.mogrify("s.project_id = %(project_id)s", {"project_id": project_id}),
cur.mogrify("s.duration IS NOT NULL", {})
@ -173,7 +174,96 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False
extra_constraints.append(cur.mogrify("fs.user_id = %(userId)s", {"userId": user_id}))
events_query_part = ""
if "filters" in data:
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
for f in data["filters"]:
if not isinstance(f.get("value"), list):
f["value"] = [f.get("value")]
if len(f["value"]) == 0 or f["value"][0] is None:
continue
filter_type = f["type"].upper()
f["value"] = __get_sql_value_multiple(f["value"])
if filter_type == sessions_metas.meta_type.USERBROWSER:
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(cur.mogrify(f's.user_browser {op} %(value)s', {"value": f["value"]}))
ss_constraints.append(cur.mogrify(f'ms.user_browser {op} %(value)s', {"value": f["value"]}))
elif filter_type in [sessions_metas.meta_type.USEROS, sessions_metas.meta_type.USEROS_IOS]:
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(cur.mogrify(f's.user_os {op} %(value)s', {"value": f["value"]}))
ss_constraints.append(cur.mogrify(f'ms.user_os {op} %(value)s', {"value": f["value"]}))
elif filter_type in [sessions_metas.meta_type.USERDEVICE, sessions_metas.meta_type.USERDEVICE_IOS]:
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(cur.mogrify(f's.user_device {op} %(value)s', {"value": f["value"]}))
ss_constraints.append(cur.mogrify(f'ms.user_device {op} %(value)s', {"value": f["value"]}))
elif filter_type in [sessions_metas.meta_type.USERCOUNTRY, sessions_metas.meta_type.USERCOUNTRY_IOS]:
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(cur.mogrify(f's.user_country {op} %(value)s', {"value": f["value"]}))
ss_constraints.append(cur.mogrify(f'ms.user_country {op} %(value)s', {"value": f["value"]}))
elif filter_type == "duration".upper():
if len(f["value"]) > 0 and f["value"][0] is not None:
extra_constraints.append(
cur.mogrify("s.duration >= %(minDuration)s", {"minDuration": f["value"][0]}))
ss_constraints.append(
cur.mogrify("ms.duration >= %(minDuration)s", {"minDuration": f["value"][0]}))
if len(f["value"]) > 1 and f["value"][1] is not None and f["value"][1] > 0:
extra_constraints.append(
cur.mogrify("s.duration <= %(maxDuration)s", {"maxDuration": f["value"][1]}))
ss_constraints.append(
cur.mogrify("ms.duration <= %(maxDuration)s", {"maxDuration": f["value"][1]}))
elif filter_type == sessions_metas.meta_type.REFERRER:
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(
cur.mogrify(f"p.base_referrer {op} %(referrer)s", {"referrer": f["value"]}))
elif filter_type == events.event_type.METADATA.ui_type:
op = __get_sql_operator(f["operator"])
if f.get("key") in meta_keys.keys():
extra_constraints.append(
cur.mogrify(f"s.{metadata.index_to_colname(meta_keys[f['key']])} {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)}))
ss_constraints.append(
cur.mogrify(f"ms.{metadata.index_to_colname(meta_keys[f['key']])} {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)}))
elif filter_type in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
op = __get_sql_operator(f["operator"])
extra_constraints.append(
cur.mogrify(f"s.user_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
ss_constraints.append(
cur.mogrify(f"ms.user_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
elif filter_type in [sessions_metas.meta_type.USERANONYMOUSID,
sessions_metas.meta_type.USERANONYMOUSID_IOS]:
op = __get_sql_operator(f["operator"])
extra_constraints.append(
cur.mogrify(f"s.user_anonymous_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
ss_constraints.append(
cur.mogrify(f"ms.user_anonymous_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
elif filter_type in [sessions_metas.meta_type.REVID, sessions_metas.meta_type.REVID_IOS]:
op = __get_sql_operator(f["operator"])
extra_constraints.append(
cur.mogrify(f"s.rev_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
ss_constraints.append(
cur.mogrify(f"ms.rev_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
# ---------------------------------------------------------------------------
if len(data.get("events", [])) > 0:
ss_constraints = [s.decode('UTF-8') for s in ss_constraints]
events_query_from = []
event_index = 0
@ -268,7 +358,8 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False
else:
continue
if event_index == 0:
event_where += ss_constraints
if is_not:
if event_index == 0:
events_query_from.append(cur.mogrify(f"""\
@ -316,73 +407,6 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False
else:
data["events"] = []
# ---------------------------------------------------------------------------
if "filters" in data:
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
for f in data["filters"]:
if not isinstance(f.get("value"), list):
f["value"] = [f.get("value")]
if len(f["value"]) == 0 or f["value"][0] is None:
continue
filter_type = f["type"].upper()
f["value"] = __get_sql_value_multiple(f["value"])
if filter_type == sessions_metas.meta_type.USERBROWSER:
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(
cur.mogrify(f's.user_browser {op} %(value)s', {"value": f["value"]}))
elif filter_type in [sessions_metas.meta_type.USEROS, sessions_metas.meta_type.USEROS_IOS]:
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(cur.mogrify(f's.user_os {op} %(value)s', {"value": f["value"]}))
elif filter_type in [sessions_metas.meta_type.USERDEVICE, sessions_metas.meta_type.USERDEVICE_IOS]:
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(cur.mogrify(f's.user_device {op} %(value)s', {"value": f["value"]}))
elif filter_type in [sessions_metas.meta_type.USERCOUNTRY, sessions_metas.meta_type.USERCOUNTRY_IOS]:
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(cur.mogrify(f's.user_country {op} %(value)s', {"value": f["value"]}))
elif filter_type == "duration".upper():
if len(f["value"]) > 0 and f["value"][0] is not None:
extra_constraints.append(
cur.mogrify("s.duration >= %(minDuration)s", {"minDuration": f["value"][0]}))
if len(f["value"]) > 1 and f["value"][1] is not None and f["value"][1] > 0:
extra_constraints.append(
cur.mogrify("s.duration <= %(maxDuration)s", {"maxDuration": f["value"][1]}))
elif filter_type == sessions_metas.meta_type.REFERRER:
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(
cur.mogrify(f"p.base_referrer {op} %(referrer)s", {"referrer": f["value"]}))
elif filter_type == events.event_type.METADATA.ui_type:
op = __get_sql_operator(f["operator"])
if f.get("key") in meta_keys.keys():
extra_constraints.append(
cur.mogrify(f"s.{metadata.index_to_colname(meta_keys[f['key']])} {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
elif filter_type in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
op = __get_sql_operator(f["operator"])
extra_constraints.append(
cur.mogrify(f"s.user_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
elif filter_type in [sessions_metas.meta_type.USERANONYMOUSID,
sessions_metas.meta_type.USERANONYMOUSID_IOS]:
op = __get_sql_operator(f["operator"])
extra_constraints.append(
cur.mogrify(f"s.user_anonymous_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
elif filter_type in [sessions_metas.meta_type.REVID, sessions_metas.meta_type.REVID_IOS]:
op = __get_sql_operator(f["operator"])
extra_constraints.append(
cur.mogrify(f"s.rev_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
# ---------------------------------------------------------------------------
if data.get("startDate") is not None:
@ -741,3 +765,9 @@ def delete_sessions_by_user_ids(project_id, user_ids):
cur.execute(query=query)
return True
def count_all():
    """Count all recorded sessions across every project.

    Returns:
        int: total number of rows in ``public.sessions`` (0 when empty).
    """
    with pg_client.PostgresClient() as cur:
        # NOTE(review): assumes pg_client yields a psycopg2-style dict
        # cursor — confirm. psycopg2's cursor.execute() returns None, so
        # the original `row = cur.execute(...)` followed by `row.get(...)`
        # would raise AttributeError; fetch the row explicitly instead.
        cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions")
        row = cur.fetchone()
    return row.get("count", 0) if row else 0

View file

@ -14,8 +14,8 @@ import (
gzip "github.com/klauspost/pgzip"
"openreplay/backend/pkg/db/postgres"
. "openreplay/backend/pkg/messages"
"openreplay/backend/pkg/token"
. "openreplay/backend/pkg/messages"
)
const JSON_SIZE_LIMIT int64 = 1e3 // 1Kb
@ -124,17 +124,22 @@ func pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64) {
body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT)
//defer body.Close()
var reader io.ReadCloser
var err error
switch r.Header.Get("Content-Encoding") {
case "gzip":
reader, err := gzip.NewReader(body)
log.Println("Gzip", reader)
reader, err = gzip.NewReader(body)
if err != nil {
responseWithError(w, http.StatusInternalServerError, err) // TODO: stage-dependent response
return
}
log.Println("Gzip reader init", reader)
defer reader.Close()
default:
reader = body
}
log.Println("Reader after switch:", reader)
buf, err := ioutil.ReadAll(reader)
if err != nil {
responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging

View file

@ -1,145 +1,180 @@
package main
// const FILES_SIZE_LIMIT int64 = 1e8 // 100Mb
import (
"encoding/json"
"net/http"
"errors"
"time"
"math/rand"
"strconv"
// func startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) {
// type request struct {
// // SessionID *string
// EncodedProjectID *uint64 `json:"projectID"`
// TrackerVersion string `json:"trackerVersion"`
// RevID string `json:"revID"`
// UserUUID *string `json:"userUUID"`
// //UserOS string `json"userOS"` //hardcoded 'MacOS'
// UserOSVersion string `json:"userOSVersion"`
// UserDevice string `json:"userDevice"`
// Timestamp uint64 `json:"timestamp"`
// // UserDeviceType uint 0:phone 1:pad 2:tv 3:carPlay 5:mac
// // “performances”:{
// // “activeProcessorCount”:8,
// // “isLowPowerModeEnabled”:0,
// // “orientation”:0,
// // “systemUptime”:585430,
// // “batteryState”:0,
// // “thermalState”:0,
// // “batteryLevel”:0,
// // “processorCount”:8,
// // “physicalMemory”:17179869184
// // },
// }
// type response struct {
// Token string `json:"token"`
// ImagesHashList []string `json:"imagesHashList"`
// UserUUID string `json:"userUUID"`
// SESSION_ID uint64 `json:"SESSION_ID"` ///TEMP
// }
// startTime := time.Now()
// req := &request{}
// body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT)
// //defer body.Close()
// if err := json.NewDecoder(body).Decode(req); err != nil {
// responseWithError(w, http.StatusBadRequest, err)
// return
// }
"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/token"
. "openreplay/backend/pkg/messages"
)
// if req.EncodedProjectID == nil {
// responseWithError(w, http.StatusForbidden, errors.New("ProjectID value required"))
// return
// }
// projectID := decodeProjectID(*(req.EncodedProjectID))
// if projectID == 0 {
// responseWithError(w, http.StatusUnprocessableEntity, errors.New("ProjectID value is invalid"))
// return
// }
// p, err := pgconn.GetProject(uint32(projectID))
// if err != nil {
// if postgres.IsNoRowsErr(err) {
// responseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or is not active"))
// } else {
// responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
// }
// return
// }
// sessionID, err := flaker.Compose(req.Timestamp)
// if err != nil {
// responseWithError(w, http.StatusInternalServerError, err)
// return
// }
// userUUID := getUUID(req.UserUUID)
// country := geoIP.ExtractISOCodeFromHTTPRequest(r)
// expirationTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond)
const FILES_SIZE_LIMIT int64 = 1e8 // 100Mb
// imagesHashList, err := s3.GetFrequentlyUsedKeys(*(req.EncodedProjectID)) // TODO: reuse index: ~ frequency * size
// if err != nil {
// responseWithError(w, http.StatusInternalServerError, err)
// return
// }
// startSessionHandlerIOS starts (or resumes) an iOS recording session.
// Flow: decode the JSON request -> resolve the project by its key -> if the
// supplied token does not parse, apply project sampling and create a brand
// new session (emitting an IOSSessionStart message to the raw topic);
// otherwise the token's existing session is reused as-is. In both cases the
// response carries a (re)composed token, the session id and the beacon size
// limit for subsequent uploads.
func startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) {
type request struct {
Token string `json:"token"`
ProjectKey *string `json:"projectKey"`
TrackerVersion string `json:"trackerVersion"`
RevID string `json:"revID"`
UserUUID *string `json:"userUUID"`
//UserOS string `json"userOS"` //hardcoded 'MacOS'
UserOSVersion string `json:"userOSVersion"`
UserDevice string `json:"userDevice"`
Timestamp uint64 `json:"timestamp"`
// UserDeviceType uint 0:phone 1:pad 2:tv 3:carPlay 5:mac
// “performances”:{
// “activeProcessorCount”:8,
// “isLowPowerModeEnabled”:0,
// “orientation”:0,
// “systemUptime”:585430,
// “batteryState”:0,
// “thermalState”:0,
// “batteryLevel”:0,
// “processorCount”:8,
// “physicalMemory”:17179869184
// },
}
type response struct {
Token string `json:"token"`
ImagesHashList []string `json:"imagesHashList"`
UserUUID string `json:"userUUID"`
BeaconSizeLimit int64 `json:"beaconSizeLimit"`
SessionID string `json:"sessionID"`
}
startTime := time.Now()
req := &request{}
// Request bodies larger than JSON_SIZE_LIMIT are rejected by the reader.
body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT)
//defer body.Close()
if err := json.NewDecoder(body).Decode(req); err != nil {
responseWithError(w, http.StatusBadRequest, err)
return
}
// responseWithJSON(w, &response{
// Token: tokenizer.Compose(sessionID, uint64(expirationTime.UnixNano()/1e6)),
// ImagesHashList: imagesHashList,
// UserUUID: userUUID,
// //TEMP:
// SESSION_ID: sessionID,
// })
// producer.Produce(topicRaw, sessionID, messages.Encode(&messages.IOSSessionStart{
// Timestamp: req.Timestamp,
// ProjectID: projectID,
// TrackerVersion: req.TrackerVersion,
// RevID: req.RevID,
// UserUUID: userUUID,
// UserOS: "MacOS",
// UserOSVersion: req.UserOSVersion,
// UserDevice: MapIOSDevice(req.UserDevice),
// UserDeviceType: GetIOSDeviceType(req.UserDevice), // string `json:"userDeviceType"` // From UserDevice; ENUM ?
// UserCountry: country,
// }))
// }
if req.ProjectKey == nil {
responseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required"))
return
}
p, err := pgconn.GetProjectByKey(*req.ProjectKey)
if err != nil {
if postgres.IsNoRowsErr(err) {
responseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or is not active"))
} else {
responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
}
return
}
userUUID := getUUID(req.UserUUID)
// A parse failure means there is no usable existing session: fall into the
// new-session branch below. On success the old tokenData is reused and no
// IOSSessionStart message is emitted.
tokenData, err := tokenizer.Parse(req.Token)
if err != nil { // Starting the new one
// Project-level sampling: drop the session with probability
// (100 - SampleRate)%. NOTE(review): assumes SampleRate is 0..100 —
// confirm against the project model.
dice := byte(rand.Intn(100)) // [0, 100)
if dice >= p.SampleRate {
responseWithError(w, http.StatusForbidden, errors.New("cancel"))
return
}
ua := uaParser.ParseFromHTTPRequest(r)
if ua == nil {
responseWithError(w, http.StatusForbidden, errors.New("browser not recognized"))
return
}
// Session id is derived from the server-side start time (ms since epoch).
sessionID, err := flaker.Compose(uint64(startTime.UnixNano() / 1e6))
if err != nil {
responseWithError(w, http.StatusInternalServerError, err)
return
}
// TODO: if EXPIRED => send message for two sessions association
expTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond)
tokenData = &token.TokenData{sessionID, expTime.UnixNano() / 1e6}
country := geoIP.ExtractISOCodeFromHTTPRequest(r)
// The difference with web is mostly here:
producer.Produce(TOPIC_RAW, tokenData.ID, Encode(&IOSSessionStart{
Timestamp: req.Timestamp,
ProjectID: uint64(p.ProjectID),
TrackerVersion: req.TrackerVersion,
RevID: req.RevID,
UserUUID: userUUID,
UserOS: "IOS",
UserOSVersion: req.UserOSVersion,
UserDevice: MapIOSDevice(req.UserDevice),
UserDeviceType: GetIOSDeviceType(req.UserDevice),
UserCountry: country,
}))
}
// imagesHashList, err := s3.GetFrequentlyUsedKeys(*(req.EncodedProjectID)) // TODO: reuse index: ~ frequency * size
// if err != nil {
// responseWithError(w, http.StatusInternalServerError, err)
// return
// }
// tokenData is non-nil here: either parsed above or built in the branch.
responseWithJSON(w, &response{
// ImagesHashList: imagesHashList,
Token: tokenizer.Compose(*tokenData),
UserUUID: userUUID,
SessionID: strconv.FormatUint(tokenData.ID, 10),
BeaconSizeLimit: BEACON_SIZE_LIMIT,
})
}
// func pushLateMessagesHandler(w http.ResponseWriter, r *http.Request) {
// sessionData, err := tokenizer.ParseFromHTTPRequest(r)
// if err != nil && err != token.EXPIRED {
// responseWithError(w, http.StatusUnauthorized, err)
// return
// }
// // Check timestamps here?
// pushMessages(w, r, sessionData.ID)
// }
// pushLateMessagesHandler accepts message batches whose session token has
// already expired: token.EXPIRED is deliberately tolerated, unlike every
// other auth failure. Presumably these are late flushes from the tracker's
// buffer — confirm against tracker behavior.
func pushLateMessagesHandler(w http.ResponseWriter, r *http.Request) {
sessionData, err := tokenizer.ParseFromHTTPRequest(r)
// Reject only genuinely invalid tokens; an expired one is still usable here.
if err != nil && err != token.EXPIRED {
responseWithError(w, http.StatusUnauthorized, err)
return
}
// Check timestamps here?
pushMessages(w, r, sessionData.ID)
}
// func iosImagesUploadHandler(w http.ResponseWriter, r *http.Request) {
// r.Body = http.MaxBytesReader(w, r.Body, FILES_SIZE_LIMIT)
// // defer r.Body.Close()
// err := r.ParseMultipartForm(1e5) // 100Kb
// if err == http.ErrNotMultipart || err == http.ErrMissingBoundary {
// responseWithError(w, http.StatusUnsupportedMediaType, err)
// // } else if err == multipart.ErrMessageTooLarge // if non-files part exceeds 10 MB
// } else if err != nil {
// responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
// }
// iosImagesUploadHandler receives screenshot files for an iOS session as a
// multipart form and uploads each file to S3 under
// "<projectKey>/<sessionID>/<filename>".
// Auth: requires a parseable session token in the request.
func iosImagesUploadHandler(w http.ResponseWriter, r *http.Request) {
	sessionData, err := tokenizer.ParseFromHTTPRequest(r)
	if err != nil { // Should accept expired token?
		responseWithError(w, http.StatusUnauthorized, err)
		return
	}
	r.Body = http.MaxBytesReader(w, r.Body, FILES_SIZE_LIMIT)
	// defer r.Body.Close()
	err = r.ParseMultipartForm(1e6) // ~1Mb for the non-file form values
	if err == http.ErrNotMultipart || err == http.ErrMissingBoundary {
		responseWithError(w, http.StatusUnsupportedMediaType, err)
		// BUGFIX: the original fell through after responding and went on to
		// dereference a nil r.MultipartForm below.
		return
		// } else if err == multipart.ErrMessageTooLarge // if non-files part exceeds 10 MB
	} else if err != nil {
		responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
		return // BUGFIX: same missing return as above
	}
	if r.MultipartForm == nil {
		responseWithError(w, http.StatusInternalServerError, errors.New("Multipart not parsed"))
		return // BUGFIX: original kept going and panicked on the nil map access below
	}
	if len(r.MultipartForm.Value["projectKey"]) == 0 {
		responseWithError(w, http.StatusBadRequest, errors.New("projectKey parameter missing")) // status for missing/wrong parameter?
		return
	}
	// S3 object key prefix: "<projectKey>/<sessionID>/"
	prefix := r.MultipartForm.Value["projectKey"][0] + "/" + strconv.FormatUint(sessionData.ID, 10) + "/"
	for _, fileHeaderList := range r.MultipartForm.File {
		for _, fileHeader := range fileHeaderList {
			file, err := fileHeader.Open()
			if err != nil {
				continue // TODO: send server error or accumulate successful files
			}
			key := prefix + fileHeader.Filename
			// Uploaded asynchronously; upload failures are not reported to the client.
			go s3.Upload(file, key, "image/png", false)
		}
	}
	w.WriteHeader(http.StatusOK)
}

View file

@ -76,6 +76,9 @@ func main() {
return
}
log.Printf("Request: %v - %v ", r.Method, r.URL.Path)
switch r.URL.Path {
case "/":
w.WriteHeader(http.StatusOK)
@ -100,34 +103,34 @@ func main() {
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
// case "/v1/ios/start":
// switch r.Method {
// case http.MethodPost:
// startSessionHandlerIOS(w, r)
// default:
// w.WriteHeader(http.StatusMethodNotAllowed)
// }
// case "/v1/ios/append":
// switch r.Method {
// case http.MethodPost:
// pushMessagesHandler(w, r)
// default:
// w.WriteHeader(http.StatusMethodNotAllowed)
// }
// case "/v1/ios/late":
// switch r.Method {
// case http.MethodPost:
// pushLateMessagesHandler(w, r)
// default:
// w.WriteHeader(http.StatusMethodNotAllowed)
// }
// case "/v1/ios/images":
// switch r.Method {
// case http.MethodPost:
// iosImagesUploadHandler(w, r)
// default:
// w.WriteHeader(http.StatusMethodNotAllowed)
// }
case "/v1/ios/start":
switch r.Method {
case http.MethodPost:
startSessionHandlerIOS(w, r)
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
case "/v1/ios/i":
switch r.Method {
case http.MethodPost:
pushMessagesHandler(w, r)
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
case "/v1/ios/late":
switch r.Method {
case http.MethodPost:
pushLateMessagesHandler(w, r)
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
case "/v1/ios/images":
switch r.Method {
case http.MethodPost:
iosImagesUploadHandler(w, r)
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
default:
w.WriteHeader(http.StatusNotFound)
}

View file

@ -11,6 +11,7 @@ func responseWithJSON(w http.ResponseWriter, res interface{}) {
if err != nil {
log.Println(err)
}
w.Header().Set("Content-Type", "application/json")
w.Write(body)
}

View file

@ -26,7 +26,7 @@ import stl from './storage.css';
function getActionsName(type) {
switch(type) {
case STORAGE_TYPES.MOBX:
return "EVENTS";
return "MUTATIONS";
case STORAGE_TYPES.VUEX:
return "MUTATIONS";
default:
@ -141,7 +141,7 @@ export default class Storage extends React.PureComponent {
break;
case STORAGE_TYPES.MOBX:
src = item.payload;
name = `@${item.type} ${src && src.name}`;
name = `@${item.type} ${src && src.type}`;
break;
}

View file

@ -50,9 +50,9 @@ spec:
{{- if eq .Values.pvc.name "hostPath" }}
volumeMounts:
- mountPath: {{ .Values.pvc.mountPath }}
name: {{ .Values.pvc.name }}
name: datadir
volumes:
- name: mydir
- name: datadir
hostPath:
# Ensure the file directory is created.
path: {{ .Values.pvc.hostMountPath }}

File diff suppressed because it is too large Load diff

View file

@ -5,6 +5,9 @@ metadata:
namespace: {{ .Release.Namespace }}
data:
location.list: |-
location ~* /general_stats {
deny all;
}
location /healthz {
return 200 'OK';
}

View file

@ -21,7 +21,7 @@
file="{{ item|basename }}"
kubectl exec -n db postgresql-postgresql-0 -- /bin/bash -c "rm -rf /tmp/$file"
kubectl cp -n db $file postgresql-postgresql-0:/tmp/
kubectl exec -n db postgresql-postgresql-0 -- /bin/bash -c "PGPASSWORD=asayerPostgres psql -U postgres -f /tmp/$file" &> "{{ playbook_dir }}"/postgresql_init.log
kubectl exec -n db postgresql-postgresql-0 -- /bin/bash -c "PGPASSWORD=asayerPostgres psql -v ON_ERROR_STOP=1 -U postgres -f /tmp/$file" &> "{{ playbook_dir }}"/postgresql_init.log
args:
chdir: db/init_dbs/postgresql
with_fileglob:

View file

@ -5,7 +5,7 @@ image:
{% endif %}
env:
LICENSE_KEY: "{{ enterprise_edition_license }}"
POSTGRES_STRING: "postgres://{{postgres_db_user}}:{{postgres_db_password}}@{{postgres_endpoint}}:{{postgres_port}}"
POSTGRES_STRING: "postgres://{{postgres_db_user}}:{{postgres_db_password}}@{{postgres_endpoint}}:{{postgres_port}}/{{ postgres_db_name }}"
{% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %}
imagePullSecrets: []

View file

@ -6,6 +6,7 @@ image:
env:
AWS_ACCESS_KEY_ID: "{{ minio_access_key }}"
AWS_SECRET_ACCESS_KEY: "{{ minio_secret_key }}"
S3_BUCKET_ASSETS: "{{ assets_bucket }}"
LICENSE_KEY: "{{ enterprise_edition_license }}"
AWS_ENDPOINT: "{{ s3_endpoint }}"
AWS_REGION: "{{ aws_region }}"

View file

@ -12,12 +12,11 @@ env:
S3_SECRET: "{{ minio_secret_key }}"
sourcemaps_bucket_key: "{{ minio_access_key }}"
sourcemaps_bucket_secret: "{{ minio_secret_key }}"
S3_HOST: "https://{{ domain_name }}"
SITE_URL: "https://{{ domain_name }}"
jwt_secret: "{{ jwt_secret_key }}"
pg_host: "{{ postgres_endpoint }}"
pg_port: "{{ postgres_port }}"
pg_dbname: "{{ postgres_port }}"
pg_dbname: "{{ postgres_db_name }}"
pg_user: "{{ postgres_db_user }}"
pg_password: "{{ postgres_db_password }}"
EMAIL_HOST: "{{ email_host }}"
@ -29,8 +28,18 @@ env:
EMAIL_SSL_KEY: "{{ email_ssl_key }}"
EMAIL_SSL_CERT: "{{ email_ssl_cert }}"
EMAIL_FROM: "{{ email_from }}"
AWS_DEFAULT_REGION: "{{ aws_default_region }}"
sessions_region: "{{ aws_default_region }}"
AWS_DEFAULT_REGION: "{{ aws_region }}"
sessions_region: "{{ aws_region }}"
sessions_bucket: "{{ recordings_bucket }}"
sourcemaps_bucket: "{{ sourcemaps_bucket }}"
js_cache_bucket: "{{ assets_bucket }}"
# In case of minio, the instance is running inside kubernetes,
# which is accessible via nginx ingress.
{% if s3_endpoint == "http://minio.db.svc.cluster.local:9000" %}
S3_HOST: "https://{{ domain_name }}"
{% else %}
S3_HOST: "{{ s3_endpoint }}"
{% endif %}
{% if env is defined and env.chalice is defined and env.chalice%}
{{ env.chalice | to_nice_yaml | trim | indent(2) }}
{% endif %}

View file

@ -5,7 +5,7 @@ image:
{% endif %}
env:
LICENSE_KEY: "{{ enterprise_edition_license }}"
POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_password }}@{{ postgres_endpoint }}:{{ postgres_port }}"
POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}/{{ postgres_db_name }}"
REDIS_STRING: "{{ redis_endpoint }}"
KAFKA_SERVERS: "{{ kafka_endpoint }}"
KAFKA_USE_SSL: "{{ kafka_ssl }}"

View file

@ -8,7 +8,7 @@ env:
AWS_SECRET_ACCESS_KEY: "{{ minio_secret_key }}"
LICENSE_KEY: "{{ enterprise_edition_license }}"
AWS_REGION: "{{ aws_region }}"
POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_password }}@{{ postgres_endpoint }}:{{ postgres_port }}"
POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}/{{ postgres_db_name }}"
REDIS_STRING: "{{ redis_endpoint }}"
KAFKA_SERVERS: "{{ kafka_endpoint }}"
KAFKA_USE_SSL: "{{ kafka_ssl }}"

View file

@ -5,7 +5,7 @@ image:
{% endif %}
env:
LICENSE_KEY: "{{ enterprise_edition_license }}"
POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_password }}@{{ postgres_endpoint }}:{{ postgres_port }}"
POSTGRES_STRING: "postgres://{{ postgres_db_user }}:{{ postgres_db_password }}@{{ postgres_endpoint }}:{{ postgres_port }}/{{ postgres_db_name }}"
#
REDIS_STRING: "{{ redis_endpoint }}"
KAFKA_SERVERS: "{{ kafka_endpoint }}"

View file

@ -10,6 +10,8 @@ env:
AWS_ENDPOINT: "{{ s3_endpoint }}"
AWS_REGION_WEB: "{{ aws_region }}"
AWS_REGION_IOS: "{{ aws_region }}"
S3_BUCKET_WEB: "{{ recordings_bucket }}"
S3_BUCKET_IOS: "{{ recordings_bucket }}"
REDIS_STRING: "{{ redis_endpoint }}"
KAFKA_SERVERS: "{{ kafka_endpoint }}"
KAFKA_USE_SSL: "{{ kafka_ssl }}"

View file

@ -90,6 +90,9 @@ db_resource_override:
## Sane defaults
s3_endpoint: "http://minio.db.svc.cluster.local:9000"
aws_region: "us-east-1"
assets_bucket: sessions-assets
recordings_bucket: mobs
sourcemaps_bucket: sourcemaps
kafka_endpoint: kafka.db.svc.cluster.local:9042
kafka_ssl: false
postgres_endpoint: postgresql.db.svc.cluster.local

View file

@ -1,6 +1,6 @@
{
"name": "@openreplay/tracker",
"version": "3.4.1",
"version": "3.4.4",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@ -293,11 +293,6 @@
"to-fast-properties": "^2.0.0"
}
},
"@medv/finder": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@medv/finder/-/finder-2.0.0.tgz",
"integrity": "sha512-gV4jOsGpiWNDGd8Dw7tod1Fc9Gc7StaOT4oZ/6srHRWtsHU+HYWzmkYsa3Qy/z0e9tY1WpJ9wWdBFGskfbzoug=="
},
"@nodelib/fs.scandir": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.3.tgz",

View file

@ -1,7 +1,7 @@
{
"name": "@openreplay/tracker",
"description": "The OpenReplay tracker main package",
"version": "3.4.4",
"version": "3.4.5",
"keywords": [
"logging",
"replay"
@ -38,7 +38,6 @@
"typescript": "^4.3.4"
},
"dependencies": {
"@medv/finder": "^2.0.0",
"error-stack-parser": "^2.0.6"
},
"engines": {

View file

@ -131,7 +131,7 @@ export default class App {
});
}
if(this.options.__debug_log) {
warn("OpenReplay errror: ", context, e)
warn("OpenReplay error: ", context, e)
}
}
@ -153,9 +153,13 @@ export default class App {
}
}
addCommitCallback(cb: CommitCallback): void {
attachCommitCallback(cb: CommitCallback): void {
this.commitCallbacks.push(cb)
}
// @Deprecated (TODO: remove in 3.5.*)
addCommitCallback(cb: CommitCallback): void {
this.attachCommitCallback(cb)
}
safe<T extends (...args: any[]) => void>(fn: T): T {

View file

@ -411,11 +411,13 @@ export default class Observer {
private iframeObservers: Observer[] = [];
private handleIframe(iframe: HTMLIFrameElement): void {
let context: Window | null = null
const handle = () => {
const context = iframe.contentWindow as Window | null
const id = this.app.nodes.getID(iframe)
if (!context || id === undefined) { return }
if (id === undefined) { return }
if (iframe.contentWindow === context) { return }
context = iframe.contentWindow as Window | null;
if (!context) { return }
const observer = new Observer(this.app, this.options, context)
this.iframeObservers.push(observer)
observer.observeIframe(id, context)

View file

@ -23,11 +23,11 @@ import { Options as AppOptions } from './app';
import { Options as ConsoleOptions } from './modules/console';
import { Options as ExceptionOptions } from './modules/exception';
import { Options as InputOptions } from './modules/input';
import { Options as MouseOptions } from './modules/mouse';
import { Options as PerformanceOptions } from './modules/performance';
import { Options as TimingOptions } from './modules/timing';
export type Options = Partial<
AppOptions & ConsoleOptions & ExceptionOptions & InputOptions & MouseOptions & PerformanceOptions & TimingOptions
AppOptions & ConsoleOptions & ExceptionOptions & InputOptions & PerformanceOptions & TimingOptions
> & {
projectID?: number; // For the back compatibility only (deprecated)
projectKey: string;
@ -98,7 +98,7 @@ export default class API {
Exception(this.app, options);
Img(this.app);
Input(this.app, options);
Mouse(this.app, options);
Mouse(this.app);
Timing(this.app, options);
Performance(this.app, options);
Scroll(this.app);

View file

@ -110,7 +110,7 @@ export default function (app: App, opts: Partial<Options>): void {
return;
}
const sendConsoleLog = app.safe((level: string, args: any[]): void =>
const sendConsoleLog = app.safe((level: string, args: unknown[]): void =>
app.send(new ConsoleLog(level, printf(args))),
);
@ -121,18 +121,36 @@ export default function (app: App, opts: Partial<Options>): void {
app.attachStartCallback(reset);
app.ticker.attach(reset, 33, false);
options.consoleMethods.forEach((method) => {
if (consoleMethods.indexOf(method) === -1) {
console.error(`OpenReplay: unsupported console method "${method}"`);
return;
}
const fn = (console as any)[method];
(console as any)[method] = function (...args: any[]): void {
fn.apply(this, args);
if (n++ > options.consoleThrottling) {
const patchConsole = (console: Console) =>
options.consoleMethods!.forEach((method) => {
if (consoleMethods.indexOf(method) === -1) {
console.error(`OpenReplay: unsupported console method "${method}"`);
return;
}
sendConsoleLog(method, args);
};
});
const fn = (console as any)[method];
(console as any)[method] = function (...args: unknown[]): void {
fn.apply(this, args);
if (n++ > options.consoleThrottling) {
return;
}
sendConsoleLog(method, args);
};
});
patchConsole(window.console);
app.nodes.attachNodeCallback(node => {
if (node instanceof HTMLIFrameElement) {
let context = node.contentWindow
if (context) {
patchConsole((context as (Window & typeof globalThis)).console)
}
app.attachEventListener(node, "load", () => {
if (node.contentWindow !== context) {
context = node.contentWindow
patchConsole((context as (Window & typeof globalThis)).console)
}
})
}
})
}

View file

@ -47,5 +47,5 @@ export default function (app: App): void {
const observer: PerformanceObserver = new PerformanceObserver((list) =>
list.getEntries().forEach(longTask),
);
observer.observe({ entryTypes: ['longtask'], buffered: true });
observer.observe({ entryTypes: ['longtask'] });
}

View file

@ -1,10 +1,30 @@
import type { Options as FinderOptions } from '../vendors/finder/finder';
import { finder } from '../vendors/finder/finder';
import { normSpaces, hasOpenreplayAttribute, getLabelAttribute } from '../utils';
import App from '../app';
import { MouseMove, MouseClick } from '../../messages';
import { getInputLabel } from './input';
// Builds a CSS-like selector path for the given element, used as a stable
// identifier for heatmap click targets. Walks up the tree joining
// "tag.class1.class2" segments with " > ", stopping early at the first
// ancestor that has an id (anchored as "#id") or at document.body.
function _getSelector(target: Element): string {
  let el: Element | null = target
  let selector: string | null = null
  do {
    // An id anchors the selector — no need to walk further up.
    if (el.id) {
      return `#${el.id}` + (selector ? ` > ${selector}` : '')
    }
    // BUGFIX: on SVG elements `className` is an SVGAnimatedString, not a
    // string, so calling .split on it threw; guard and fall back to the
    // bare tag name in that case.
    const classString = typeof el.className === 'string' ? el.className : ''
    selector =
      classString.split(' ')
        .map(cn => cn.trim())
        .filter(cn => cn !== '')
        .reduce((sel, cn) => `${sel}.${cn}`, el.tagName.toLowerCase()) +
      (selector ? ` > ${selector}` : '')
    if (el === document.body) {
      return selector
    }
    el = el.parentElement
    // The body case returns inside the loop, so only null ends the walk
    // (e.g. for detached elements).
  } while (el !== null)
  return selector
}
//TODO: fix (typescript doesn't allow work when the guard is inside the function)
function getTarget(target: EventTarget | null): Element | null {
if (target instanceof Element) {
return _getTarget(target);
@ -72,26 +92,11 @@ function getTargetLabel(target: Element): string {
return '';
}
interface HeatmapsOptions {
finder: FinderOptions,
}
export interface Options {
heatmaps: boolean | HeatmapsOptions;
}
export default function (app: App, opts: Partial<Options>): void {
const options: Options = Object.assign(
{
heatmaps: false // {
// finder: {
// threshold: 5,
// maxNumberOfTries: 600,
// },
// },
},
opts,
);
export default function (app: App): void {
// const options: Options = Object.assign(
// {},
// opts,
// );
let mousePositionX = -1;
let mousePositionY = -1;
@ -115,9 +120,7 @@ export default function (app: App, opts: Partial<Options>): void {
const selectorMap: {[id:number]: string} = {};
function getSelector(id: number, target: Element): string {
if (options.heatmaps === false) { return '' }
return selectorMap[id] = selectorMap[id] ||
finder(target, options.heatmaps === true ? undefined : options.heatmaps.finder);
return selectorMap[id] = selectorMap[id] || _getSelector(target);
}
app.attachEventListener(