From 4e662aafd650f645ca5cd2e376e610b5e1eabdea Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 16 Nov 2022 13:05:45 +0100 Subject: [PATCH 01/70] feat(DB): remove unused request's indexes --- .../schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql | 4 ++++ .../schema/db/init_dbs/postgresql/init_schema.sql | 12 +----------- .../schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql | 4 ++++ .../schema/db/init_dbs/postgresql/init_schema.sql | 10 +--------- 4 files changed, 10 insertions(+), 20 deletions(-) diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql index 65db23b07..3315df03f 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql @@ -70,4 +70,8 @@ WHERE deleted_at IS NOT NULL; UPDATE roles SET permissions=array_remove(permissions, 'ERRORS'); +DROP INDEX IF EXISTS events_common.requests_url_idx; +DROP INDEX IF EXISTS events_common.requests_url_gin_idx; +DROP INDEX IF EXISTS events_common.requests_url_gin_idx2; + COMMIT; \ No newline at end of file diff --git a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql index f486c731e..78026e245 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -1221,19 +1221,9 @@ $$ query text NULL, PRIMARY KEY (session_id, timestamp, seq_index) ); - CREATE INDEX IF NOT EXISTS requests_url_idx ON events_common.requests (url); + CREATE INDEX IF NOT EXISTS requests_duration_idx ON events_common.requests (duration); - CREATE INDEX IF NOT EXISTS requests_url_gin_idx ON events_common.requests USING GIN (url gin_trgm_ops); CREATE INDEX IF NOT EXISTS requests_timestamp_idx ON events_common.requests (timestamp); - CREATE INDEX IF NOT EXISTS requests_url_gin_idx2 ON events_common.requests USING GIN (RIGHT(url, - length(url) - - (CASE - WHEN url LIKE 'http://%' - THEN 7 - WHEN url LIKE 'https://%' - THEN 8 - ELSE 0 END)) - gin_trgm_ops); CREATE INDEX IF NOT EXISTS requests_timestamp_session_id_failed_idx ON events_common.requests (timestamp, session_id) WHERE success = FALSE; CREATE INDEX IF NOT EXISTS requests_request_body_nn_gin_idx ON events_common.requests USING GIN (request_body gin_trgm_ops) WHERE request_body IS NOT NULL; CREATE INDEX IF NOT EXISTS requests_response_body_nn_gin_idx ON events_common.requests USING GIN (response_body gin_trgm_ops) WHERE response_body IS NOT NULL; diff --git a/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql b/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql index 638d0774b..a9ef541a5 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql @@ -60,4 +60,8 @@ BEGIN END; $$ LANGUAGE plpgsql; +DROP INDEX IF EXISTS events_common.requests_url_idx; +DROP INDEX IF EXISTS events_common.requests_url_gin_idx; +DROP INDEX IF EXISTS events_common.requests_url_gin_idx2; + COMMIT; \ No newline at end of file diff --git a/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/scripts/schema/db/init_dbs/postgresql/init_schema.sql index c58e65d4c..a57978965 100644 --- a/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -596,17 +596,9 @@ $$ query text NULL, PRIMARY KEY (session_id, timestamp, seq_index) ); - CREATE INDEX requests_url_idx ON events_common.requests (url); + CREATE INDEX requests_duration_idx 
ON events_common.requests (duration); - CREATE INDEX requests_url_gin_idx ON events_common.requests USING GIN (url gin_trgm_ops); CREATE INDEX requests_timestamp_idx ON events_common.requests (timestamp); - CREATE INDEX requests_url_gin_idx2 ON events_common.requests USING GIN (RIGHT(url, length(url) - (CASE - WHEN url LIKE 'http://%' - THEN 7 - WHEN url LIKE 'https://%' - THEN 8 - ELSE 0 END)) - gin_trgm_ops); CREATE INDEX requests_timestamp_session_id_failed_idx ON events_common.requests (timestamp, session_id) WHERE success = FALSE; CREATE INDEX requests_request_body_nn_gin_idx ON events_common.requests USING GIN (request_body gin_trgm_ops) WHERE request_body IS NOT NULL; CREATE INDEX requests_response_body_nn_gin_idx ON events_common.requests USING GIN (response_body gin_trgm_ops) WHERE response_body IS NOT NULL; From c3f9871e23a807c2fa0c8c4a3b553dcd62cd3859 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Wed, 16 Nov 2022 13:35:11 +0100 Subject: [PATCH 02/70] fix(ui) - network timeline --- .../shared/DevTools/TimeTable/BarRow.tsx | 74 +++++++++---------- 1 file changed, 36 insertions(+), 38 deletions(-) diff --git a/frontend/app/components/shared/DevTools/TimeTable/BarRow.tsx b/frontend/app/components/shared/DevTools/TimeTable/BarRow.tsx index 2b151cf14..f283eb7ac 100644 --- a/frontend/app/components/shared/DevTools/TimeTable/BarRow.tsx +++ b/frontend/app/components/shared/DevTools/TimeTable/BarRow.tsx @@ -27,6 +27,40 @@ const BarRow = ({ }: Props) => { const timeOffset = time - timestart; ttfb = ttfb || 0; + // TODO fix the tooltip + + const content = ( + + {ttfb != null && ( +
+
{'Waiting (TTFB)'}
+
+
+
+
{formatTime(ttfb)}
+
+ )} +
+
{'Content Download'}
+
+
+
+
{formatTime(duration - ttfb)}
+
+ + ); const trigger = (
- - {ttfb != null && ( -
-
{'Waiting (TTFB)'}
-
-
-
-
{formatTime(ttfb)}
-
- )} -
-
{'Content Download'}
-
-
-
-
{formatTime(duration - ttfb)}
-
- - } - placement="top" - > - {trigger} - +
+ {trigger}
); }; From 3ad610cf750f0a632992aeba20bc044eb4c4b11e Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Wed, 16 Nov 2022 14:22:24 +0100 Subject: [PATCH 03/70] chore(helm): clean up unused bucket Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay/files/minio.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/helmcharts/openreplay/files/minio.sh b/scripts/helmcharts/openreplay/files/minio.sh index 0e7b4c506..24e07fb17 100644 --- a/scripts/helmcharts/openreplay/files/minio.sh +++ b/scripts/helmcharts/openreplay/files/minio.sh @@ -5,7 +5,7 @@ set -e cd /tmp -buckets=("mobs" "sessions-assets" "static" "sourcemaps" "sessions-mobile-assets" "quickwit" "vault-data") +buckets=("mobs" "sessions-assets" "sourcemaps" "sessions-mobile-assets" "quickwit" "vault-data") mc alias set minio http://minio.db.svc.cluster.local:9000 $MINIO_ACCESS_KEY $MINIO_SECRET_KEY @@ -35,7 +35,6 @@ mc ilm import minio/mobs < /tmp/lifecycle.json || true mc mb minio/frontend || true mc policy set download minio/frontend || true mc policy set download minio/sessions-assets || true -mc policy set download minio/static || true } # /bin/bash kafka.sh migrate $migration_versions From 00a9d9753f21d0e16c315fbb4cbfb3b27eff17cf Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Wed, 16 Nov 2022 14:28:24 +0100 Subject: [PATCH 04/70] docs(helm): Write warning for public bucket Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay/files/minio.sh | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/scripts/helmcharts/openreplay/files/minio.sh b/scripts/helmcharts/openreplay/files/minio.sh index 24e07fb17..fc0a7238f 100644 --- a/scripts/helmcharts/openreplay/files/minio.sh +++ b/scripts/helmcharts/openreplay/files/minio.sh @@ -31,10 +31,14 @@ mc mb minio/${bucket} || true done mc ilm import minio/mobs < /tmp/lifecycle.json || true -# Creating frontend bucket +##################################################### +# Creating frontend bucket; Do not change this block! +# !! PUBLIC BUCKETS !! 
+##################################################### mc mb minio/frontend || true mc policy set download minio/frontend || true mc policy set download minio/sessions-assets || true + } # /bin/bash kafka.sh migrate $migration_versions From 0660e29a12d1e8120cd89347296795acbfefc025 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 16 Nov 2022 16:25:46 +0100 Subject: [PATCH 05/70] feat(chalice): changed EFS file patterns --- api/env.default | 4 ++-- ee/api/env.default | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/api/env.default b/api/env.default index 82419328c..55aefdc35 100644 --- a/api/env.default +++ b/api/env.default @@ -42,8 +42,8 @@ sourcemaps_reader=http://sourcemaps-reader-openreplay.app.svc.cluster.local:9000 STAGE=default-foss version_number=1.4.0 FS_DIR=/mnt/efs -EFS_SESSION_MOB_PATTERN=%(sessionId)s/dom.mob -EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob +EFS_SESSION_MOB_PATTERN=%(sessionId)s +EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)sdevtools SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mobs diff --git a/ee/api/env.default b/ee/api/env.default index e707bec57..7fd61f4a0 100644 --- a/ee/api/env.default +++ b/ee/api/env.default @@ -61,8 +61,8 @@ EXP_ALERTS=false EXP_FUNNELS=false EXP_RESOURCES=true TRACE_PERIOD=300 -EFS_SESSION_MOB_PATTERN=%(sessionId)s/dom.mob -EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob +EFS_SESSION_MOB_PATTERN=%(sessionId)s +EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)sdevtools SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mobs From 5e616f679d3fd78304e09d67c1db9b04ceb78904 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Wed, 16 Nov 2022 18:28:34 +0100 Subject: [PATCH 06/70] fix(ui) - info-circle icon --- .../components/EventRow/EventRow.tsx | 6 +++--- frontend/app/components/ui/SVG.tsx | 2 +- frontend/app/svg/icons/info-circle.svg | 15 ++++----------- 3 files changed, 8 insertions(+), 15 deletions(-) diff --git a/frontend/app/components/Session_/OverviewPanel/components/EventRow/EventRow.tsx b/frontend/app/components/Session_/OverviewPanel/components/EventRow/EventRow.tsx index c5545fbb6..27046bfa3 100644 --- a/frontend/app/components/Session_/OverviewPanel/components/EventRow/EventRow.tsx +++ b/frontend/app/components/Session_/OverviewPanel/components/EventRow/EventRow.tsx @@ -46,7 +46,7 @@ const EventRow = React.memo((props: Props) => { > {title}
- {message ? : null} + {message ? : null}
{isGraph ? ( @@ -78,9 +78,9 @@ const EventRow = React.memo((props: Props) => { export default EventRow; -function RowInfo({ message, zIndex }: any) { +function RowInfo({ message }: any) { return ( - + ); diff --git a/frontend/app/components/ui/SVG.tsx b/frontend/app/components/ui/SVG.tsx index 58e73ad61..50a451917 100644 --- a/frontend/app/components/ui/SVG.tsx +++ b/frontend/app/components/ui/SVG.tsx @@ -256,7 +256,7 @@ const SVG = (props: Props) => { case 'id-card': return ; case 'image': return ; case 'info-circle-fill': return ; - case 'info-circle': return ; + case 'info-circle': return ; case 'info-square': return ; case 'info': return ; case 'inspect': return ; diff --git a/frontend/app/svg/icons/info-circle.svg b/frontend/app/svg/icons/info-circle.svg index 035661835..42dc99c8f 100644 --- a/frontend/app/svg/icons/info-circle.svg +++ b/frontend/app/svg/icons/info-circle.svg @@ -1,11 +1,4 @@ - - - - - - - - - - - + + + + \ No newline at end of file From 9542dcad93cac8fe33a16c6b57b61d7d9eb23381 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 17 Nov 2022 12:43:00 +0100 Subject: [PATCH 07/70] feat(chalice): fixed sourcemaps process non js --- api/chalicelib/core/sourcemaps.py | 1 - 1 file changed, 1 deletion(-) diff --git a/api/chalicelib/core/sourcemaps.py b/api/chalicelib/core/sourcemaps.py index 921649d97..597b260d3 100644 --- a/api/chalicelib/core/sourcemaps.py +++ b/api/chalicelib/core/sourcemaps.py @@ -100,7 +100,6 @@ def get_traces_group(project_id, payload): and not (file_url[:params_idx] if params_idx > -1 else file_url).endswith(".js"): print(f"{u['absPath']} sourcemap is not a JS file") payloads[key] = None - continue if key not in payloads: file_exists_in_bucket = len(file_url) > 0 and s3.exists(config('sourcemaps_bucket'), key) From 84d4de65a3d8cf09472cff2efa09d2cd44eca54f Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Thu, 17 Nov 2022 13:38:22 +0100 Subject: [PATCH 08/70] ci(fix): fixing ee build tag Signed-off-by: rjshrjndrn --- .github/workflows/workers-ee.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/workers-ee.yaml b/.github/workflows/workers-ee.yaml index 35580b5a9..3cc9282bc 100644 --- a/.github/workflows/workers-ee.yaml +++ b/.github/workflows/workers-ee.yaml @@ -94,7 +94,7 @@ jobs: for image in $(cat /tmp/images_to_build.txt); do echo "Bulding $image" - PUSH_IMAGE=0 bash -x ./build.sh skip $image + PUSH_IMAGE=0 bash -x ./build.sh ee $image [[ "x$skip_security_checks" == "xtrue" ]] || { curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./ ./trivy image --exit-code 1 --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG @@ -105,7 +105,7 @@ jobs: } && { echo "Skipping Security Checks" } - PUSH_IMAGE=1 bash -x ./build.sh skip $image + PUSH_IMAGE=1 bash -x ./build.sh ee $image echo "::set-output name=image::$DOCKER_REPO/$image:$IMAGE_TAG" done From 91709ac90981916cdcbc61a93852ffb34348417a Mon Sep 17 00:00:00 2001 From: Alexander Date: Thu, 17 Nov 2022 16:15:25 +0100 Subject: [PATCH 09/70] [Sink] splitting mob files without folders (#824) * feat(backend): split mob file into 2 without folders --- backend/cmd/sink/main.go | 41 ++--- backend/internal/config/storage/config.go | 2 +- backend/internal/sink/oswriter/oswriter.go | 170 ++++++++++----------- backend/internal/storage/storage.go | 65 ++++---- 4 files changed, 130 insertions(+), 148 deletions(-) diff --git a/backend/cmd/sink/main.go 
b/backend/cmd/sink/main.go index d3cc99e40..675d965c9 100644 --- a/backend/cmd/sink/main.go +++ b/backend/cmd/sink/main.go @@ -3,10 +3,8 @@ package main import ( "context" "log" - "openreplay/backend/pkg/pprof" "os" "os/signal" - "strings" "syscall" "time" @@ -16,6 +14,7 @@ import ( "openreplay/backend/internal/storage" "openreplay/backend/pkg/messages" "openreplay/backend/pkg/monitoring" + "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/url/assets" ) @@ -64,6 +63,9 @@ func main() { if err := producer.Produce(cfg.TopicTrigger, msg.SessionID(), msg.Encode()); err != nil { log.Printf("can't send SessionEnd to trigger topic: %s; sessID: %d", err, msg.SessionID()) } + if err := writer.Close(msg.SessionID()); err != nil { + log.Printf("can't close session file: %s", err) + } return } @@ -98,39 +100,18 @@ func main() { // Write encoded message with index to session file data := msg.EncodeWithIndex() if data == nil { - log.Printf("can't encode with index, err: %s", err) return } - wasWritten := false // To avoid timestamp duplicates in original mob file + + // Write message to file if messages.IsDOMType(msg.TypeID()) { if err := writer.WriteDOM(msg.SessionID(), data); err != nil { - if strings.Contains(err.Error(), "not a directory") { - // Trying to write data to mob file by original path - oldErr := writer.WriteMOB(msg.SessionID(), data) - if oldErr != nil { - log.Printf("MOB Writeer error: %s, prev DOM error: %s, info: %s", oldErr, err, msg.Meta().Batch().Info()) - } else { - wasWritten = true - } - } else { - log.Printf("DOM Writer error: %s, info: %s", err, msg.Meta().Batch().Info()) - } + log.Printf("Writer error: %v\n", err) } } if !messages.IsDOMType(msg.TypeID()) || msg.TypeID() == messages.MsgTimestamp { - // TODO: write only necessary timestamps if err := writer.WriteDEV(msg.SessionID(), data); err != nil { - if strings.Contains(err.Error(), "not a directory") { - if !wasWritten { - // Trying to write data to mob file by original path - oldErr := writer.WriteMOB(msg.SessionID(), data) - if oldErr != nil { - log.Printf("MOB Writeer error: %s, prev DEV error: %s, info: %s", oldErr, err, msg.Meta().Batch().Info()) - } - } - } else { - log.Printf("Devtools Writer error: %s, info: %s", err, msg.Meta().Batch().Info()) - } + log.Printf("Writer error: %v\n", err) } } @@ -167,13 +148,17 @@ func main() { consumer.Close() os.Exit(0) case <-tick: + counter.Print() + s := time.Now() if err := writer.SyncAll(); err != nil { log.Fatalf("sync error: %v\n", err) } - counter.Print() + dur := time.Now().Sub(s).Milliseconds() + s = time.Now() if err := consumer.Commit(); err != nil { log.Printf("can't commit messages: %s", err) } + log.Printf("sync: %d, commit: %d, writer: %s", dur, time.Now().Sub(s).Milliseconds(), writer.Info()) default: err := consumer.ConsumeNext() if err != nil { diff --git a/backend/internal/config/storage/config.go b/backend/internal/config/storage/config.go index fdf29b7db..6083f0249 100644 --- a/backend/internal/config/storage/config.go +++ b/backend/internal/config/storage/config.go @@ -11,7 +11,6 @@ type Config struct { S3Region string `env:"AWS_REGION_WEB,required"` S3Bucket string `env:"S3_BUCKET_WEB,required"` FSDir string `env:"FS_DIR,required"` - FSCleanHRS int `env:"FS_CLEAN_HRS,required"` FileSplitSize int `env:"FILE_SPLIT_SIZE,required"` RetryTimeout time.Duration `env:"RETRY_TIMEOUT,default=2m"` GroupStorage string `env:"GROUP_STORAGE,required"` @@ -21,6 +20,7 @@ type Config struct { DeleteTimeout time.Duration 
`env:"DELETE_TIMEOUT,default=48h"` ProducerCloseTimeout int `env:"PRODUCER_CLOSE_TIMEOUT,default=15000"` UseFailover bool `env:"USE_FAILOVER,default=false"` + MaxFileSize int64 `env:"MAX_FILE_SIZE,default=524288000"` } func New() *Config { diff --git a/backend/internal/sink/oswriter/oswriter.go b/backend/internal/sink/oswriter/oswriter.go index 070540b1d..ec42d7668 100644 --- a/backend/internal/sink/oswriter/oswriter.go +++ b/backend/internal/sink/oswriter/oswriter.go @@ -1,38 +1,53 @@ package oswriter import ( - "errors" - "log" + "fmt" "math" "os" - "path/filepath" "strconv" "time" ) +type FileType int + +const ( + DOM FileType = 1 + DEV FileType = 2 +) + type Writer struct { - ulimit int - dir string - files map[string]*os.File - atimes map[string]int64 + ulimit int + dir string + files map[uint64]*os.File + devtools map[uint64]*os.File + atimes map[uint64]int64 } func NewWriter(ulimit uint16, dir string) *Writer { return &Writer{ - ulimit: int(ulimit), - dir: dir + "/", - files: make(map[string]*os.File), - atimes: make(map[string]int64), + ulimit: int(ulimit), + dir: dir + "/", + files: make(map[uint64]*os.File, 1024), + devtools: make(map[uint64]*os.File, 1024), + atimes: make(map[uint64]int64, 1024), } } -func (w *Writer) open(fname string) (*os.File, error) { - file, ok := w.files[fname] - if ok { - return file, nil +func (w *Writer) open(key uint64, mode FileType) (*os.File, error) { + if mode == DOM { + file, ok := w.files[key] + if ok { + return file, nil + } + } else { + file, ok := w.devtools[key] + if ok { + return file, nil + } } - if len(w.atimes) == w.ulimit { - var m_k string + + if len(w.atimes) >= w.ulimit { + var m_k uint64 var m_t int64 = math.MaxInt64 for k, t := range w.atimes { if t < m_t { @@ -40,38 +55,30 @@ func (w *Writer) open(fname string) (*os.File, error) { m_t = t } } - if err := w.close(m_k); err != nil { + if err := w.Close(m_k); err != nil { return nil, err } } - - // mkdir if not exist - pathTo := w.dir + filepath.Dir(fname) - if info, err := os.Stat(pathTo); os.IsNotExist(err) { - if err := os.MkdirAll(pathTo, 0755); err != nil { - log.Printf("os.MkdirAll error: %s", err) - } - } else { - if err != nil { - return nil, err - } - if !info.IsDir() { - return nil, errors.New("not a directory") - } + filePath := w.dir + strconv.FormatUint(key, 10) + if mode == DEV { + filePath += "devtools" } - - file, err := os.OpenFile(w.dir+fname, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) + file, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) if err != nil { - log.Printf("os.OpenFile error: %s", err) return nil, err } - w.files[fname] = file - w.atimes[fname] = time.Now().Unix() + if mode == DOM { + w.files[key] = file + } else { + w.devtools[key] = file + } + w.atimes[key] = time.Now().Unix() return file, nil } -func (w *Writer) close(fname string) error { - file := w.files[fname] +func (w *Writer) Close(key uint64) error { + // Close dom file + file := w.files[key] if file == nil { return nil } @@ -81,23 +88,33 @@ func (w *Writer) close(fname string) error { if err := file.Close(); err != nil { return err } - delete(w.files, fname) - delete(w.atimes, fname) + delete(w.files, key) + delete(w.atimes, key) + // Close dev file + file = w.devtools[key] + if file == nil { + return nil + } + if err := file.Sync(); err != nil { + return err + } + if err := file.Close(); err != nil { + return err + } + delete(w.devtools, key) return nil } -func (w *Writer) WriteDOM(sid uint64, data []byte) error { - return w.write(strconv.FormatUint(sid, 
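
// Illustrative sketch, not part of the patch: the ulimit cap and the
// least-recently-used eviction in the writer's open() above exist because
// every tracked session can hold up to two files open (dom + devtools) while
// the process has a finite file-descriptor budget. On Linux that budget can
// be inspected like this (names below are illustrative only):
package sketch

import (
	"fmt"
	"syscall"
)

func printOpenFileLimit() {
	var rl syscall.Rlimit
	if err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rl); err != nil {
		fmt.Println("getrlimit:", err)
		return
	}
	fmt.Printf("open-file limit: soft=%d hard=%d\n", rl.Cur, rl.Max)
}
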
10)+"/dom.mob", data) +func (w *Writer) WriteDOM(key uint64, data []byte) error { + return w.Write(key, DOM, data) } -func (w *Writer) WriteDEV(sid uint64, data []byte) error { - return w.write(strconv.FormatUint(sid, 10)+"/devtools.mob", data) +func (w *Writer) WriteDEV(key uint64, data []byte) error { + return w.Write(key, DEV, data) } -func (w *Writer) WriteMOB(sid uint64, data []byte) error { - // Use session id as a file name without directory - fname := strconv.FormatUint(sid, 10) - file, err := w.openWithoutDir(fname) +func (w *Writer) Write(key uint64, mode FileType, data []byte) error { + file, err := w.open(key, mode) if err != nil { return err } @@ -105,49 +122,17 @@ func (w *Writer) WriteMOB(sid uint64, data []byte) error { return err } -func (w *Writer) write(fname string, data []byte) error { - file, err := w.open(fname) - if err != nil { - return err - } - _, err = file.Write(data) - return err -} - -func (w *Writer) openWithoutDir(fname string) (*os.File, error) { - file, ok := w.files[fname] - if ok { - return file, nil - } - if len(w.atimes) == w.ulimit { - var m_k string - var m_t int64 = math.MaxInt64 - for k, t := range w.atimes { - if t < m_t { - m_k = k - m_t = t - } - } - if err := w.close(m_k); err != nil { - return nil, err - } - } - - file, err := os.OpenFile(w.dir+fname, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) - if err != nil { - return nil, err - } - w.files[fname] = file - w.atimes[fname] = time.Now().Unix() - return file, nil -} - func (w *Writer) SyncAll() error { for _, file := range w.files { if err := file.Sync(); err != nil { return err } } + for _, file := range w.devtools { + if err := file.Sync(); err != nil { + return err + } + } return nil } @@ -161,6 +146,19 @@ func (w *Writer) CloseAll() error { } } w.files = nil + for _, file := range w.devtools { + if err := file.Sync(); err != nil { + return err + } + if err := file.Close(); err != nil { + return err + } + } + w.devtools = nil w.atimes = nil return nil } + +func (w *Writer) Info() string { + return fmt.Sprintf("dom: %d, dev: %d", len(w.files), len(w.devtools)) +} diff --git a/backend/internal/storage/storage.go b/backend/internal/storage/storage.go index 7fdc06c4f..9959cc4dd 100644 --- a/backend/internal/storage/storage.go +++ b/backend/internal/storage/storage.go @@ -13,7 +13,6 @@ import ( "openreplay/backend/pkg/storage" "os" "strconv" - "strings" "time" ) @@ -71,43 +70,46 @@ func New(cfg *config.Config, s3 *storage.S3, metrics *monitoring.Metrics) (*Stor } func (s *Storage) UploadSessionFiles(msg *messages.SessionEnd) error { - sessionDir := strconv.FormatUint(msg.SessionID(), 10) - if err := s.uploadKey(msg.SessionID(), sessionDir+"/dom.mob", true, 5, msg.EncryptionKey); err != nil { - oldErr := s.uploadKey(msg.SessionID(), sessionDir, true, 5, msg.EncryptionKey) - if oldErr != nil { - return fmt.Errorf("upload file error: %s. 
failed checking mob file using old path: %s", err, oldErr) - } - // Exit method anyway because we don't have dev tools separation in prev version - return nil - } - if err := s.uploadKey(msg.SessionID(), sessionDir+"/devtools.mob", false, 4, msg.EncryptionKey); err != nil { + if err := s.uploadKey(msg.SessionID(), "/dom.mob", true, 5, msg.EncryptionKey); err != nil { return err } + if err := s.uploadKey(msg.SessionID(), "/devtools.mob", false, 4, msg.EncryptionKey); err != nil { + log.Printf("can't find devtools for session: %d, err: %s", msg.SessionID(), err) + } return nil } -// TODO: make a bit cleaner -func (s *Storage) uploadKey(sessID uint64, key string, shouldSplit bool, retryCount int, encryptionKey string) error { +// TODO: make a bit cleaner. +// TODO: Of course, I'll do! +func (s *Storage) uploadKey(sessID uint64, suffix string, shouldSplit bool, retryCount int, encryptionKey string) error { if retryCount <= 0 { return nil } - start := time.Now() - file, err := os.Open(s.cfg.FSDir + "/" + key) + fileName := strconv.FormatUint(sessID, 10) + mobFileName := fileName + if suffix == "/devtools.mob" { + mobFileName += "devtools" + } + filePath := s.cfg.FSDir + "/" + mobFileName + + // Check file size before download into memory + info, err := os.Stat(filePath) + if err != nil { + if info.Size() > s.cfg.MaxFileSize { + log.Printf("big file, size: %d, session: %d", info.Size(), sessID) + return nil + } + } + file, err := os.Open(filePath) if err != nil { return fmt.Errorf("File open error: %v; sessID: %s, part: %d, sessStart: %s\n", - err, key, sessID%16, + err, fileName, sessID%16, time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))), ) } defer file.Close() - // Ignore "s" at the end of mob file name for "old" sessions - newVers := false - if strings.Contains(key, "/") { - newVers = true - } - var fileSize int64 = 0 fileInfo, err := file.Stat() if err != nil { @@ -117,17 +119,18 @@ func (s *Storage) uploadKey(sessID uint64, key string, shouldSplit bool, retryCo } var encryptedData []byte + fileName += suffix if shouldSplit { nRead, err := file.Read(s.startBytes) if err != nil { log.Printf("File read error: %s; sessID: %s, part: %d, sessStart: %s", err, - key, + fileName, sessID%16, time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))), ) time.AfterFunc(s.cfg.RetryTimeout, func() { - s.uploadKey(sessID, key, shouldSplit, retryCount-1, encryptionKey) + s.uploadKey(sessID, suffix, shouldSplit, retryCount-1, encryptionKey) }) return nil } @@ -146,11 +149,7 @@ func (s *Storage) uploadKey(sessID uint64, key string, shouldSplit bool, retryCo } // Compress and save to s3 startReader := bytes.NewBuffer(encryptedData) - startKey := key - if newVers { - startKey += "s" - } - if err := s.s3.Upload(s.gzipFile(startReader), startKey, "application/octet-stream", true); err != nil { + if err := s.s3.Upload(s.gzipFile(startReader), fileName+"s", "application/octet-stream", true); err != nil { log.Fatalf("Storage: start upload failed. 
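
// Illustrative sketch, not part of the patch: it restates the upload layout
// used by uploadKey() here — the first FileSplitSize bytes of a DOM recording
// are uploaded under "<key>s" and the remainder under "<key>e", each gzipped
// (encryption omitted). Names and error handling are simplified assumptions.
package sketch

import (
	"bytes"
	"compress/gzip"
	"io"
	"os"
)

func gzipAll(r io.Reader) (*bytes.Buffer, error) {
	var buf bytes.Buffer
	zw := gzip.NewWriter(&buf)
	if _, err := io.Copy(zw, r); err != nil {
		return nil, err
	}
	if err := zw.Close(); err != nil {
		return nil, err
	}
	return &buf, nil
}

// splitForUpload returns the "...s" part (first splitSize bytes) and the
// "...e" part (everything after it) of one session file.
func splitForUpload(path string, splitSize int64) (start, end *bytes.Buffer, err error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, nil, err
	}
	defer f.Close()
	if start, err = gzipAll(io.LimitReader(f, splitSize)); err != nil {
		return nil, nil, err
	}
	if end, err = gzipAll(f); err != nil {
		return nil, nil, err
	}
	return start, end, nil
}
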
%v\n", err) } // TODO: fix possible error (if we read less then FileSplitSize) @@ -161,7 +160,7 @@ func (s *Storage) uploadKey(sessID uint64, key string, shouldSplit bool, retryCo if err != nil { log.Printf("File read error: %s; sessID: %s, part: %d, sessStart: %s", err, - key, + fileName, sessID%16, time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))), ) @@ -183,7 +182,7 @@ func (s *Storage) uploadKey(sessID uint64, key string, shouldSplit bool, retryCo } // Compress and save to s3 endReader := bytes.NewBuffer(encryptedData) - if err := s.s3.Upload(s.gzipFile(endReader), key+"e", "application/octet-stream", true); err != nil { + if err := s.s3.Upload(s.gzipFile(endReader), fileName+"e", "application/octet-stream", true); err != nil { log.Fatalf("Storage: end upload failed. %v\n", err) } } @@ -195,7 +194,7 @@ func (s *Storage) uploadKey(sessID uint64, key string, shouldSplit bool, retryCo if err != nil { log.Printf("File read error: %s; sessID: %s, part: %d, sessStart: %s", err, - key, + fileName, sessID%16, time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))), ) @@ -216,7 +215,7 @@ func (s *Storage) uploadKey(sessID uint64, key string, shouldSplit bool, retryCo encryptedData = fileData } endReader := bytes.NewBuffer(encryptedData) - if err := s.s3.Upload(s.gzipFile(endReader), key+"s", "application/octet-stream", true); err != nil { + if err := s.s3.Upload(s.gzipFile(endReader), fileName, "application/octet-stream", true); err != nil { log.Fatalf("Storage: end upload failed. %v\n", err) } s.archivingTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds())) From 1e17c5d2f91293dcdd7a5b6c82187b06fcd7a706 Mon Sep 17 00:00:00 2001 From: Alex Kaminskii Date: Thu, 17 Nov 2022 16:48:15 +0100 Subject: [PATCH 10/70] fix(backend):hash payload to ID only in js-sourced errors --- backend/pkg/db/types/error-event.go | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/backend/pkg/db/types/error-event.go b/backend/pkg/db/types/error-event.go index 826cbba9e..bef9abd99 100644 --- a/backend/pkg/db/types/error-event.go +++ b/backend/pkg/db/types/error-event.go @@ -11,6 +11,8 @@ import ( . 
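
// Illustrative sketch, not part of the patch: the change that follows folds
// the payload's first stack frame into the error fingerprint only for
// js_exception errors, since backend payloads usually differ per occurrence
// and hashing them would split one logical error into many groups. The md5
// choice, names and frame format below are assumptions for the sketch.
package sketch

import (
	"crypto/md5"
	"encoding/hex"
	"strconv"
)

func groupingID(projectID uint32, source, name, message, topFrame string) string {
	h := md5.New()
	h.Write([]byte(source))
	h.Write([]byte(name))
	h.Write([]byte(message))
	if source == "js_exception" {
		// only the first frame (e.g. "app.js:42:13"), so deeper, noisier
		// frames do not change the ID
		h.Write([]byte(topFrame))
	}
	return strconv.FormatUint(uint64(projectID), 16) + hex.EncodeToString(h.Sum(nil))
}
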
"openreplay/backend/pkg/messages" ) +const SOURCE_JS = "js_exception" + type ErrorEvent struct { MessageID uint64 Timestamp uint64 @@ -64,7 +66,7 @@ func WrapJSException(m *JSException) *ErrorEvent { return &ErrorEvent{ MessageID: m.Meta().Index, Timestamp: uint64(m.Meta().Timestamp), - Source: "js_exception", + Source: SOURCE_JS, Name: m.Name, Message: m.Message, Payload: m.Payload, @@ -105,14 +107,16 @@ func (e *ErrorEvent) ID(projectID uint32) string { hash.Write([]byte(e.Source)) hash.Write([]byte(e.Name)) hash.Write([]byte(e.Message)) - frame, err := parseFirstFrame(e.Payload) - if err != nil { - log.Printf("Can't parse stackframe ((( %v ))): %v", e.Payload, err) - } - if frame != nil { - hash.Write([]byte(frame.FileName)) - hash.Write([]byte(strconv.Itoa(frame.LineNo))) - hash.Write([]byte(strconv.Itoa(frame.ColNo))) + if e.Source == SOURCE_JS { + frame, err := parseFirstFrame(e.Payload) + if err != nil { + log.Printf("Can't parse stackframe ((( %v ))): %v", e.Payload, err) + } + if frame != nil { + hash.Write([]byte(frame.FileName)) + hash.Write([]byte(strconv.Itoa(frame.LineNo))) + hash.Write([]byte(strconv.Itoa(frame.ColNo))) + } } return strconv.FormatUint(uint64(projectID), 16) + hex.EncodeToString(hash.Sum(nil)) } From 4865ee42d1fcb43826e1c0dd568a1d475296d1ce Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Thu, 17 Nov 2022 17:10:37 +0100 Subject: [PATCH 11/70] feat(backend): updated golang text lib --- backend/go.mod | 4 ++-- backend/go.sum | 6 ++++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/backend/go.mod b/backend/go.mod index 0eead389c..b1046b08e 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -69,8 +69,8 @@ require ( golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4 // indirect golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect golang.org/x/sync v0.0.0-20220513210516-0976fa681c29 // indirect - golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a // indirect - golang.org/x/text v0.3.7 // indirect + golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f // indirect + golang.org/x/text v0.4.0 // indirect golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd // indirect diff --git a/backend/go.sum b/backend/go.sum index dbaee7216..fea2aa1a3 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -678,8 +678,9 @@ golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220429233432-b5fbb4746d32/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a h1:dGzPydgVsqGcTRVwiLJ1jVbufYwmzD3LfVPLKsKg+0k= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f h1:v4INt8xihDGvnrfjMDVXGxw9wrfxYyCjk0KbXjhR55s= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod 
h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -690,8 +691,9 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.4.0 h1:BrVqGRd7+k1DiOgtnFvAkoQEWQvBc25ouMJM6429SFg= +golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= From 78a1f27984d53d3906e00be97b09d4d2657b9b17 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 17 Nov 2022 17:17:02 +0100 Subject: [PATCH 12/70] feat(chalice): changed EFS patterns --- api/env.default | 2 +- ee/api/env.default | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/env.default b/api/env.default index 55aefdc35..3ee65e89c 100644 --- a/api/env.default +++ b/api/env.default @@ -46,7 +46,7 @@ EFS_SESSION_MOB_PATTERN=%(sessionId)s EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)sdevtools SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe -DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mobs +DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob PRESIGNED_URL_EXPIRATION=3600 ASSIST_JWT_EXPIRATION=144000 ASSIST_JWT_SECRET= diff --git a/ee/api/env.default b/ee/api/env.default index 7fd61f4a0..f5574a8a1 100644 --- a/ee/api/env.default +++ b/ee/api/env.default @@ -65,7 +65,7 @@ EFS_SESSION_MOB_PATTERN=%(sessionId)s EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)sdevtools SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe -DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mobs +DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob PRESIGNED_URL_EXPIRATION=3600 ASSIST_JWT_EXPIRATION=144000 ASSIST_JWT_SECRET= \ No newline at end of file From 20f5a6e589ed0bb16d240b31ed38fc88c93394bb Mon Sep 17 00:00:00 2001 From: sylenien Date: Thu, 17 Nov 2022 18:45:21 +0100 Subject: [PATCH 13/70] change(ui): remove some calls in storage comp --- .../components/Session_/Storage/Storage.js | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/frontend/app/components/Session_/Storage/Storage.js b/frontend/app/components/Session_/Storage/Storage.js index ac9a0ec48..add061001 100644 --- a/frontend/app/components/Session_/Storage/Storage.js +++ b/frontend/app/components/Session_/Storage/Storage.js @@ -48,7 +48,6 @@ function getActionsName(type) { //@withEnumToggle('activeTab', 'setActiveTab', DIFF) export default class Storage extends React.PureComponent { lastBtnRef = React.createRef(); - state = { showDiffs: false }; focusNextButton() { if (this.lastBtnRef.current) { @@ -130,8 +129,8 @@ export default class Storage extends React.PureComponent { return ; } - renderItem(item, i, prevItem) { - const { type, listNow, list } = this.props; + renderItem(item, i, prevItem, listNowLen, listLen) { + const { type } = this.props; let src; let name; @@ -154,10 +153,6 @@ export default class Storage extends 
React.PureComponent { name = item.mutation.join(''); } - if (src !== null && !this.state.showDiffs) { - this.setState({ showDiffs: true }) - } - return (
{formatMs(item.duration)}
)}
- {i + 1 < listNow.length && ( + {i + 1 < listNowLen && ( )} - {i + 1 === listNow.length && i + 1 < list.length && ( + {i + 1 === listNowLen && i + 1 < listLen && ( @@ -205,13 +200,15 @@ export default class Storage extends React.PureComponent { const { type, listNow, list, hintIsHidden } = this.props; const showStore = type !== STORAGE_TYPES.MOBX; + const listNowLen = listNow.length + const listLen = list.length return ( {list.length > 0 && (
{showStore &&

{'STATE'}

} - {this.state.showDiffs ? ( + {type !== STORAGE_TYPES.ZUSTAND ? (

DIFFS

@@ -307,7 +304,7 @@ export default class Storage extends React.PureComponent {
{listNow.map((item, i) => - this.renderItem(item, i, i > 0 ? listNow[i - 1] : undefined) + this.renderItem(item, i, i > 0 ? listNow[i - 1] : undefined, listNowLen, listLen) )}
From 2746781d9bbe15e6eb9cd9cb3c4df804dd594b2d Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Thu, 17 Nov 2022 18:46:54 +0100 Subject: [PATCH 14/70] ci(actions): Nothing to build is not an error. Signed-off-by: rjshrjndrn --- .github/workflows/workers-ee.yaml | 2 +- .github/workflows/workers.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/workers-ee.yaml b/.github/workflows/workers-ee.yaml index 3cc9282bc..b2b202e93 100644 --- a/.github/workflows/workers-ee.yaml +++ b/.github/workflows/workers-ee.yaml @@ -86,7 +86,7 @@ jobs: ;; esac - [[ $(cat /tmp/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 1) + [[ $(cat /tmp/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 0) # # Pushing image to registry # diff --git a/.github/workflows/workers.yaml b/.github/workflows/workers.yaml index 341a196ad..4283adef2 100644 --- a/.github/workflows/workers.yaml +++ b/.github/workflows/workers.yaml @@ -86,7 +86,7 @@ jobs: ;; esac - [[ $(cat /tmp/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 1) + [[ $(cat /tmp/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 0) # # Pushing image to registry # From 4e96bda49b83c6f376eb5141301c8643cb92c5bd Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 17 Nov 2022 18:12:49 +0100 Subject: [PATCH 15/70] change(ui) - network replace 0b with x --- .../components/shared/DevTools/NetworkPanel/NetworkPanel.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx index 6cb6b66bc..e72b7170e 100644 --- a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx +++ b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx @@ -102,7 +102,7 @@ function renderSize(r: any) { if (r.responseBodySize) return formatBytes(r.responseBodySize); let triggerText; let content; - if (r.decodedBodySize == null) { + if (r.decodedBodySize == null || r.decodedBodySize === 0) { triggerText = 'x'; content = 'Not captured'; } else { From bd6fd210f555185a3b3b8870adb19438c21d59eb Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 17 Nov 2022 18:41:04 +0100 Subject: [PATCH 16/70] change(ui) - network details error value --- .../components/FetchBasicDetails/FetchBasicDetails.tsx | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/frontend/app/components/shared/FetchDetailsModal/components/FetchBasicDetails/FetchBasicDetails.tsx b/frontend/app/components/shared/FetchDetailsModal/components/FetchBasicDetails/FetchBasicDetails.tsx index 6c81a744c..3e3811b27 100644 --- a/frontend/app/components/shared/FetchDetailsModal/components/FetchBasicDetails/FetchBasicDetails.tsx +++ b/frontend/app/components/shared/FetchDetailsModal/components/FetchBasicDetails/FetchBasicDetails.tsx @@ -1,8 +1,7 @@ import React from 'react'; -import { Icon } from 'UI'; import { formatBytes } from 'App/utils'; import CopyText from 'Shared/CopyText'; -import { TYPES } from 'Types/session/resource'; +import cn from 'classnames'; interface Props { resource: any; @@ -46,7 +45,7 @@ function FetchBasicDetails({ resource }: Props) { {resource.status && (
Status
-
+
{resource.status === '200' && (
)} From 986bfaa65ea676ae269f5ac5c142efd255932ccb Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 18 Nov 2022 11:27:33 +0100 Subject: [PATCH 17/70] fix(ui) - funnel handle NaN --- frontend/app/mstore/types/funnelStage.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/frontend/app/mstore/types/funnelStage.ts b/frontend/app/mstore/types/funnelStage.ts index 85cbb7a64..7d79bb402 100644 --- a/frontend/app/mstore/types/funnelStage.ts +++ b/frontend/app/mstore/types/funnelStage.ts @@ -24,10 +24,11 @@ export default class FunnelStage { } fromJSON(json: any, total: number = 0, previousSessionCount: number = 0) { - this.dropDueToIssues = json.dropDueToIssues; + previousSessionCount = previousSessionCount || 0; + this.dropDueToIssues = json.dropDueToIssues || 0; this.dropPct = json.dropPct; this.operator = json.operator; - this.sessionsCount = json.sessionsCount; + this.sessionsCount = json.sessionsCount || 0; this.usersCount = json.usersCount; this.value = json.value; this.type = json.type; From 0d358b13d9e002482637b5b64f99198b09dc9961 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 18 Nov 2022 12:11:42 +0100 Subject: [PATCH 18/70] fix(ui) - metrics x axis tick formating --- frontend/app/mstore/types/widget.ts | 2 +- frontend/app/types/dashboard/helper.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/app/mstore/types/widget.ts b/frontend/app/mstore/types/widget.ts index c43cf76e2..68e2813f4 100644 --- a/frontend/app/mstore/types/widget.ts +++ b/frontend/app/mstore/types/widget.ts @@ -6,7 +6,7 @@ import Session from "App/mstore/types/session"; import Funnelissue from 'App/mstore/types/funnelIssue'; import { issueOptions } from 'App/constants/filterOptions'; import { FilterKey } from 'Types/filter/filterType'; -import Period, { LAST_24_HOURS, LAST_30_DAYS } from 'Types/app/period'; +import Period, { LAST_24_HOURS } from 'Types/app/period'; export default class Widget { public static get ID_KEY():string { return "metricId" } diff --git a/frontend/app/types/dashboard/helper.js b/frontend/app/types/dashboard/helper.js index 05e50d757..f6e819da8 100644 --- a/frontend/app/types/dashboard/helper.js +++ b/frontend/app/types/dashboard/helper.js @@ -27,7 +27,7 @@ const weekdays = [ 'Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat' ]; // const months = [ "January", "February" ]; export const getTimeString = (ts, period) => { const date = new Date(ts); - const diff = period.end - period.start; + const diff = period.endTimestamp - period.startTimestamp; if (diff <= DAY) { var isPM = date.getHours() >= 12; return `${ isPM ? date.getHours() - 12 : date.getHours() }:${ startWithZero(date.getMinutes()) } ${isPM? 
'pm' : 'am'}`; From 4943cfd75a58f1f1b7517ae39613c6f219f5e16b Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 18 Nov 2022 12:24:55 +0100 Subject: [PATCH 19/70] fix(ui) - console log underline --- .../app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx b/frontend/app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx index 145f14640..f52be65cd 100644 --- a/frontend/app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx +++ b/frontend/app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx @@ -1,6 +1,5 @@ import React, { useState } from 'react'; import cn from 'classnames'; -// import stl from '../console.module.css'; import { Icon } from 'UI'; import JumpButton from 'Shared/DevTools/JumpButton'; import { useModal } from 'App/components/Modal'; @@ -32,7 +31,8 @@ function ConsoleRow(props: Props) { info: !log.isYellow() && !log.isRed(), warn: log.isYellow(), error: log.isRed(), - 'cursor-pointer underline decoration-dotted decoration-gray-200': clickable, + 'cursor-pointer': clickable, + 'cursor-pointer underline decoration-dotted decoration-gray-200': !!log.errorId, } )} onClick={ From d8056a3490e5fd55497637392130d9860c6e51e8 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 18 Nov 2022 12:29:56 +0100 Subject: [PATCH 20/70] change(ui) - alert notification --- .../app/components/Dashboard/components/Alerts/NewAlert.tsx | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx b/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx index 0ea012e71..6027646f7 100644 --- a/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx @@ -122,6 +122,9 @@ const NewAlert = (props: IProps) => { ) { remove(instance.alertId).then(() => { props.history.push(withSiteId(alerts(), siteId)); + toast.success('Alert deleted'); + }).catch(() => { + toast.error('Failed to delete an alert'); }); } }; @@ -135,6 +138,8 @@ const NewAlert = (props: IProps) => { } else { toast.success('Alert updated'); } + }).catch(() => { + toast.error('Failed to create an alert'); }); }; From 42d97bef131ecfeceec7c2c6b704cc3d295ad038 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 18 Nov 2022 13:04:32 +0100 Subject: [PATCH 21/70] feat(chalice): vault support old format and new format feat(chalice): vault support devtools --- api/chalicelib/core/sessions_mobs.py | 25 +++++------ ee/api/chalicelib/core/sessions_favorite.py | 49 ++++++++++----------- ee/api/chalicelib/utils/s3_extra.py | 10 +++-- 3 files changed, 41 insertions(+), 43 deletions(-) diff --git a/api/chalicelib/core/sessions_mobs.py b/api/chalicelib/core/sessions_mobs.py index 3d966a47c..e2b22dc1b 100644 --- a/api/chalicelib/core/sessions_mobs.py +++ b/api/chalicelib/core/sessions_mobs.py @@ -15,6 +15,10 @@ def __get_mob_keys(project_id, session_id): ] +def __get_mob_keys_deprecated(session_id): + return [str(session_id), str(session_id) + "e"] + + def get_urls(project_id, session_id): results = [] for k in __get_mob_keys(project_id=project_id, session_id=session_id): @@ -27,23 +31,14 @@ def get_urls(project_id, session_id): def get_urls_depercated(session_id): - return [ - client.generate_presigned_url( + results = [] + for k in __get_mob_keys_deprecated(session_id=session_id): + results.append(client.generate_presigned_url( 'get_object', - Params={ - 'Bucket': 
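
# Illustrative sketch, not part of the patch: the SESSION_MOB_PATTERN_* and
# EFS_*_MOB_PATTERN values changed in env.default earlier in this series are
# presumably expanded with old-style %-formatting against a named sessionId,
# which is what key helpers like __get_mob_keys() build S3/EFS paths from.
# The pattern strings below are examples, not the production configuration.
def build_key(pattern: str, session_id: int) -> str:
    return pattern % {"sessionId": session_id}

assert build_key("%(sessionId)s/dom.mobs", 7384912051) == "7384912051/dom.mobs"
assert build_key("%(sessionId)sdevtools", 7384912051) == "7384912051devtools"
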
config("sessions_bucket"), - 'Key': str(session_id) - }, + Params={'Bucket': config("sessions_bucket"), 'Key': k}, ExpiresIn=100000 - ), - client.generate_presigned_url( - 'get_object', - Params={ - 'Bucket': config("sessions_bucket"), - 'Key': str(session_id) + "e" - }, - ExpiresIn=100000 - )] + )) + return results def get_ios(session_id): diff --git a/ee/api/chalicelib/core/sessions_favorite.py b/ee/api/chalicelib/core/sessions_favorite.py index 3d6496424..7af995bad 100644 --- a/ee/api/chalicelib/core/sessions_favorite.py +++ b/ee/api/chalicelib/core/sessions_favorite.py @@ -1,7 +1,7 @@ from decouple import config import schemas_ee -from chalicelib.core import sessions, sessions_favorite_exp +from chalicelib.core import sessions, sessions_favorite_exp, sessions_mobs, sessions_devtool from chalicelib.utils import pg_client, s3_extra @@ -34,32 +34,31 @@ def remove_favorite_session(context: schemas_ee.CurrentContext, project_id, sess def favorite_session(context: schemas_ee.CurrentContext, project_id, session_id): + keys = sessions_mobs.__get_mob_keys(project_id=project_id, session_id=session_id) + keys += sessions_mobs.__get_mob_keys_deprecated(session_id=session_id) # To support old sessions + keys += sessions_devtool.__get_devtools_keys(project_id=project_id, session_id=session_id) + if favorite_session_exists(user_id=context.user_id, session_id=session_id): - key = str(session_id) - try: - s3_extra.tag_file(session_id=key, tag_value=config('RETENTION_D_VALUE', default='default')) - except Exception as e: - print(f"!!!Error while tagging: {key} to default") - print(str(e)) - key = str(session_id) + "e" - try: - s3_extra.tag_file(session_id=key, tag_value=config('RETENTION_D_VALUE', default='default')) - except Exception as e: - print(f"!!!Error while tagging: {key} to default") - print(str(e)) + tag = config('RETENTION_D_VALUE', default='default') + + for k in keys: + try: + s3_extra.tag_session(file_key=k, tag_value=tag) + except Exception as e: + print(f"!!!Error while tagging: {k} to {tag} for removal") + print(str(e)) + return remove_favorite_session(context=context, project_id=project_id, session_id=session_id) - key = str(session_id) - try: - s3_extra.tag_file(session_id=key, tag_value=config('RETENTION_L_VALUE', default='vault')) - except Exception as e: - print(f"!!!Error while tagging: {key} to vault") - print(str(e)) - key = str(session_id) + "e" - try: - s3_extra.tag_file(session_id=key, tag_value=config('RETENTION_L_VALUE', default='vault')) - except Exception as e: - print(f"!!!Error while tagging: {key} to vault") - print(str(e)) + + tag = config('RETENTION_L_VALUE', default='vault') + + for k in keys: + try: + s3_extra.tag_session(file_key=k, tag_value=tag) + except Exception as e: + print(f"!!!Error while tagging: {k} to {tag} for vault") + print(str(e)) + return add_favorite_session(context=context, project_id=project_id, session_id=session_id) diff --git a/ee/api/chalicelib/utils/s3_extra.py b/ee/api/chalicelib/utils/s3_extra.py index f2a538dcc..4ebf60fec 100644 --- a/ee/api/chalicelib/utils/s3_extra.py +++ b/ee/api/chalicelib/utils/s3_extra.py @@ -3,10 +3,14 @@ from decouple import config from chalicelib.utils.s3 import client -def tag_file(session_id, tag_key='retention', tag_value='vault'): +def tag_session(file_key, tag_key='retention', tag_value='vault'): + return tag_file(file_key=file_key, bucket=config("sessions_bucket"), tag_key=tag_key, tag_value=tag_value) + + +def tag_file(file_key, bucket, tag_key, tag_value): return client.put_object_tagging( - 
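
# Illustrative sketch, not part of the patch: the retention tags written by
# tag_session()/tag_file() in this patch are typically paired with bucket
# lifecycle rules that filter on the tag, so ordinary recordings expire while
# objects re-tagged retention=vault are kept. Bucket name, rule id and the
# 30-day figure below are placeholders, not the production setup.
import boto3

s3c = boto3.client("s3")
s3c.put_bucket_lifecycle_configuration(
    Bucket="my-sessions-bucket",
    LifecycleConfiguration={
        "Rules": [
            {
                "ID": "expire-default-retention",
                "Status": "Enabled",
                "Filter": {"Tag": {"Key": "retention", "Value": "default"}},
                "Expiration": {"Days": 30},
            }
        ]
    },
)
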
Bucket=config("sessions_bucket"), - Key=session_id, + Bucket=bucket, + Key=file_key, Tagging={ 'TagSet': [ { From b7e6a237e4f697875b96a0b9b9e9f6562d3de4b5 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 18 Nov 2022 13:14:44 +0100 Subject: [PATCH 22/70] feat(chalice): refactored s3 client --- api/chalicelib/core/sessions_devtool.py | 4 ++-- api/chalicelib/core/sessions_mobs.py | 7 +++---- ee/api/chalicelib/core/sessions_devtool.py | 4 ++-- ee/api/chalicelib/utils/s3_extra.py | 4 ++-- 4 files changed, 9 insertions(+), 10 deletions(-) diff --git a/api/chalicelib/core/sessions_devtool.py b/api/chalicelib/core/sessions_devtool.py index eef7b8e6b..2afc3c366 100644 --- a/api/chalicelib/core/sessions_devtool.py +++ b/api/chalicelib/core/sessions_devtool.py @@ -1,6 +1,6 @@ from decouple import config -from chalicelib.utils.s3 import client +from chalicelib.utils import s3 def __get_devtools_keys(project_id, session_id): @@ -16,7 +16,7 @@ def __get_devtools_keys(project_id, session_id): def get_urls(session_id, project_id): results = [] for k in __get_devtools_keys(project_id=project_id, session_id=session_id): - results.append(client.generate_presigned_url( + results.append(s3.client.generate_presigned_url( 'get_object', Params={'Bucket': config("sessions_bucket"), 'Key': k}, ExpiresIn=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900) diff --git a/api/chalicelib/core/sessions_mobs.py b/api/chalicelib/core/sessions_mobs.py index e2b22dc1b..9a9237be8 100644 --- a/api/chalicelib/core/sessions_mobs.py +++ b/api/chalicelib/core/sessions_mobs.py @@ -1,7 +1,6 @@ from decouple import config from chalicelib.utils import s3 -from chalicelib.utils.s3 import client def __get_mob_keys(project_id, session_id): @@ -22,7 +21,7 @@ def __get_mob_keys_deprecated(session_id): def get_urls(project_id, session_id): results = [] for k in __get_mob_keys(project_id=project_id, session_id=session_id): - results.append(client.generate_presigned_url( + results.append(s3.client.generate_presigned_url( 'get_object', Params={'Bucket': config("sessions_bucket"), 'Key': k}, ExpiresIn=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900) @@ -33,7 +32,7 @@ def get_urls(project_id, session_id): def get_urls_depercated(session_id): results = [] for k in __get_mob_keys_deprecated(session_id=session_id): - results.append(client.generate_presigned_url( + results.append(s3.client.generate_presigned_url( 'get_object', Params={'Bucket': config("sessions_bucket"), 'Key': k}, ExpiresIn=100000 @@ -42,7 +41,7 @@ def get_urls_depercated(session_id): def get_ios(session_id): - return client.generate_presigned_url( + return s3.client.generate_presigned_url( 'get_object', Params={ 'Bucket': config("ios_bucket"), diff --git a/ee/api/chalicelib/core/sessions_devtool.py b/ee/api/chalicelib/core/sessions_devtool.py index ed6ecf694..9435c2e24 100644 --- a/ee/api/chalicelib/core/sessions_devtool.py +++ b/ee/api/chalicelib/core/sessions_devtool.py @@ -3,7 +3,7 @@ from fastapi.security import SecurityScopes import schemas_ee from chalicelib.core import permissions -from chalicelib.utils.s3 import client +from chalicelib.utils import s3 SCOPES = SecurityScopes([schemas_ee.Permissions.dev_tools]) @@ -23,7 +23,7 @@ def get_urls(session_id, project_id, context: schemas_ee.CurrentContext): return [] results = [] for k in __get_devtools_keys(project_id=project_id, session_id=session_id): - results.append(client.generate_presigned_url( + results.append(s3.client.generate_presigned_url( 'get_object', Params={'Bucket': 
config("sessions_bucket"), 'Key': k}, ExpiresIn=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900) diff --git a/ee/api/chalicelib/utils/s3_extra.py b/ee/api/chalicelib/utils/s3_extra.py index 4ebf60fec..0e594c890 100644 --- a/ee/api/chalicelib/utils/s3_extra.py +++ b/ee/api/chalicelib/utils/s3_extra.py @@ -1,6 +1,6 @@ from decouple import config -from chalicelib.utils.s3 import client +from chalicelib.utils import s3 def tag_session(file_key, tag_key='retention', tag_value='vault'): @@ -8,7 +8,7 @@ def tag_session(file_key, tag_key='retention', tag_value='vault'): def tag_file(file_key, bucket, tag_key, tag_value): - return client.put_object_tagging( + return s3.client.put_object_tagging( Bucket=bucket, Key=file_key, Tagging={ From 605f047e90dcc1271ab5078f9c3e62df95f7fbe4 Mon Sep 17 00:00:00 2001 From: sylenien Date: Fri, 18 Nov 2022 10:18:06 +0100 Subject: [PATCH 23/70] change(ui): add virtual list to storage comp --- .../components/Session_/Storage/Storage.js | 75 ++++++++++++++++--- 1 file changed, 64 insertions(+), 11 deletions(-) diff --git a/frontend/app/components/Session_/Storage/Storage.js b/frontend/app/components/Session_/Storage/Storage.js index add061001..882cb699a 100644 --- a/frontend/app/components/Session_/Storage/Storage.js +++ b/frontend/app/components/Session_/Storage/Storage.js @@ -17,6 +17,7 @@ import BottomBlock from '../BottomBlock/index'; import DiffRow from './DiffRow'; import cn from 'classnames'; import stl from './storage.module.css'; +import { List, CellMeasurer, CellMeasurerCache, AutoSizer } from 'react-virtualized' // const STATE = 'STATE'; // const DIFF = 'DIFF'; @@ -47,7 +48,19 @@ function getActionsName(type) { ) //@withEnumToggle('activeTab', 'setActiveTab', DIFF) export default class Storage extends React.PureComponent { - lastBtnRef = React.createRef(); + constructor(props, ctx) { + super(props, ctx); + + this.lastBtnRef = React.createRef(); + this._list = React.createRef(); + this.cache = new CellMeasurerCache({ + fixedWidth: true, + keyMapper: index => this.props.listNow[index] + }); + this._listNowLen = this.props.listNow.length + this._listLen = this.props.list.length + this._rowRenderer = this._rowRenderer.bind(this) + } focusNextButton() { if (this.lastBtnRef.current) { @@ -62,6 +75,17 @@ export default class Storage extends React.PureComponent { componentDidUpdate(prevProps) { if (prevProps.listNow.length !== this.props.listNow.length) { this.focusNextButton(); + const newRows = this.props.listNow.filter(evt => prevProps.listNow.indexOf(evt.id) < 0); + console.log(newRows, this.props.listNow) + if (newRows.length > 0) { + const newRowsIndexes = newRows.map(r => this.props.listNow.indexOf(r)) + + newRowsIndexes.forEach(ind => this.cache.clean(ind)) + this._list.recomputeRowHeights([...newRowsIndexes]) + } + + this._listNowLen = this.props.listNow.length + this._listLen = this.props.list.length } } @@ -129,7 +153,7 @@ export default class Storage extends React.PureComponent { return ; } - renderItem(item, i, prevItem, listNowLen, listLen) { + renderItem(item, i, prevItem, style) { const { type } = this.props; let src; let name; @@ -155,8 +179,10 @@ export default class Storage extends React.PureComponent { return (
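
// Illustrative sketch, not part of the patch: it shows the react-virtualized
// pattern this commit adopts in Storage.js — a CellMeasurerCache plus an
// AutoSizer-driven List, so only visible rows are rendered and variable row
// heights are measured once. Component names and prop values here are generic
// examples, not the exact Storage.js code.
import React from 'react';
import { AutoSizer, List, CellMeasurer, CellMeasurerCache } from 'react-virtualized';

const cache = new CellMeasurerCache({ fixedWidth: true });

function rowRenderer({ index, key, parent, style }) {
  return (
    <CellMeasurer cache={cache} columnIndex={0} key={key} parent={parent} rowIndex={index}>
      {/* style from List positions the row; its height is measured into the cache */}
      <div style={style}>row #{index}</div>
    </CellMeasurer>
  );
}

export function VirtualRows({ rowCount }) {
  return (
    <AutoSizer>
      {({ width, height }) => (
        <List
          width={width}
          height={height}
          rowCount={rowCount}
          deferredMeasurementCache={cache}
          rowHeight={cache.rowHeight}
          rowRenderer={rowRenderer}
          overscanRowCount={2}
        />
      )}
    </AutoSizer>
  );
}
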
this._list.recomputeRowHeights(i)} > {src === null ? (
@@ -180,12 +206,12 @@ export default class Storage extends React.PureComponent {
{formatMs(item.duration)}
)}
- {i + 1 < listNowLen && ( + {i + 1 < this._listNowLen && ( )} - {i + 1 === listNowLen && i + 1 < listLen && ( + {i + 1 === this._listNowLen && i + 1 < this._listLen && ( @@ -196,12 +222,28 @@ export default class Storage extends React.PureComponent { ); } + _rowRenderer({index, key, parent, style}) { + // listNow.map((item, i) => + // this.renderItem(item, i, i > 0 ? listNow[i - 1] : undefined, listNowLen, listLen) + // ) + const { listNow } = this.props; + return ( + + {this.renderItem(listNow[index], index, index > 0 ? listNow[index - 1] : undefined, style)} + + ) + } + render() { const { type, listNow, list, hintIsHidden } = this.props; const showStore = type !== STORAGE_TYPES.MOBX; - const listNowLen = listNow.length - const listLen = list.length return ( @@ -302,11 +344,22 @@ export default class Storage extends React.PureComponent {
)}
- - {listNow.map((item, i) => - this.renderItem(item, i, i > 0 ? listNow[i - 1] : undefined, listNowLen, listLen) - )} - + + {({ height, width }) => ( + { + this._list = element; + }} + deferredMeasurementCache={this.cache} + overscanRowCount={2} + rowCount={this._listNowLen} + rowHeight={this.cache.rowHeight} + rowRenderer={this._rowRenderer} + width={width} + height={height} + /> + )} +
From a111dc95e96016d605969df4ef688eeb33d5ec86 Mon Sep 17 00:00:00 2001 From: sylenien Date: Fri, 18 Nov 2022 14:34:00 +0100 Subject: [PATCH 24/70] change(ui): add unified row height to state tab, add virt to console tab --- .../Console/ConsoleRow/ConsoleRow.tsx | 7 +- .../components/Session_/Storage/DiffRow.tsx | 14 +++- .../components/Session_/Storage/Storage.js | 75 ++++++++++--------- .../DevTools/ConsolePanel/ConsolePanel.tsx | 59 ++++++++++++--- .../shared/DevTools/ConsoleRow/ConsoleRow.tsx | 14 +++- .../app/components/ui/JSONTree/JSONTree.js | 4 +- 6 files changed, 116 insertions(+), 57 deletions(-) diff --git a/frontend/app/components/Session_/Console/ConsoleRow/ConsoleRow.tsx b/frontend/app/components/Session_/Console/ConsoleRow/ConsoleRow.tsx index c87ff3f9c..85457d6b1 100644 --- a/frontend/app/components/Session_/Console/ConsoleRow/ConsoleRow.tsx +++ b/frontend/app/components/Session_/Console/ConsoleRow/ConsoleRow.tsx @@ -9,12 +9,14 @@ interface Props { iconProps: any; jump?: any; renderWithNL?: any; + style?: any; } function ConsoleRow(props: Props) { - const { log, iconProps, jump, renderWithNL } = props; + const { log, iconProps, jump, renderWithNL, style } = props; const [expanded, setExpanded] = useState(false); const lines = log.value.split('\n').filter((l: any) => !!l); const canExpand = lines.length > 1; + return (
setExpanded(!expanded)} >
@@ -38,7 +41,7 @@ function ConsoleRow(props: Props) { )} {renderWithNL(lines.pop())}
- {canExpand && expanded && lines.map((l: any) =>
{l}
)} + {canExpand && expanded && lines.map((l: any, i: number) =>
{l}
)}
jump(log.time)} />
diff --git a/frontend/app/components/Session_/Storage/DiffRow.tsx b/frontend/app/components/Session_/Storage/DiffRow.tsx index 3ecb4f615..4e6c936a7 100644 --- a/frontend/app/components/Session_/Storage/DiffRow.tsx +++ b/frontend/app/components/Session_/Storage/DiffRow.tsx @@ -63,17 +63,27 @@ function DiffRow({ diff, path }: Props) { )} > {oldValueSafe || 'undefined'} + {diffLengths[0] > 50 + ? ( +
<div onClick={() => setShortenOldVal(!shortenOldVal)} className="cursor-pointer px-1 text-white bg-gray-light rounded text-sm w-fit">
+                      {!shortenOldVal ? 'collapse' : 'expand'}
+                    </div>
+ ) : null} {' -> '} setShortenNewVal(!shortenNewVal)} className={cn( 'whitespace-pre', newValue ? 'text-red' : 'text-green', - diffLengths[1] > 50 ? 'cursor-pointer' : '' )} > {newValueSafe || 'undefined'} + {diffLengths[1] > 50 + ? ( +
<div onClick={() => setShortenNewVal(!shortenNewVal)} className="cursor-pointer px-1 text-white bg-gray-light rounded text-sm w-fit">
+                      {!shortenNewVal ? 'collapse' : 'expand'}
+                    </div>
+ ) : null}
); diff --git a/frontend/app/components/Session_/Storage/Storage.js b/frontend/app/components/Session_/Storage/Storage.js index 882cb699a..20fa1c703 100644 --- a/frontend/app/components/Session_/Storage/Storage.js +++ b/frontend/app/components/Session_/Storage/Storage.js @@ -12,7 +12,6 @@ import { JSONTree, NoContent, Tooltip } from 'UI'; import { formatMs } from 'App/date'; import { diff } from 'deep-diff'; import { jump } from 'Player'; -import Autoscroll from '../Autoscroll'; import BottomBlock from '../BottomBlock/index'; import DiffRow from './DiffRow'; import cn from 'classnames'; @@ -22,6 +21,7 @@ import { List, CellMeasurer, CellMeasurerCache, AutoSizer } from 'react-virtuali // const STATE = 'STATE'; // const DIFF = 'DIFF'; // const TABS = [ DIFF, STATE ].map(tab => ({ text: tab, key: tab })); +const ROW_HEIGHT = 90; function getActionsName(type) { switch (type) { @@ -48,8 +48,8 @@ function getActionsName(type) { ) //@withEnumToggle('activeTab', 'setActiveTab', DIFF) export default class Storage extends React.PureComponent { - constructor(props, ctx) { - super(props, ctx); + constructor(props) { + super(props); this.lastBtnRef = React.createRef(); this._list = React.createRef(); @@ -57,8 +57,6 @@ export default class Storage extends React.PureComponent { fixedWidth: true, keyMapper: index => this.props.listNow[index] }); - this._listNowLen = this.props.listNow.length - this._listLen = this.props.list.length this._rowRenderer = this._rowRenderer.bind(this) } @@ -72,27 +70,25 @@ export default class Storage extends React.PureComponent { this.focusNextButton(); } - componentDidUpdate(prevProps) { + componentDidUpdate(prevProps, prevState) { if (prevProps.listNow.length !== this.props.listNow.length) { this.focusNextButton(); - const newRows = this.props.listNow.filter(evt => prevProps.listNow.indexOf(evt.id) < 0); - console.log(newRows, this.props.listNow) - if (newRows.length > 0) { - const newRowsIndexes = newRows.map(r => this.props.listNow.indexOf(r)) - - newRowsIndexes.forEach(ind => this.cache.clean(ind)) - this._list.recomputeRowHeights([...newRowsIndexes]) - } - - this._listNowLen = this.props.listNow.length - this._listLen = this.props.list.length + /** possible performance gain, but does not work with dynamic list insertion for some reason + * getting NaN offsets, maybe I detect changed rows wrongly + */ + // const newRows = this.props.listNow.filter(evt => prevProps.listNow.indexOf(evt._index) < 0); + // if (newRows.length > 0) { + // const newRowsIndexes = newRows.map(r => this.props.listNow.indexOf(r)) + // newRowsIndexes.forEach(ind => this.cache.clear(ind)) + // this._list.recomputeRowHeights(newRowsIndexes) + // } } } renderDiff(item, prevItem) { if (!prevItem) { // we don't have state before first action - return
; + return
; } const stateDiff = diff(prevItem.state, item.state); @@ -106,7 +102,7 @@ export default class Storage extends React.PureComponent { } return ( -
+
{stateDiff.map((d, i) => this.renderDiffs(d, i))}
); @@ -114,6 +110,7 @@ export default class Storage extends React.PureComponent { renderDiffs(diff, i) { const path = this.createPath(diff); + return ( @@ -153,7 +150,7 @@ export default class Storage extends React.PureComponent { return ; } - renderItem(item, i, prevItem, style) { + renderItem(item, i, prevItem, style, measure) { const { type } = this.props; let src; let name; @@ -179,10 +176,10 @@ export default class Storage extends React.PureComponent { return (
this._list.recomputeRowHeights(i)} + // onClick={() => {measure(); this._list.recomputeRowHeights(i)}} > {src === null ? (
@@ -190,13 +187,14 @@ export default class Storage extends React.PureComponent {
) : ( <> - {this.renderDiff(item, prevItem)} -
+ {this.renderDiff(item, prevItem, i)} +
console.log('test')} />
@@ -206,12 +204,12 @@ export default class Storage extends React.PureComponent {
{formatMs(item.duration)}
)}
- {i + 1 < this._listNowLen && ( + {i + 1 < this.props.listNow.length && ( )} - {i + 1 === this._listNowLen && i + 1 < this._listLen && ( + {i + 1 === this.props.listNow.length && i + 1 < this.props.list.length && ( @@ -227,15 +225,18 @@ export default class Storage extends React.PureComponent { // this.renderItem(item, i, i > 0 ? listNow[i - 1] : undefined, listNowLen, listLen) // ) const { listNow } = this.props; + + if (!listNow[index]) return console.warn(index, listNow) + return ( - {this.renderItem(listNow[index], index, index > 0 ? listNow[index - 1] : undefined, style)} + {({ measure }) => this.renderItem(listNow[index], index, index > 0 ? listNow[index - 1] : undefined, style, measure)} ) } @@ -345,20 +346,20 @@ export default class Storage extends React.PureComponent { )}
- {({ height, width }) => ( + {({ height, width }) => ( { this._list = element; }} deferredMeasurementCache={this.cache} - overscanRowCount={2} - rowCount={this._listNowLen} - rowHeight={this.cache.rowHeight} + overscanRowCount={1} + rowCount={Math.ceil(parseInt(this.props.listNow.length) || 1)} + rowHeight={ROW_HEIGHT} rowRenderer={this._rowRenderer} width={width} height={height} /> - )} + )}
diff --git a/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx b/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx index 361084221..320f76341 100644 --- a/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx +++ b/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx @@ -8,6 +8,12 @@ import { Tabs, Input, Icon, NoContent } from 'UI'; import cn from 'classnames'; import ConsoleRow from '../ConsoleRow'; import { getRE } from 'App/utils'; +import { + List, + CellMeasurer, + CellMeasurerCache, + AutoSizer, +} from 'react-virtualized'; const ALL = 'ALL'; const INFO = 'INFO'; @@ -62,6 +68,34 @@ function ConsolePanel(props: Props) { const [activeTab, setActiveTab] = useState(ALL); const [filter, setFilter] = useState(''); + const cache = new CellMeasurerCache({ + fixedWidth: true, + keyMapper: (index: number) => filtered[index], + }); + const _list = React.useRef(); + + const _rowRenderer = ({ index, key, parent, style }: any) => { + const item = filtered[index]; + + return ( + + {({ measure }: any) => ( + { + measure(); + (_list as any).current.recomputeRowHeights(index); + }} + /> + )} + + ); + }; + let filtered = React.useMemo(() => { const filterRE = getRE(filter, 'i'); let list = logs; @@ -105,17 +139,20 @@ function ConsolePanel(props: Props) { size="small" show={filtered.length === 0} > - {/* */} - {filtered.map((l: any, index: any) => ( - - ))} - {/* */} + + {({ height, width }: any) => ( + + )} + diff --git a/frontend/app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx b/frontend/app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx index f52be65cd..aae911d42 100644 --- a/frontend/app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx +++ b/frontend/app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx @@ -10,9 +10,11 @@ interface Props { iconProps: any; jump?: any; renderWithNL?: any; + style?: any; + recalcHeight?: () => void; } function ConsoleRow(props: Props) { - const { log, iconProps, jump, renderWithNL } = props; + const { log, iconProps, jump, renderWithNL, style, recalcHeight } = props; const { showModal } = useModal(); const [expanded, setExpanded] = useState(false); const lines = log.value.split('\n').filter((l: any) => !!l); @@ -23,8 +25,14 @@ function ConsoleRow(props: Props) { const onErrorClick = () => { showModal(, { right: true }); }; + + const toggleExpand = () => { + setExpanded(!expanded) + setTimeout(() => recalcHeight(), 0) + } return (
(!!log.errorId ? onErrorClick() : setExpanded(!expanded)) : () => {} + clickable ? () => (!!log.errorId ? onErrorClick() : toggleExpand()) : () => {} } >
@@ -49,7 +57,7 @@ function ConsoleRow(props: Props) { )} {renderWithNL(lines.pop())}
- {canExpand && expanded && lines.map((l: any) =>
{l}
)} + {canExpand && expanded && lines.map((l: string, i: number) =>
{l}
)}
jump(log.time)} />
diff --git a/frontend/app/components/ui/JSONTree/JSONTree.js b/frontend/app/components/ui/JSONTree/JSONTree.js index dc6ab786c..b94324ebd 100644 --- a/frontend/app/components/ui/JSONTree/JSONTree.js +++ b/frontend/app/components/ui/JSONTree/JSONTree.js @@ -8,7 +8,7 @@ function updateObjectLink(obj) { } export default ({ src, ...props }) => ( - ( iconStle="triangle" { ...props } /> -); \ No newline at end of file +); From a2b2cd6f8694a44ff224accad2658757fcc24105 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 18 Nov 2022 15:49:14 +0100 Subject: [PATCH 25/70] feat(alerts): format values for notification --- api/chalicelib/core/alerts_processor.py | 15 +++++++++++++-- ee/api/chalicelib/core/alerts_processor.py | 15 +++++++++++++-- 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/alerts_processor.py b/api/chalicelib/core/alerts_processor.py index 2ed9105b2..76ae5c615 100644 --- a/api/chalicelib/core/alerts_processor.py +++ b/api/chalicelib/core/alerts_processor.py @@ -199,7 +199,8 @@ def process(): logging.info(f"Valid alert, notifying users, alertId:{alert['alertId']} name: {alert['name']}") notifications.append(generate_notification(alert, result)) except Exception as e: - logging.error(f"!!!Error while running alert query for alertId:{alert['alertId']} name: {alert['name']}") + logging.error( + f"!!!Error while running alert query for alertId:{alert['alertId']} name: {alert['name']}") logging.error(query) logging.error(e) cur = cur.recreate(rollback=True) @@ -212,12 +213,22 @@ def process(): alerts.process_notifications(notifications) +def __format_value(x): + if x % 1 == 0: + x = int(x) + else: + x = round(x, 2) + return f"{x:,}" + + def generate_notification(alert, result): + left = __format_value(result['value']) + right = __format_value(alert['query']['right']) return { "alertId": alert["alertId"], "tenantId": alert["tenantId"], "title": alert["name"], - "description": f"has been triggered, {alert['query']['left']} = {round(result['value'], 2)} ({alert['query']['operator']} {alert['query']['right']}).", + "description": f"has been triggered, {alert['query']['left']} = {left} ({alert['query']['operator']} {right}).", "buttonText": "Check metrics for more details", "buttonUrl": f"/{alert['projectId']}/metrics", "imageUrl": None, diff --git a/ee/api/chalicelib/core/alerts_processor.py b/ee/api/chalicelib/core/alerts_processor.py index 087f23a05..326d17ffc 100644 --- a/ee/api/chalicelib/core/alerts_processor.py +++ b/ee/api/chalicelib/core/alerts_processor.py @@ -204,7 +204,8 @@ def process(): logging.info(f"Valid alert, notifying users, alertId:{alert['alertId']} name: {alert['name']}") notifications.append(generate_notification(alert, result)) except Exception as e: - logging.error(f"!!!Error while running alert query for alertId:{alert['alertId']} name: {alert['name']}") + logging.error( + f"!!!Error while running alert query for alertId:{alert['alertId']} name: {alert['name']}") logging.error(query) logging.error(e) cur = cur.recreate(rollback=True) @@ -217,12 +218,22 @@ def process(): alerts.process_notifications(notifications) +def __format_value(x): + if x % 1 == 0: + x = int(x) + else: + x = round(x, 2) + return f"{x:,}" + + def generate_notification(alert, result): + left = __format_value(result['value']) + right = __format_value(alert['query']['right']) return { "alertId": alert["alertId"], "tenantId": alert["tenantId"], "title": alert["name"], - "description": f"has been triggered, {alert['query']['left']} = {round(result['value'], 
2)} ({alert['query']['operator']} {alert['query']['right']}).", + "description": f"has been triggered, {alert['query']['left']} = {left} ({alert['query']['operator']} {right}).", "buttonText": "Check metrics for more details", "buttonUrl": f"/{alert['projectId']}/metrics", "imageUrl": None, From 287ba8163c018133140eeb2236a1e882894e2456 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 18 Nov 2022 16:23:25 +0100 Subject: [PATCH 26/70] feat(chalice): ignore more html-sourcemaps --- api/chalicelib/core/sourcemaps.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/chalicelib/core/sourcemaps.py b/api/chalicelib/core/sourcemaps.py index 597b260d3..89df77926 100644 --- a/api/chalicelib/core/sourcemaps.py +++ b/api/chalicelib/core/sourcemaps.py @@ -77,7 +77,7 @@ def format_payload(p, truncate_to_first=False): def url_exists(url): try: r = requests.head(url, allow_redirects=False) - return r.status_code == 200 and r.headers.get("Content-Type") != "text/html" + return r.status_code == 200 and "text/html" not in r.headers.get("Content-Type", "") except Exception as e: print(f"!! Issue checking if URL exists: {url}") print(e) From f31ae3efa075cd434cfeda848cbcce96253a29da Mon Sep 17 00:00:00 2001 From: Alex Kaminskii Date: Fri, 18 Nov 2022 16:52:38 +0100 Subject: [PATCH 27/70] refactor(frontend/player):player lists --- .../app/player/MessageDistributor/Lists.ts | 83 +++++++++++++++---- .../MessageDistributor/MessageDistributor.ts | 71 ++++++---------- .../MessageDistributor/managers/ListWalker.ts | 25 +++++- .../managers/ListWalkerWithMarks.ts | 42 ++++++++++ 4 files changed, 158 insertions(+), 63 deletions(-) create mode 100644 frontend/app/player/MessageDistributor/managers/ListWalkerWithMarks.ts diff --git a/frontend/app/player/MessageDistributor/Lists.ts b/frontend/app/player/MessageDistributor/Lists.ts index cb7e4d192..bae8d46de 100644 --- a/frontend/app/player/MessageDistributor/Lists.ts +++ b/frontend/app/player/MessageDistributor/Lists.ts @@ -1,23 +1,76 @@ -import type { Message } from './messages' import ListWalker from './managers/ListWalker'; +import ListWalkerWithMarks from './managers/ListWalkerWithMarks'; -export const LIST_NAMES = ["redux", "mobx", "vuex", "zustand", "ngrx", "graphql", "exceptions", "profiles"] as const; +import type { Message } from './messages' -export const INITIAL_STATE = {} -LIST_NAMES.forEach(name => { - INITIAL_STATE[`${name}ListNow`] = [] - INITIAL_STATE[`${name}List`] = [] -}) +const SIMPLE_LIST_NAMES = [ "event", "redux", "mobx", "vuex", "zustand", "ngrx", "graphql", "exceptions", "profiles"] as const; +const MARKED_LIST_NAMES = [ "log", "resource", "fetch", "stack" ] as const; +//const entityNamesSimple = [ "event", "profile" ]; +const LIST_NAMES = [...SIMPLE_LIST_NAMES, ...MARKED_LIST_NAMES ]; -type ListsObject = { - [key in typeof LIST_NAMES[number]]: ListWalker -} +// TODO: provide correct types -export function initLists(): ListsObject { - const lists: Partial = {}; - for (var i = 0; i < LIST_NAMES.length; i++) { - lists[LIST_NAMES[i]] = new ListWalker(); +export const INITIAL_STATE = LIST_NAMES.reduce((state, name) => { + state[`${name}List`] = [] + state[`${name}ListNow`] = [] + if (MARKED_LIST_NAMES.includes(name)) { + state[`${name}MarkedCountNow`] = 0 + state[`${name}MarkedCount`] = 0 } - return lists as ListsObject; + return state +}, {}) + + +type SimpleListsObject = { + [key in typeof SIMPLE_LIST_NAMES[number]]: ListWalker } +type MarkedListsObject = { + [key in typeof MARKED_LIST_NAMES[number]]: 
ListWalkerWithMarks +} +type ListsObject = SimpleListsObject & MarkedListsObject + +type InitialLists = { + [key in typeof LIST_NAMES[number]]: any[] +} + +export default class Lists { + lists: ListsObject + constructor(initialLists: Partial = {}) { + const lists: Partial = {} + for (const name of SIMPLE_LIST_NAMES) { + lists[name] = new ListWalker(initialLists[name]) + } + for (const name of MARKED_LIST_NAMES) { + // TODO: provide types + lists[name] = new ListWalkerWithMarks((el) => el.isRed(), initialLists[name]) + } + this.lists = lists as ListsObject + } + + getFullListsState() { + return LIST_NAMES.reduce((state, name) => { + state[`${name}List`] = this.lists[name].list + return state + }, MARKED_LIST_NAMES.reduce((state, name) => { + state[`${name}MarkedCount`] = this.lists[name].markedCount + return state + }, {}) + ) + } + + moveGetState(t: number)/* : Partial */ { + return LIST_NAMES.reduce((state, name) => { + const lastMsg = this.lists[name].moveGetLast(t) // index: name === 'exceptions' ? undefined : index); + if (lastMsg != null) { + state[`${name}ListNow`] = this.lists[name].listNow + } + return state + }, MARKED_LIST_NAMES.reduce((state, name) => { + state[`${name}MarkedCountNow`] = this.lists[name].markedCountNow + return state + }, {}) + ); + } + +} \ No newline at end of file diff --git a/frontend/app/player/MessageDistributor/MessageDistributor.ts b/frontend/app/player/MessageDistributor/MessageDistributor.ts index 85b01e6a3..a80676a61 100644 --- a/frontend/app/player/MessageDistributor/MessageDistributor.ts +++ b/frontend/app/player/MessageDistributor/MessageDistributor.ts @@ -9,12 +9,6 @@ import Log from 'Types/session/log'; import { update } from '../store'; import { toast } from 'react-toastify'; -import { - init as initListsDepr, - append as listAppend, - setStartTime as setListsStartTime -} from '../lists'; - import StatedScreen from './StatedScreen/StatedScreen'; import ListWalker from './managers/ListWalker'; @@ -32,7 +26,7 @@ import { decryptSessionBytes } from './network/crypto'; import { INITIAL_STATE as SUPER_INITIAL_STATE, State as SuperState } from './StatedScreen/StatedScreen'; import { INITIAL_STATE as ASSIST_INITIAL_STATE, State as AssistState } from './managers/AssistManager'; -import { INITIAL_STATE as LISTS_INITIAL_STATE , LIST_NAMES, initLists } from './Lists'; +import Lists, { INITIAL_STATE as LISTS_INITIAL_STATE } from './Lists'; import type { PerformanceChartPoint } from './managers/PerformanceTrackManager'; import type { SkipInterval } from './managers/ActivityManager'; @@ -100,7 +94,7 @@ export default class MessageDistributor extends StatedScreen { private scrollManager: ListWalker = new ListWalker(); private readonly decoder = new Decoder(); - private readonly lists = initLists(); + private readonly lists: Lists; private activityManager: ActivityManager | null = null; @@ -118,28 +112,27 @@ export default class MessageDistributor extends StatedScreen { this.sessionStart = this.session.startedAt; if (live) { - initListsDepr({}) + this.lists = new Lists() this.assistManager.connect(this.session.agentToken); } else { this.activityManager = new ActivityManager(this.session.duration.milliseconds); /* == REFACTOR_ME == */ - const eventList = this.session.events.toJSON(); - - initListsDepr({ - event: eventList, - stack: this.session.stackEvents.toJSON(), - resource: this.session.resources.toJSON(), - }); - + const eventList = session.events.toJSON(); // TODO: fix types for events, remove immutable js eventList.forEach((e: Record) => { if (e.type === 
EVENT_TYPES.LOCATION) { //TODO type system this.locationEventManager.append(e); } - }); - this.session.errors.forEach((e: Record) => { - this.lists.exceptions.append(e); - }); + }) + + this.lists = new Lists({ + event: eventList, + stack: session.stackEvents.toJSON(), + resource: session.resources.toJSON(), + exceptions: session.errors.toJSON(), + }) + + /* === */ this.loadMessages(); } @@ -187,13 +180,11 @@ export default class MessageDistributor extends StatedScreen { private waitingForFiles: boolean = false private onFileReadSuccess = () => { - const stateToUpdate: {[key:string]: any} = { + const stateToUpdate = { performanceChartData: this.performanceTrackManager.chartData, performanceAvaliability: this.performanceTrackManager.avaliability, + ...this.lists.getFullListsState() } - LIST_NAMES.forEach(key => { - stateToUpdate[ `${ key }List` ] = this.lists[ key ].list - }) if (this.activityManager) { this.activityManager.end() stateToUpdate.skipIntervals = this.activityManager.list @@ -304,7 +295,6 @@ export default class MessageDistributor extends StatedScreen { /* == REFACTOR_ME == */ const lastLoadedLocationMsg = this.loadedLocationManager.moveGetLast(t, index); if (!!lastLoadedLocationMsg) { - setListsStartTime(lastLoadedLocationMsg.time) this.navigationStartOffset = lastLoadedLocationMsg.navigationStart - this.sessionStart; } const llEvent = this.locationEventManager.moveGetLast(t, index); @@ -340,14 +330,7 @@ export default class MessageDistributor extends StatedScreen { stateToUpdate.performanceChartTime = lastPerformanceTrackMessage.time; } - LIST_NAMES.forEach(key => { - const lastMsg = this.lists[key].moveGetLast(t, key === 'exceptions' ? undefined : index); - if (lastMsg != null) { - // @ts-ignore TODO: fix types - stateToUpdate[`${key}ListNow`] = this.lists[key].listNow; - } - }); - + Object.assign(stateToUpdate, this.lists.moveGetState(t)) Object.keys(stateToUpdate).length > 0 && update(stateToUpdate); /* Sequence of the managers is important here */ @@ -414,15 +397,15 @@ export default class MessageDistributor extends StatedScreen { /* Lists: */ case "console_log": if (msg.level === 'debug') break; - listAppend("log", Log({ + this.lists.lists.log.append(Log({ level: msg.level, value: msg.value, time, index, - })); + })) break; case "fetch": - listAppend("fetch", Resource({ + this.lists.lists.fetch.append(Resource({ method: msg.method, url: msg.url, payload: msg.request, @@ -469,42 +452,42 @@ export default class MessageDistributor extends StatedScreen { decoded = this.decodeStateMessage(msg, ["state", "action"]); logger.log('redux', decoded) if (decoded != null) { - this.lists.redux.append(decoded); + this.lists.lists.redux.append(decoded); } break; case "ng_rx": decoded = this.decodeStateMessage(msg, ["state", "action"]); logger.log('ngrx', decoded) if (decoded != null) { - this.lists.ngrx.append(decoded); + this.lists.lists.ngrx.append(decoded); } break; case "vuex": decoded = this.decodeStateMessage(msg, ["state", "mutation"]); logger.log('vuex', decoded) if (decoded != null) { - this.lists.vuex.append(decoded); + this.lists.lists.vuex.append(decoded); } break; case "zustand": decoded = this.decodeStateMessage(msg, ["state", "mutation"]) logger.log('zustand', decoded) if (decoded != null) { - this.lists.zustand.append(decoded) + this.lists.lists.zustand.append(decoded) } case "mob_x": decoded = this.decodeStateMessage(msg, ["payload"]); logger.log('mobx', decoded) if (decoded != null) { - this.lists.mobx.append(decoded); + this.lists.lists.mobx.append(decoded); } break; 
case "graph_ql": - this.lists.graphql.append(msg); + this.lists.lists.graphql.append(msg); break; case "profiler": - this.lists.profiles.append(msg); + this.lists.lists.profiles.append(msg); break; default: switch (msg.tp) { diff --git a/frontend/app/player/MessageDistributor/managers/ListWalker.ts b/frontend/app/player/MessageDistributor/managers/ListWalker.ts index e04c5bb83..c0d59c354 100644 --- a/frontend/app/player/MessageDistributor/managers/ListWalker.ts +++ b/frontend/app/player/MessageDistributor/managers/ListWalker.ts @@ -79,6 +79,23 @@ export default class ListWalker { return this.p; } + private hasNext() { + return this.p < this.length + } + private hasPrev() { + return this.p > 0 + } + protected moveNext(): T | null { + return this.hasNext() + ? this.list[ this.p++ ] + : null + } + protected movePrev(): T | null { + return this.hasPrev() + ? this.list[ --this.p ] + : null + } + /* Returns last message with the time <= t. Assumed that the current message is already handled so @@ -94,11 +111,11 @@ export default class ListWalker { let changed = false; while (this.p < this.length && this.list[this.p][key] <= val) { - this.p++; + this.moveNext() changed = true; } while (this.p > 0 && this.list[ this.p - 1 ][key] > val) { - this.p--; + this.movePrev() changed = true; } return changed ? this.list[ this.p - 1 ] : null; @@ -112,10 +129,10 @@ export default class ListWalker { const list = this.list while (list[this.p] && list[this.p].time <= t) { - fn(list[ this.p++ ]); + fn(this.moveNext()) } while (fnBack && this.p > 0 && list[ this.p - 1 ].time > t) { - fnBack(list[ --this.p ]); + fnBack(this.movePrev()); } } diff --git a/frontend/app/player/MessageDistributor/managers/ListWalkerWithMarks.ts b/frontend/app/player/MessageDistributor/managers/ListWalkerWithMarks.ts new file mode 100644 index 000000000..d2f2ccee3 --- /dev/null +++ b/frontend/app/player/MessageDistributor/managers/ListWalkerWithMarks.ts @@ -0,0 +1,42 @@ +import type { Timed } from '../messages/timed'; +import ListWalker from './ListWalker' + + +type CheckFn = (t: T) => boolean + + +export default class ListWalkerWithMarks extends ListWalker { + private _markCountNow: number = 0 + private _markCount: number = 0 + constructor(private isMarked: CheckFn, initialList: T[] = []) { + super(initialList) + this._markCount = initialList.reduce((n, item) => isMarked(item) ? n+1 : n, 0) + } + + append(item: T) { + if (this.isMarked(item)) { this._markCount++ } + super.append(item) + } + + protected moveNext() { + const val = super.moveNext() + if (val && this.isMarked(val)) { + this._markCountNow++ + } + return val + } + protected movePrev() { + const val = super.movePrev() + if (val && this.isMarked(val)) { + this._markCountNow-- + } + return val + } + get markedCountNow(): number { + return this._markCountNow + } + get markedCount(): number { + return this._markCount + } + +} \ No newline at end of file From 695a414caf2d1624b30fc13f97bae089a5792cb1 Mon Sep 17 00:00:00 2001 From: Alex Kaminskii Date: Fri, 18 Nov 2022 16:55:26 +0100 Subject: [PATCH 28/70] fixup! 
refactor(frontend/player):player lists --- frontend/app/player/Player.ts | 2 - frontend/app/player/lists/ListReader.js | 124 ------------------ .../app/player/lists/ListReaderWithRed.js | 48 ------- frontend/app/player/lists/index.js | 68 ---------- frontend/app/player/singletone.js | 2 - frontend/app/player/store/duck.js | 2 - 6 files changed, 246 deletions(-) delete mode 100644 frontend/app/player/lists/ListReader.js delete mode 100644 frontend/app/player/lists/ListReaderWithRed.js delete mode 100644 frontend/app/player/lists/index.js diff --git a/frontend/app/player/Player.ts b/frontend/app/player/Player.ts index 320b141ec..369f854ea 100644 --- a/frontend/app/player/Player.ts +++ b/frontend/app/player/Player.ts @@ -1,4 +1,3 @@ -import { goTo as listsGoTo } from './lists'; import { update, getState } from './store'; import MessageDistributor, { INITIAL_STATE as SUPER_INITIAL_STATE } from './MessageDistributor/MessageDistributor'; import { Note } from 'App/services/NotesService'; @@ -68,7 +67,6 @@ export default class Player extends MessageDistributor { completed: false, }); super.move(time, index); - listsGoTo(time, index); } private _startAnimation() { diff --git a/frontend/app/player/lists/ListReader.js b/frontend/app/player/lists/ListReader.js deleted file mode 100644 index 641d3341a..000000000 --- a/frontend/app/player/lists/ListReader.js +++ /dev/null @@ -1,124 +0,0 @@ -export default class ListReader { - _callback; - _p = -1; - _list = []; - _offset = 0; - - constructor(callback = Function.prototype) { - if (typeof callback !== 'function') { - return console.error("List Reader: wrong constructor argument. `callback` must be a function."); - } - this._callback = callback; - } - - static checkItem(item) { - if(typeof item !== 'object' || item === null) { - console.error("List Reader: expected item to be not null object but got ", item); - return false; - } - if (typeof item.time !== 'number') { - console.error("List Reader: expected item to have number property 'time', ", item); - return false; - } - // if (typeof item.index !== 'number') { - // console.error("List Reader: expected item to have number property 'index', ", item); - // return false; - // } // future: All will have index - return true; - } - /* EXTENDABLE METHODS */ - _onIncrement() {} - _onDecrement() {} - _onStartTimeChange() {} - - inc() { - const item = this._list[ ++this._p ]; - this._onIncrement(item); - return item; - } - - dec() { - const item = this._list[ this._p-- ]; - this._onDecrement(item); - return item - } - - get _goToReturn() { - return { listNow: this.listNow }; - } - - goTo(time) { - const prevPointer = this._p; - while (!!this._list[ this._p + 1 ] && this._list[ this._p + 1 ].time <= time) { - this.inc(); - } - while (this._p >= 0 && this._list[ this._p ].time > time) { - this.dec(); - } - if (prevPointer !== this._p) { - //this._notify([ "listNow" ]); - return this._goToReturn; - } - } - - goToIndex(index) { // thinkaboutit - const prevPointer = this._p; - while (!!this._list[ this._p + 1 ] && - this._list[ this._p + 1 ].index <= index - ) { - this.inc(); - } - while (this._p >= 0 && this._list[ this._p ].index > index) { - this.dec(); - } - if (prevPointer !== this._p) { - //this._notify([ "listNow" ]); - return this._goToReturn; - } - } - - // happens rare MBTODO only in class ResourceListReader extends ListReaderWithRed - set startTime(time) { - const prevOffset = this._offset; - const prevPointer = this._p; - this._offset = this._list.findIndex(({ time, duration = 0 }) => time + duration >= 
time); // TODO: strict for duration rrrrr - this._p = Math.max(this._p, this._offset - 1); - if (prevOffset !== this._offset || prevPointer !== this._p) { - this._notify([ "listNow" ]); - } - this._onStartTimeChange(); - } - - get list() { - return this._list; - } - get count() { - return this._list.length; - } - get listNow() { - return this._list.slice(this._offset, this._p + 1); - } - - set list(_list) { - if (!Array.isArray(_list)) { - console.error("List Reader: wrong list value.", _list) - } - const valid = _list.every(this.constructor.checkItem); - if (!valid) return; - this._list = _list; // future: time + index sort - this._notify([ "list", "count" ]); - } - - append(item) { - if (!this.constructor.checkItem(item)) return; - this._list.push(item); // future: time + index sort - this._notify([ "count" ]); // list is the same by ref, CAREFULL - } - - _notify(propertyList) { - const changedState = {}; - propertyList.forEach(p => changedState[ p ] = this[ p ]); - this._callback(changedState); - } - -} \ No newline at end of file diff --git a/frontend/app/player/lists/ListReaderWithRed.js b/frontend/app/player/lists/ListReaderWithRed.js deleted file mode 100644 index 84da42138..000000000 --- a/frontend/app/player/lists/ListReaderWithRed.js +++ /dev/null @@ -1,48 +0,0 @@ -import ListReader from './ListReader'; - -export default class ListReaderWithRed extends ListReader { - _redCountNow = 0; - - static checkItem(item) { - const superCheckResult = super.checkItem(item); - if (typeof item.isRed !== 'function') { - console.error("List Reader With Red: expected item to have method 'isRed', ", item); - return false; - } - return superCheckResult; - } - - get _goToReturn() { - return { - listNow: this.listNow, - redCountNow: this.redCountNow, - } - } - - _onIncrement(item) { - if (item.isRed()) { - this._redCountNow++; - //this._notify([ "redCountNow" ]); - } - } - - _onDecrement(item) { - if (item.isRed()) { - this._redCountNow--; - //this._notify([ "redCountNow" ]); - } - } - - _onStartTimeChange() { - this._redCountNow = this._list - .slice(this._offset, this._p + 1) - .filter(item => item.isRed()) - .length; - this._notify([ "redCountNow" ]); - } - - get redCountNow() { - return this._redCountNow; - } - -} \ No newline at end of file diff --git a/frontend/app/player/lists/index.js b/frontend/app/player/lists/index.js deleted file mode 100644 index edae90b0c..000000000 --- a/frontend/app/player/lists/index.js +++ /dev/null @@ -1,68 +0,0 @@ -import ListReader from './ListReader'; -import ListReaderWithRed from './ListReaderWithRed'; -import { update as updateStore } from '../store'; - -const l = n => `${ n }List`; -const c = n => `${ n }Count`; -const ln = n => `${ n }ListNow`; -const rcn = n => `${ n }RedCountNow`; - -const entityNamesWithRed = [ "log", "resource", "fetch", "stack" ]; -const entityNamesSimple = [ "event", "profile" ]; -const entityNames = /*[ "redux" ].*/entityNamesWithRed.concat(entityNamesSimple); - -const is = {}; -entityNames.forEach(n => { - is[ l(n) ] = []; - is[ c(n) ] = 0; - is[ ln(n) ] = []; - if (entityNamesWithRed.includes(n)) { - is[ rcn(n) ] = 0; - } -}); -//is["reduxState"] = {}; -//is["reduxFinalStates"] = []; - - -const createCallback = n => { - const entityfy = s => `${ n }${ s[ 0 ].toUpperCase() }${ s.slice(1) }`; - return state => { - if (!state) return; - const namedState = {}; - Object.keys(state).forEach(key => { - namedState[ entityfy(key) ] = state[ key ]; - }); - return updateStore(namedState); - } -} - -let readers = null; - -export function 
init(lists) { - readers = {}; - entityNamesSimple.forEach(n => readers[ n ] = new ListReader(createCallback(n))); - entityNamesWithRed.forEach(n => readers[ n ] = new ListReaderWithRed(createCallback(n))); - - entityNames.forEach(n => readers[ n ].list = lists[ n ] || []); -} -export function append(name, item) { - readers[ name ].append(item); -} -export function setStartTime(time) { - readers.resource.startTime = time; -} -const byTimeNames = [ "event", "stack" ]; // TEMP -const byIndexNames = entityNames.filter(n => !byTimeNames.includes(n)); -export function goTo(time, index) { - if (readers === null) return; - if (typeof index === 'number') { - byTimeNames.forEach(n => readers[ n ] && readers[ n ]._callback(readers[ n ].goTo(time))); - byIndexNames.forEach(n => readers[ n ] && readers[ n ]._callback(readers[ n ].goToIndex(index))); - } else { - entityNames.forEach(n => readers[ n ] && readers[ n ]._callback(readers[ n ].goTo(time))); - } -} -export function clean() { - entityNames.forEach(n => delete readers[ n ]); -} -export const INITIAL_STATE = is; diff --git a/frontend/app/player/singletone.js b/frontend/app/player/singletone.js index feb82ec78..357c76a90 100644 --- a/frontend/app/player/singletone.js +++ b/frontend/app/player/singletone.js @@ -1,6 +1,5 @@ import Player from './Player'; import { update, cleanStore, getState } from './store'; -import { clean as cleanLists } from './lists'; /** @type {Player} */ let instance = null; @@ -49,7 +48,6 @@ export function clean() { if (instance === null) return; instance.clean(); cleanStore(); - cleanLists(); instance = null; } export const jump = initCheck((...args) => instance.jump(...args)); diff --git a/frontend/app/player/store/duck.js b/frontend/app/player/store/duck.js index faf77041c..f57a0ed54 100644 --- a/frontend/app/player/store/duck.js +++ b/frontend/app/player/store/duck.js @@ -1,5 +1,4 @@ import { applyChange, revertChange } from 'deep-diff'; -import { INITIAL_STATE as listsInitialState } from '../lists'; import { INITIAL_STATE as playerInitialState, INITIAL_NON_RESETABLE_STATE as playerInitialNonResetableState } from '../Player'; const UPDATE = 'player/UPDATE'; @@ -7,7 +6,6 @@ const CLEAN = 'player/CLEAN'; const REDUX = 'player/REDUX'; const resetState = { - ...listsInitialState, ...playerInitialState, initialized: false, }; From 4aa80a23b5417bb16b28bf2dfebbb9924c064bbb Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 18 Nov 2022 12:35:36 +0100 Subject: [PATCH 29/70] change(ui) - alert list item number formatting --- .../components/Dashboard/components/Alerts/AlertListItem.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx index aaecc0b14..3e8a68f11 100644 --- a/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/AlertListItem.tsx @@ -2,6 +2,7 @@ import React from 'react'; import { Icon } from 'UI'; import { checkForRecent } from 'App/date'; import { withSiteId, alertEdit } from 'App/routes'; +import { numberWithCommas } from 'App/utils'; // @ts-ignore import { DateTime } from 'luxon'; import { withRouter, RouteComponentProps } from 'react-router-dom'; @@ -108,7 +109,7 @@ function AlertListItem(props: Props) { {' is '} {alert.query.operator} - {alert.query.right} {alert.metric.unit} + {numberWithCommas(alert.query.right)} {alert.metric.unit} {' over the past '} 
{getThreshold(alert.currentPeriod)} From a191d996664e0e8c68c02cddb06505292e50dd7f Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 18 Nov 2022 16:16:59 +0100 Subject: [PATCH 30/70] change(ui) - dashboard text --- .../Dashboard/components/DashboardList/DashboardsView.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Dashboard/components/DashboardList/DashboardsView.tsx b/frontend/app/components/Dashboard/components/DashboardList/DashboardsView.tsx index 5341c3487..7378e88f8 100644 --- a/frontend/app/components/Dashboard/components/DashboardList/DashboardsView.tsx +++ b/frontend/app/components/Dashboard/components/DashboardList/DashboardsView.tsx @@ -35,7 +35,7 @@ function DashboardsView({ history, siteId }: { history: any, siteId: string }) {
- A dashboard is a custom visualization using your OpenReplay data. + A Dashboard is a collection of Metrics that can be shared across teams.
From aedd1907268b157592d14095e76ddc3b683c644c Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 18 Nov 2022 16:21:19 +0100 Subject: [PATCH 31/70] change(ui) - metrics text --- .../components/Dashboard/components/MetricsView/MetricsView.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Dashboard/components/MetricsView/MetricsView.tsx b/frontend/app/components/Dashboard/components/MetricsView/MetricsView.tsx index dd87b2fef..6c39114cd 100644 --- a/frontend/app/components/Dashboard/components/MetricsView/MetricsView.tsx +++ b/frontend/app/components/Dashboard/components/MetricsView/MetricsView.tsx @@ -30,7 +30,7 @@ function MetricsView({ siteId }: Props) {
- Create custom Metrics to capture key interactions and track KPIs. + Create custom Metrics to capture user frustrations, monitor your app's performance and track other KPIs.
From 4ed8119a24020389eb62448e07b4863278142bb0 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 18 Nov 2022 16:23:50 +0100 Subject: [PATCH 32/70] change(ui) - network error message --- .../components/TimelinePointer/TimelinePointer.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/TimelinePointer.tsx b/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/TimelinePointer.tsx index 5c780b4ba..818cdfc4a 100644 --- a/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/TimelinePointer.tsx +++ b/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/TimelinePointer.tsx @@ -47,7 +47,7 @@ const TimelinePointer = React.memo((props: Props) => { - {item.success ? 'Slow resource: ' : 'Missing resource:'} + {item.success ? 'Slow resource: ' : '4xx/5xx Error:'}
{name.length > 200 ? name.slice(0, 100) + ' ... ' + name.slice(-50) From e63a9e79b4b67ecc0b7ec31bf55e64c8c8e97707 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 18 Nov 2022 16:24:17 +0100 Subject: [PATCH 33/70] change(ui) - xray options map key --- .../components/FeatureSelection/FeatureSelection.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Session_/OverviewPanel/components/FeatureSelection/FeatureSelection.tsx b/frontend/app/components/Session_/OverviewPanel/components/FeatureSelection/FeatureSelection.tsx index c91e2b362..bf4599e10 100644 --- a/frontend/app/components/Session_/OverviewPanel/components/FeatureSelection/FeatureSelection.tsx +++ b/frontend/app/components/Session_/OverviewPanel/components/FeatureSelection/FeatureSelection.tsx @@ -30,7 +30,7 @@ function FeatureSelection(props: Props) { const checked = list.includes(feature); const _disabled = disabled && !checked; return ( - + Date: Fri, 18 Nov 2022 16:25:40 +0100 Subject: [PATCH 34/70] change(ui) - red count check --- .../app/components/Session_/Player/Controls/Controls.js | 7 ++++--- .../shared/DevTools/NetworkPanel/NetworkPanel.tsx | 1 - 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/frontend/app/components/Session_/Player/Controls/Controls.js b/frontend/app/components/Session_/Player/Controls/Controls.js index ae829caa7..27a928b6f 100644 --- a/frontend/app/components/Session_/Player/Controls/Controls.js +++ b/frontend/app/components/Session_/Player/Controls/Controls.js @@ -78,8 +78,8 @@ function getStorageName(type) { // logCount: state.logList.length, logRedCount: state.logRedCount, showExceptions: state.exceptionsList.length > 0, - resourceRedCount: state.resourceRedCount, - fetchRedCount: state.fetchRedCount, + resourceRedCount: state.resourceRedCountNow, // TODO missing state.resourceRedCount + fetchRedCount: state.fetchRedCountNow, showStack: state.stackList.length > 0, stackCount: state.stackList.length, stackRedCount: state.stackRedCount, @@ -261,6 +261,7 @@ export default class Controls extends React.Component { logRedCount, showExceptions, resourceRedCount, + fetchRedCount, showStack, stackRedCount, showStorage, @@ -352,7 +353,7 @@ export default class Controls extends React.Component { onClick={() => toggleBottomTools(NETWORK)} active={bottomBlock === NETWORK && !inspectorMode} label="NETWORK" - hasErrors={resourceRedCount > 0} + hasErrors={resourceRedCount > 0 || fetchRedCount > 0} noIcon labelClassName="!text-base font-semibold" containerClassName="mx-2" diff --git a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx index e72b7170e..e62bf25ff 100644 --- a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx +++ b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx @@ -394,7 +394,6 @@ export default connectPlayer((state: any) => ({ fetchList: state.fetchList.map((i: any) => Resource({ ...i.toJS(), type: TYPES.XHR })), domContentLoadedTime: state.domContentLoadedTime, loadTime: state.loadTime, - // time: state.time, playing: state.playing, domBuildingTime: state.domBuildingTime, }))(NetworkPanel); From 9f4cef2f8bc46d31cbdbd0b08fa9d559e763e8ef Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 18 Nov 2022 17:25:41 +0100 Subject: [PATCH 35/70] change(ui) - red count check --- .../app/components/Session_/Player/Controls/Controls.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git 
a/frontend/app/components/Session_/Player/Controls/Controls.js b/frontend/app/components/Session_/Player/Controls/Controls.js index 27a928b6f..6bba1d0ad 100644 --- a/frontend/app/components/Session_/Player/Controls/Controls.js +++ b/frontend/app/components/Session_/Player/Controls/Controls.js @@ -76,13 +76,13 @@ function getStorageName(type) { inspectorMode: state.inspectorMode, fullscreenDisabled: state.messagesLoading, // logCount: state.logList.length, - logRedCount: state.logRedCount, + logRedCount: state.logMarkedCount, showExceptions: state.exceptionsList.length > 0, - resourceRedCount: state.resourceRedCountNow, // TODO missing state.resourceRedCount - fetchRedCount: state.fetchRedCountNow, + resourceRedCount: state.resourceMarkedCount, + fetchRedCount: state.fetchMarkedCount, showStack: state.stackList.length > 0, stackCount: state.stackList.length, - stackRedCount: state.stackRedCount, + stackRedCount: state.stackMarkedCount, profilesCount: state.profilesList.length, storageCount: selectStorageListNow(state).length, storageType: selectStorageType(state), From 4e9473d4b5302da696acee7a8bc5bacbc49189d9 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 18 Nov 2022 18:21:57 +0100 Subject: [PATCH 36/70] fix(ui) - player rewind to 0 --- frontend/app/components/Session_/Player/Controls/Controls.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/Session_/Player/Controls/Controls.js b/frontend/app/components/Session_/Player/Controls/Controls.js index 6bba1d0ad..d41fe2a97 100644 --- a/frontend/app/components/Session_/Player/Controls/Controls.js +++ b/frontend/app/components/Session_/Player/Controls/Controls.js @@ -203,7 +203,7 @@ export default class Controls extends React.Component { backTenSeconds = () => { //shouldComponentUpdate const { time, jump, skipInterval } = this.props; - jump(Math.max(0, time - SKIP_INTERVALS[skipInterval])); + jump(Math.max(1, time - SKIP_INTERVALS[skipInterval])); }; goLive = () => this.props.jump(this.props.endTime); From 3a9d629f8f45f2f4fb28190a69274bfc2996c278 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 18 Nov 2022 18:25:54 +0100 Subject: [PATCH 37/70] fix(ui) - skip duration popup close --- .../Player/Controls/components/PlayerControls.tsx | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/frontend/app/components/Session_/Player/Controls/components/PlayerControls.tsx b/frontend/app/components/Session_/Player/Controls/components/PlayerControls.tsx index b28cd1990..6d42b3c6c 100644 --- a/frontend/app/components/Session_/Player/Controls/components/PlayerControls.tsx +++ b/frontend/app/components/Session_/Player/Controls/components/PlayerControls.tsx @@ -109,7 +109,7 @@ function PlayerControls(props: Props) {
- + {/* */} ( + render={({ close }: any) => (
Jump (Secs) @@ -128,7 +128,7 @@ function PlayerControls(props: Props) {
{ - toggleTooltip(); + close(); setSkipInterval(parseInt(interval, 10)); }} className={cn( @@ -143,14 +143,14 @@ function PlayerControls(props: Props) {
)} > -
+
{/* @ts-ignore */} {currentInterval}s
- + {/* */}
{/* @ts-ignore */} From f9ccfb9c9db8699e64728fb9175ff48edd73fd47 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Fri, 18 Nov 2022 18:51:35 +0100 Subject: [PATCH 38/70] fix(ui) - skip duration button alignments --- .../Session_/Player/Controls/Controls.js | 2 +- .../Controls/components/PlayerControls.tsx | 112 +++++++++--------- 2 files changed, 56 insertions(+), 58 deletions(-) diff --git a/frontend/app/components/Session_/Player/Controls/Controls.js b/frontend/app/components/Session_/Player/Controls/Controls.js index d41fe2a97..005403731 100644 --- a/frontend/app/components/Session_/Player/Controls/Controls.js +++ b/frontend/app/components/Session_/Player/Controls/Controls.js @@ -242,7 +242,7 @@ export default class Controls extends React.Component { controlIcon = (icon, size, action, isBackwards, additionalClasses) => (
diff --git a/frontend/app/components/Session_/Player/Controls/components/PlayerControls.tsx b/frontend/app/components/Session_/Player/Controls/components/PlayerControls.tsx index 6d42b3c6c..bd1225ad4 100644 --- a/frontend/app/components/Session_/Player/Controls/components/PlayerControls.tsx +++ b/frontend/app/components/Session_/Player/Controls/components/PlayerControls.tsx @@ -93,12 +93,11 @@ function PlayerControls(props: Props) { )}
- {/* @ts-ignore */} - - - -
- {/* */} - ( -
-
- Jump (Secs) -
- {Object.keys(skipIntervals).map((interval) => ( -
{ - close(); - setSkipInterval(parseInt(interval, 10)); - }} - className={cn( - 'py-2 px-4 cursor-pointer w-full text-left font-semibold', - 'hover:bg-active-blue border-t border-borderColor-gray-light-shade' - )} - > - {interval} - s -
- ))} + + + +
+ ( +
+
+ Jump (Secs)
- )} - > -
- {/* @ts-ignore */} - - {currentInterval}s - + {Object.keys(skipIntervals).map((interval) => ( +
{ + close(); + setSkipInterval(parseInt(interval, 10)); + }} + className={cn( + 'py-2 px-4 cursor-pointer w-full text-left font-semibold', + 'hover:bg-active-blue border-t border-borderColor-gray-light-shade' + )} + > + {interval} + s +
+ ))}
- - {/* */} -
- {/* @ts-ignore */} - -
+ + - + +
{!live && ( From b26a1c28b6167a5b8db6e5be6c61c2a6cb661b60 Mon Sep 17 00:00:00 2001 From: Alex Kaminskii Date: Fri, 18 Nov 2022 19:43:35 +0100 Subject: [PATCH 39/70] fix(tracker):4.1.7:re-send metadata on start, clean session data logic a bit --- tracker/tracker/src/main/app/index.ts | 63 +++++++++++++------------ tracker/tracker/src/main/app/session.ts | 2 +- 2 files changed, 33 insertions(+), 32 deletions(-) diff --git a/tracker/tracker/src/main/app/index.ts b/tracker/tracker/src/main/app/index.ts index 9c640af29..ce2473ef5 100644 --- a/tracker/tracker/src/main/app/index.ts +++ b/tracker/tracker/src/main/app/index.ts @@ -164,7 +164,7 @@ export default class App { this.worker.onmessage = ({ data }: MessageEvent) => { if (data === 'restart') { this.stop(false) - this.start({ forceNew: true }) // TODO: keep userID & metadata (draw scenarios) + this.start({}, true) } else if (data.type === 'failure') { this.stop(false) this._debug('worker_failed', data.reason) @@ -201,7 +201,6 @@ export default class App { send(message: Message, urgent = false): void { if (this.activityState === ActivityState.NotActive) { - // this.debug.log('SendiTrying to send when not active', message) <- crashing the app return } this.messages.push(message) @@ -370,7 +369,7 @@ export default class App { this.sessionStorage.removeItem(this.options.session_reset_key) } } - private _start(startOpts: StartOptions): Promise { + private _start(startOpts: StartOptions = {}, resetByWorker = false): Promise { if (!this.worker) { return Promise.resolve(UnsuccessfulStart('No worker found: perhaps, CSP is not set.')) } @@ -382,9 +381,19 @@ export default class App { ) } this.activityState = ActivityState.Starting + if (startOpts.sessionHash) { this.session.applySessionHash(startOpts.sessionHash) } + if (startOpts.forceNew) { + // Reset session metadata only if requested directly + this.session.reset() + } + this.session.assign({ + // MBTODO: maybe it would make sense to `forceNew` if the `userID` was changed + userID: startOpts.userID, + metadata: startOpts.metadata, + }) const timestamp = now() this.worker.postMessage({ @@ -397,17 +406,9 @@ export default class App { connAttemptGap: this.options.connAttemptGap, }) - this.session.update({ - // TODO: transparent "session" module logic AND explicit internal api for plugins. - // "updating" with old metadata in order to trigger session's UpdateCallbacks. - // (for the case of internal .start() calls, like on "restart" webworker signal or assistent connection in tracker-assist ) - metadata: startOpts.metadata || this.session.getInfo().metadata, - userID: startOpts.userID, - }) - - const sReset = this.sessionStorage.getItem(this.options.session_reset_key) + const lsReset = this.sessionStorage.getItem(this.options.session_reset_key) !== null this.sessionStorage.removeItem(this.options.session_reset_key) - const shouldReset = startOpts.forceNew || sReset !== null + const needNewSessionID = startOpts.forceNew || lsReset || resetByWorker return window .fetch(this.options.ingestPoint + '/v1/web/start', { @@ -419,7 +420,7 @@ export default class App { ...this.getTrackerInfo(), timestamp, userID: this.session.getInfo().userID, - token: shouldReset ? undefined : this.session.getSessionToken(), + token: needNewSessionID ? 
undefined : this.session.getSessionToken(), deviceMemory, jsHeapSizeLimit, }), @@ -447,29 +448,33 @@ export default class App { const { token, userUUID, - sessionID, projectID, beaconSizeLimit, - startTimestamp, // real startTS, derived from sessionID - delay, + delay, // derived from token + sessionID, // derived from token + startTimestamp, // real startTS (server time), derived from sessionID } = r if ( typeof token !== 'string' || typeof userUUID !== 'string' || - //typeof startTimestamp !== 'number' || - //typeof sessionID !== 'string' || + (typeof startTimestamp !== 'number' && typeof startTimestamp !== 'undefined') || + typeof sessionID !== 'string' || typeof delay !== 'number' || (typeof beaconSizeLimit !== 'number' && typeof beaconSizeLimit !== 'undefined') ) { return Promise.reject(`Incorrect server response: ${JSON.stringify(r)}`) } this.delay = delay - const prevSessionID = this.session.getInfo().sessionID - if (prevSessionID && prevSessionID !== sessionID) { - this.session.reset() - } this.session.setSessionToken(token) - this.session.update({ sessionID, timestamp: startTimestamp || timestamp, projectID }) // TODO: no no-explicit 'any' + this.session.assign({ + sessionID, + timestamp: startTimestamp || timestamp, + projectID, + }) + // (Re)send Metadata for the case of a new session + Object.entries(this.session.getInfo().metadata).forEach(([key, value]) => + this.send(Metadata(key, value)), + ) this.localStorage.setItem(this.options.local_uuid_key, userUUID) this.worker.postMessage({ @@ -506,15 +511,15 @@ export default class App { }) } - start(options: StartOptions = {}): Promise { + start(...args: Parameters): Promise { if (!document.hidden) { - return this._start(options) + return this._start(...args) } else { return new Promise((resolve) => { const onVisibilityChange = () => { if (!document.hidden) { document.removeEventListener('visibilitychange', onVisibilityChange) - resolve(this._start(options)) + resolve(this._start(...args)) } } document.addEventListener('visibilitychange', onVisibilityChange) @@ -538,8 +543,4 @@ export default class App { } } } - restart() { - this.stop(false) - this.start({ forceNew: false }) - } } diff --git a/tracker/tracker/src/main/app/session.ts b/tracker/tracker/src/main/app/session.ts index 5c3db5ac5..4682bcc43 100644 --- a/tracker/tracker/src/main/app/session.ts +++ b/tracker/tracker/src/main/app/session.ts @@ -37,7 +37,7 @@ export default class Session { this.callbacks.forEach((cb) => cb(newInfo)) } - update(newInfo: Partial): void { + assign(newInfo: Partial): void { if (newInfo.userID !== undefined) { // TODO clear nullable/undefinable types this.userID = newInfo.userID From c33279c141acc3a36fd321ecbb40fe3dfccdb880 Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Mon, 21 Nov 2022 11:02:00 +0100 Subject: [PATCH 40/70] fix(backend): fixed wrong error check --- backend/internal/storage/storage.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/internal/storage/storage.go b/backend/internal/storage/storage.go index 9959cc4dd..12a37183f 100644 --- a/backend/internal/storage/storage.go +++ b/backend/internal/storage/storage.go @@ -95,7 +95,7 @@ func (s *Storage) uploadKey(sessID uint64, suffix string, shouldSplit bool, retr // Check file size before download into memory info, err := os.Stat(filePath) - if err != nil { + if err == nil { if info.Size() > s.cfg.MaxFileSize { log.Printf("big file, size: %d, session: %d", info.Size(), sessID) return nil From 4dfebf92e1d84438e03ca9beddea0bbe379f72df Mon Sep 17 
00:00:00 2001 From: Shekar Siri Date: Mon, 21 Nov 2022 11:05:58 +0100 Subject: [PATCH 41/70] change(ui) - xray line alignment --- .../app/components/Session_/OverviewPanel/OverviewPanel.tsx | 2 +- .../Session_/OverviewPanel/components/EventRow/EventRow.tsx | 2 +- .../components/VerticalPointerLine/VerticalPointerLine.tsx | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/frontend/app/components/Session_/OverviewPanel/OverviewPanel.tsx b/frontend/app/components/Session_/OverviewPanel/OverviewPanel.tsx index 360baba2a..a81fcd6b8 100644 --- a/frontend/app/components/Session_/OverviewPanel/OverviewPanel.tsx +++ b/frontend/app/components/Session_/OverviewPanel/OverviewPanel.tsx @@ -86,7 +86,7 @@ function OverviewPanel(props: Props) { -
+
{
Date: Mon, 21 Nov 2022 11:09:51 +0100 Subject: [PATCH 42/70] change(ui) - fetch details show full url --- .../components/FetchBasicDetails/FetchBasicDetails.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/components/shared/FetchDetailsModal/components/FetchBasicDetails/FetchBasicDetails.tsx b/frontend/app/components/shared/FetchDetailsModal/components/FetchBasicDetails/FetchBasicDetails.tsx index 3e3811b27..1088fd727 100644 --- a/frontend/app/components/shared/FetchDetailsModal/components/FetchBasicDetails/FetchBasicDetails.tsx +++ b/frontend/app/components/shared/FetchDetailsModal/components/FetchBasicDetails/FetchBasicDetails.tsx @@ -13,7 +13,7 @@ function FetchBasicDetails({ resource }: Props) {
Name
- {resource.name}
+ {resource.url}
From 06403285c4d9a06daa9838852a09a806262ef3c6 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 21 Nov 2022 11:25:02 +0100 Subject: [PATCH 43/70] change(ui) - request type check includes tracked_fetch --- .../shared/FetchDetailsModal/FetchDetailsModal.tsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/app/components/shared/FetchDetailsModal/FetchDetailsModal.tsx b/frontend/app/components/shared/FetchDetailsModal/FetchDetailsModal.tsx index 2121c9aa1..1ab311bfa 100644 --- a/frontend/app/components/shared/FetchDetailsModal/FetchDetailsModal.tsx +++ b/frontend/app/components/shared/FetchDetailsModal/FetchDetailsModal.tsx @@ -15,6 +15,7 @@ function FetchDetailsModal(props: Props) { const [resource, setResource] = useState(props.resource); const [first, setFirst] = useState(false); const [last, setLast] = useState(false); + const isXHR = resource.type === TYPES.XHR || resource.type === TYPES.FETCH; useEffect(() => { const index = rows.indexOf(resource); @@ -42,9 +43,8 @@ function FetchDetailsModal(props: Props) {
Network Request
- {resource.type === TYPES.XHR && !fetchPresented && } - - {resource.type === TYPES.XHR && fetchPresented && } + {isXHR && !fetchPresented && } + {isXHR && } {rows && rows.length > 0 && (
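A minimal TypeScript sketch of the check consolidated in PATCH 43/70 above, for readers skimming the hunk: the two separate TYPES.XHR conditions are replaced by one isXHR flag, so tracked fetch entries open the same request-details view as plain XHR entries. The TYPES string values and the isXhrLike helper name below are assumptions for illustration only; the patch itself just computes `const isXHR = resource.type === TYPES.XHR || resource.type === TYPES.FETCH;`.

// Sketch only: TYPES values and isXhrLike are hypothetical stand-ins,
// not identifiers introduced by the patch.
const TYPES = { XHR: 'xhr', FETCH: 'fetch' } as const;

interface ResourceLike { type: string }

function isXhrLike(resource: ResourceLike): boolean {
  // Previously only TYPES.XHR matched; tracked fetch requests now match too.
  return resource.type === TYPES.XHR || resource.type === TYPES.FETCH;
}

// Both kinds of request are treated alike when choosing which tabs to render:
console.log(isXhrLike({ type: TYPES.XHR }));   // true
console.log(isXhrLike({ type: TYPES.FETCH })); // true
console.log(isXhrLike({ type: 'img' }));       // false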
From b1c1fdeac47ff0ec4d6bcb875e1b624b2a21b8cd Mon Sep 17 00:00:00 2001 From: sylenien Date: Mon, 21 Nov 2022 11:35:59 +0100 Subject: [PATCH 44/70] fix(ui): performance improvements for storage tab, fix error modal network loop --- .../app/components/Errors/Error/ErrorInfo.js | 6 +- .../components/Session_/Storage/Storage.js | 82 +++++++++---------- 2 files changed, 44 insertions(+), 44 deletions(-) diff --git a/frontend/app/components/Errors/Error/ErrorInfo.js b/frontend/app/components/Errors/Error/ErrorInfo.js index 91c06b617..abfa4e76b 100644 --- a/frontend/app/components/Errors/Error/ErrorInfo.js +++ b/frontend/app/components/Errors/Error/ErrorInfo.js @@ -35,8 +35,10 @@ export default class ErrorInfo extends React.PureComponent { componentDidMount() { this.ensureInstance(); } - componentDidUpdate() { - this.ensureInstance(); + componentDidUpdate(prevProps) { + if (prevProps.errorId !== this.props.errorId || prevProps.errorIdInStore !== this.props.errorIdInStore) { + this.ensureInstance(); + } } next = () => { const { list, errorId } = this.props; diff --git a/frontend/app/components/Session_/Storage/Storage.js b/frontend/app/components/Session_/Storage/Storage.js index 20fa1c703..e0fbb73f6 100644 --- a/frontend/app/components/Session_/Storage/Storage.js +++ b/frontend/app/components/Session_/Storage/Storage.js @@ -14,13 +14,9 @@ import { diff } from 'deep-diff'; import { jump } from 'Player'; import BottomBlock from '../BottomBlock/index'; import DiffRow from './DiffRow'; -import cn from 'classnames'; import stl from './storage.module.css'; -import { List, CellMeasurer, CellMeasurerCache, AutoSizer } from 'react-virtualized' +import { List, CellMeasurer, CellMeasurerCache, AutoSizer } from 'react-virtualized'; -// const STATE = 'STATE'; -// const DIFF = 'DIFF'; -// const TABS = [ DIFF, STATE ].map(tab => ({ text: tab, key: tab })); const ROW_HEIGHT = 90; function getActionsName(type) { @@ -46,7 +42,6 @@ function getActionsName(type) { hideHint, } ) -//@withEnumToggle('activeTab', 'setActiveTab', DIFF) export default class Storage extends React.PureComponent { constructor(props) { super(props); @@ -55,9 +50,9 @@ export default class Storage extends React.PureComponent { this._list = React.createRef(); this.cache = new CellMeasurerCache({ fixedWidth: true, - keyMapper: index => this.props.listNow[index] + keyMapper: (index) => this.props.listNow[index], }); - this._rowRenderer = this._rowRenderer.bind(this) + this._rowRenderer = this._rowRenderer.bind(this); } focusNextButton() { @@ -102,7 +97,10 @@ export default class Storage extends React.PureComponent { } return ( -
+
{stateDiff.map((d, i) => this.renderDiffs(d, i))}
); @@ -150,7 +148,7 @@ export default class Storage extends React.PureComponent { return ; } - renderItem(item, i, prevItem, style, measure) { + renderItem(item, i, prevItem, style) { const { type } = this.props; let src; let name; @@ -177,9 +175,8 @@ export default class Storage extends React.PureComponent { return (
{measure(); this._list.recomputeRowHeights(i)}} > {src === null ? (
@@ -188,7 +185,10 @@ export default class Storage extends React.PureComponent { ) : ( <> {this.renderDiff(item, prevItem, i)} -
+
)} -
+
{typeof item.duration === 'number' && (
{formatMs(item.duration)}
)} @@ -209,7 +212,7 @@ export default class Storage extends React.PureComponent { {'JUMP'} )} - {i + 1 === this.props.listNow.length && i + 1 < this.props.list.length && ( + {i + 1 === this.props.listNow.length && i + 1 < this.props.list.length && ( @@ -220,29 +223,20 @@ export default class Storage extends React.PureComponent { ); } - _rowRenderer({index, key, parent, style}) { - // listNow.map((item, i) => - // this.renderItem(item, i, i > 0 ? listNow[i - 1] : undefined, listNowLen, listLen) - // ) + _rowRenderer({ index, parent, key, style }) { const { listNow } = this.props; - if (!listNow[index]) return console.warn(index, listNow) + if (!listNow[index]) return console.warn(index, listNow); return ( - - {({ measure }) => this.renderItem(listNow[index], index, index > 0 ? listNow[index - 1] : undefined, style, measure)} + + {this.renderItem(listNow[index], index, index > 0 ? listNow[index - 1] : undefined, style)} - ) + ); } render() { - const { type, listNow, list, hintIsHidden } = this.props; + const { type, list, listNow, hintIsHidden } = this.props; const showStore = type !== STORAGE_TYPES.MOBX; return ( @@ -250,17 +244,21 @@ export default class Storage extends React.PureComponent { {list.length > 0 && (
- {showStore &&

{'STATE'}

} + {showStore && ( +

+ {'STATE'} +

+ )} {type !== STORAGE_TYPES.ZUSTAND ? ( -

+

DIFFS

) : null} -

{getActionsName(type)}

-

- - TTE - +

+ {getActionsName(type)} +

+

+ TTE

)} @@ -345,10 +343,10 @@ export default class Storage extends React.PureComponent {
)}
- - {({ height, width }) => ( + + {({ height, width }) => ( { + ref={(element) => { this._list = element; }} deferredMeasurementCache={this.cache} @@ -359,8 +357,8 @@ export default class Storage extends React.PureComponent { width={width} height={height} /> - )} - + )} +
From d6c880b508a8e218788191dfe9fe0f65fa416254 Mon Sep 17 00:00:00 2001 From: sylenien Date: Mon, 21 Nov 2022 11:41:34 +0100 Subject: [PATCH 45/70] fix(ui): fix diffrow expand button --- .../app/components/Session_/Storage/DiffRow.tsx | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/frontend/app/components/Session_/Storage/DiffRow.tsx b/frontend/app/components/Session_/Storage/DiffRow.tsx index 4e6c936a7..ebc54ad4d 100644 --- a/frontend/app/components/Session_/Storage/DiffRow.tsx +++ b/frontend/app/components/Session_/Storage/DiffRow.tsx @@ -50,28 +50,28 @@ function DiffRow({ diff, path }: Props) { : newValue; return ( -
+
20 ? 'cursor-pointer' : ''} onClick={() => setShorten(!shorten)}> {pathStr} {': '} - setShortenOldVal(!shortenOldVal)} className={cn( - 'line-through text-disabled-text', + 'text-disabled-text', diffLengths[0] > 50 ? 'cursor-pointer' : '' )} > - {oldValueSafe || 'undefined'} + {oldValueSafe || 'undefined'} {diffLengths[0] > 50 ? (
setShortenOldVal(!shortenOldVal)} className="cursor-pointer px-1 text-white bg-gray-light rounded text-sm w-fit"> {!shortenOldVal ? 'collapse' : 'expand'}
) : null} -
+
{' -> '} - ) : null} - +
); } From 70cb76e382f55ec84de27e6f702c4ce145940f05 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 21 Nov 2022 12:21:28 +0100 Subject: [PATCH 46/70] change(ui) - fetch plugin check --- .../Session_/OverviewPanel/OverviewPanel.tsx | 14 ++++++++++++-- .../components/TimelinePointer/TimelinePointer.tsx | 3 ++- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/frontend/app/components/Session_/OverviewPanel/OverviewPanel.tsx b/frontend/app/components/Session_/OverviewPanel/OverviewPanel.tsx index a81fcd6b8..de3cbdf5f 100644 --- a/frontend/app/components/Session_/OverviewPanel/OverviewPanel.tsx +++ b/frontend/app/components/Session_/OverviewPanel/OverviewPanel.tsx @@ -23,8 +23,10 @@ interface Props { issuesList: any[]; performanceChartData: any; endTime: number; + fetchPresented?: boolean; } function OverviewPanel(props: Props) { + const { fetchPresented = false } = props; const [dataLoaded, setDataLoaded] = React.useState(false); const [selectedFeatures, setSelectedFeatures] = React.useState([ 'PERFORMANCE', @@ -86,7 +88,10 @@ function OverviewPanel(props: Props) { -
+
( - + )} endTime={props.endTime} message={HELP_MESSAGE[feature]} @@ -132,6 +141,7 @@ export default connect( } )( connectPlayer((state: any) => ({ + fetchPresented: state.fetchList.length > 0, resourceList: state.resourceList .filter((r: any) => r.isRed() || r.isYellow()) .concat(state.fetchList.filter((i: any) => parseInt(i.status) >= 400)) diff --git a/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/TimelinePointer.tsx b/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/TimelinePointer.tsx index 818cdfc4a..5b6434794 100644 --- a/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/TimelinePointer.tsx +++ b/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/TimelinePointer.tsx @@ -12,6 +12,7 @@ interface Props { pointer: any; type: any; noClick?: boolean; + fetchPresented?: boolean; } const TimelinePointer = React.memo((props: Props) => { const { showModal } = useModal(); @@ -35,7 +36,7 @@ const TimelinePointer = React.memo((props: Props) => { if (pointer.tp === 'graph_ql') { showModal(, { right: true }); } else { - showModal(, { right: true }); + showModal(, { right: true }); } } // props.toggleBottomBlock(type); From 8a9fc7a453759902a72e5befe0966929ca74e589 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 21 Nov 2022 12:37:24 +0100 Subject: [PATCH 47/70] change(ui) - fetch url ellipsis in between --- .../FetchBasicDetails/FetchBasicDetails.tsx | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/frontend/app/components/shared/FetchDetailsModal/components/FetchBasicDetails/FetchBasicDetails.tsx b/frontend/app/components/shared/FetchDetailsModal/components/FetchBasicDetails/FetchBasicDetails.tsx index 1088fd727..49e16c00f 100644 --- a/frontend/app/components/shared/FetchDetailsModal/components/FetchBasicDetails/FetchBasicDetails.tsx +++ b/frontend/app/components/shared/FetchDetailsModal/components/FetchBasicDetails/FetchBasicDetails.tsx @@ -1,4 +1,4 @@ -import React from 'react'; +import React, { useMemo } from 'react'; import { formatBytes } from 'App/utils'; import CopyText from 'Shared/CopyText'; import cn from 'classnames'; @@ -8,12 +8,20 @@ interface Props { } function FetchBasicDetails({ resource }: Props) { const _duration = parseInt(resource.duration); + const text = useMemo(() => { + if (resource.url.length > 50) { + const endText = resource.url.split('/').pop(); + return resource.url.substring(0, 50 - endText.length) + '.../' + endText; + } + return resource.url; + }, [resource]); + return (
Name
- {resource.url} + {text}
@@ -45,7 +53,12 @@ function FetchBasicDetails({ resource }: Props) { {resource.status && (
Status
-
+
{resource.status === '200' && (
)} From 806b67d6ca5f00ce1267c05899803e5e35d06098 Mon Sep 17 00:00:00 2001 From: MauricioGarciaS <47052044+MauricioGarciaS@users.noreply.github.com> Date: Mon, 21 Nov 2022 18:17:33 +0100 Subject: [PATCH 48/70] Updated snowflake modules --- ee/connectors/deploy/requirements_snowflake.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ee/connectors/deploy/requirements_snowflake.txt b/ee/connectors/deploy/requirements_snowflake.txt index 983a313d6..895326b32 100644 --- a/ee/connectors/deploy/requirements_snowflake.txt +++ b/ee/connectors/deploy/requirements_snowflake.txt @@ -1,8 +1,8 @@ pandas==1.5.1 kafka-python==2.0.2 SQLAlchemy==1.4.43 -snowflake-connector-python==2.8.1 -snowflake-sqlalchemy==1.4.3 +snowflake-connector-python==2.8.2 +snowflake-sqlalchemy==1.4.4 PyYAML asn1crypto==1.5.1 azure-common==1.1.28 From ac578d927e65d0f14bb8c2571483f956d1bb629a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 21 Nov 2022 18:24:27 +0100 Subject: [PATCH 49/70] feat(DB): remove unused indexes --- ee/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql | 3 +++ scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql | 3 +++ 2 files changed, 6 insertions(+) diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql index 3315df03f..6da0eebed 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql @@ -74,4 +74,7 @@ DROP INDEX IF EXISTS events_common.requests_url_idx; DROP INDEX IF EXISTS events_common.requests_url_gin_idx; DROP INDEX IF EXISTS events_common.requests_url_gin_idx2; +DROP INDEX IF EXISTS events.resources_url_gin_idx; +DROP INDEX IF EXISTS events.resources_url_idx; + COMMIT; \ No newline at end of file diff --git a/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql b/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql index a9ef541a5..c4c146d9b 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql @@ -64,4 +64,7 @@ DROP INDEX IF EXISTS events_common.requests_url_idx; DROP INDEX IF EXISTS events_common.requests_url_gin_idx; DROP INDEX IF EXISTS events_common.requests_url_gin_idx2; +DROP INDEX IF EXISTS events.resources_url_gin_idx; +DROP INDEX IF EXISTS events.resources_url_idx; + COMMIT; \ No newline at end of file From 81795681d0259fabda896256ad516b345d62e389 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 21 Nov 2022 18:38:50 +0100 Subject: [PATCH 50/70] feat(chalice): user url_hostpath instead of url for metrics --- api/chalicelib/core/metrics.py | 20 ++++++++++---------- ee/api/chalicelib/core/metrics.py | 26 +++++++++++++------------- ee/api/chalicelib/core/metrics_exp.py | 24 ++++++++++++------------ 3 files changed, 35 insertions(+), 35 deletions(-) diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py index bf388c093..b25b441ed 100644 --- a/api/chalicelib/core/metrics.py +++ b/api/chalicelib/core/metrics.py @@ -419,7 +419,7 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1), pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, chart=True, data=args) pg_sub_query_chart.append("resources.type = 'img'") - pg_sub_query_chart.append("resources.url = top_img.url") + pg_sub_query_chart.append("resources.url_hostpath = top_img.url_hostpath") pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True, chart=False, 
data=args) @@ -431,13 +431,13 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1), with pg_client.PostgresClient() as cur: pg_query = f"""SELECT * - FROM (SELECT resources.url, + FROM (SELECT resources.url_hostpath, COALESCE(AVG(resources.duration), 0) AS avg_duration, COUNT(resources.session_id) AS sessions_count FROM events.resources INNER JOIN sessions USING (session_id) WHERE {" AND ".join(pg_sub_query_subset)} - GROUP BY resources.url + GROUP BY resources.url_hostpath ORDER BY avg_duration DESC LIMIT 10) AS top_img LEFT JOIN LATERAL ( @@ -485,13 +485,13 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi if resources and len(resources) > 0: for r in resources: if r["type"] == "IMG": - img_constraints.append(f"resources.url = %(val_{len(img_constraints)})s") + img_constraints.append(f"resources.url_hostpath = %(val_{len(img_constraints)})s") img_constraints_vals["val_" + str(len(img_constraints) - 1)] = r['value'] elif r["type"] == "LOCATION": location_constraints.append(f"pages.path = %(val_{len(location_constraints)})s") location_constraints_vals["val_" + str(len(location_constraints) - 1)] = r['value'] else: - request_constraints.append(f"resources.url = %(val_{len(request_constraints)})s") + request_constraints.append(f"resources.url_hostpath = %(val_{len(request_constraints)})s") request_constraints_vals["val_" + str(len(request_constraints) - 1)] = r['value'] params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp} @@ -627,12 +627,12 @@ def search(text, resource_type, project_id, performance=False, pages_only=False, pg_sub_query.append("url_hostpath ILIKE %(value)s") with pg_client.PostgresClient() as cur: pg_query = f"""SELECT key, value - FROM ( SELECT DISTINCT ON (url) ROW_NUMBER() OVER (PARTITION BY type ORDER BY url) AS r, - url AS value, + FROM ( SELECT DISTINCT ON (url_hostpath) ROW_NUMBER() OVER (PARTITION BY type ORDER BY url_hostpath) AS r, + url_hostpath AS value, type AS key FROM events.resources INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} - ORDER BY url, type ASC) AS ranked_values + ORDER BY url_hostpath, type ASC) AS ranked_values WHERE ranked_values.r<=5;""" cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "value": helper.string_to_sql_like(text)})) rows = cur.fetchall() @@ -893,7 +893,7 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days if type is not None: pg_sub_query_subset.append(f"resources.type = '{__get_resource_db_type_from_type(type)}'") if url is not None: - pg_sub_query_subset.append(f"resources.url = %(value)s") + pg_sub_query_subset.append(f"resources.url_hostpath = %(value)s") with pg_client.PostgresClient() as cur: pg_query = f"""WITH resources AS (SELECT resources.duration, timestamp @@ -1009,7 +1009,7 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1), ORDER BY avg DESC LIMIT 10) AS main_list INNER JOIN LATERAL ( - SELECT url, type + SELECT url_hostpath AS url, type FROM events.resources INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 452566194..2a2f6ee20 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -452,18 +452,18 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1), ch_sub_query.append("resources.type = 'img'") 
ch_sub_query_chart = __get_basic_constraints(table_name="resources", round_start=True, data=args) ch_sub_query_chart.append("resources.type = 'img'") - ch_sub_query_chart.append("resources.url IN %(url)s") + ch_sub_query_chart.append("resources.url_hostpath IN %(url)s") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition ch_sub_query_chart += meta_condition with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT resources.url, + ch_query = f"""SELECT resources.url_hostpath AS url, COALESCE(avgOrNull(resources.duration),0) AS avg, COUNT(1) AS count FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query)} AND resources.duration>0 - GROUP BY resources.url ORDER BY avg DESC LIMIT 10;""" + GROUP BY resources.url_hostpath ORDER BY avg DESC LIMIT 10;""" params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} rows = ch.execute(query=ch_query, params=params) @@ -474,13 +474,13 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1), urls = [row["url"] for row in rows] charts = {} - ch_query = f"""SELECT url, + ch_query = f"""SELECT url_hostpath AS url, toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, COALESCE(avgOrNull(resources.duration),0) AS avg FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} WHERE {" AND ".join(ch_sub_query_chart)} AND resources.duration>0 - GROUP BY url, timestamp - ORDER BY url, timestamp;""" + GROUP BY url_hostpath, timestamp + ORDER BY url_hostpath, timestamp;""" params["url"] = urls u_rows = ch.execute(query=ch_query, params=params) for url in urls: @@ -526,13 +526,13 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi if resources and len(resources) > 0: for r in resources: if r["type"] == "IMG": - img_constraints.append(f"resources.url = %(val_{len(img_constraints)})s") + img_constraints.append(f"resources.url_hostpath = %(val_{len(img_constraints)})s") img_constraints_vals["val_" + str(len(img_constraints) - 1)] = r['value'] elif r["type"] == "LOCATION": location_constraints.append(f"pages.url_path = %(val_{len(location_constraints)})s") location_constraints_vals["val_" + str(len(location_constraints) - 1)] = r['value'] else: - request_constraints.append(f"resources.url = %(val_{len(request_constraints)})s") + request_constraints.append(f"resources.url_hostpath = %(val_{len(request_constraints)})s") request_constraints_vals["val_" + str(len(request_constraints) - 1)] = r['value'] params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp} @@ -638,7 +638,7 @@ def search(text, resource_type, project_id, performance=False, pages_only=False, if resource_type == "ALL" and not pages_only and not events_only: ch_sub_query.append("positionUTF8(url_hostpath,%(value)s)!=0") with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT arrayJoin(arraySlice(arrayReverseSort(arrayDistinct(groupArray(url))), 1, 5)) AS value, + ch_query = f"""SELECT arrayJoin(arraySlice(arrayReverseSort(arrayDistinct(groupArray(url_hostpath))), 1, 5)) AS value, type AS key FROM resources WHERE {" AND ".join(ch_sub_query)} @@ -884,7 +884,7 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days if type is not None: 
ch_sub_query_chart.append(f"resources.type = '{__get_resource_db_type_from_type(type)}'") if url is not None: - ch_sub_query_chart.append(f"resources.url = %(value)s") + ch_sub_query_chart.append(f"resources.url_hostpath = %(value)s") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition ch_sub_query_chart.append("resources.duration>0") @@ -966,7 +966,7 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1), ch_sub_query_chart.append("isNotNull(resources.duration)") ch_sub_query_chart.append("resources.duration>0") with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT any(url) AS url, any(type) AS type, + ch_query = f"""SELECT any(url_hostpath) AS url, any(type) AS type, splitByChar('/', resources.url_hostpath)[-1] AS name, COALESCE(avgOrNull(NULLIF(resources.duration,0)),0) AS avg FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} @@ -2179,7 +2179,7 @@ def get_performance_avg_image_load_time(ch, project_id, startTimestamp=TimeUTC.n if resources and len(resources) > 0: for r in resources: if r["type"] == "IMG": - img_constraints.append(f"resources.url = %(val_{len(img_constraints)})s") + img_constraints.append(f"resources.url_hostpath = %(val_{len(img_constraints)})s") img_constraints_vals["val_" + str(len(img_constraints) - 1)] = r['value'] params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, @@ -2254,7 +2254,7 @@ def get_performance_avg_request_load_time(ch, project_id, startTimestamp=TimeUTC if resources and len(resources) > 0: for r in resources: if r["type"] != "IMG" and r["type"] == "LOCATION": - request_constraints.append(f"resources.url = %(val_{len(request_constraints)})s") + request_constraints.append(f"resources.url_hostpath = %(val_{len(request_constraints)})s") request_constraints_vals["val_" + str(len(request_constraints) - 1)] = r['value'] params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp} diff --git a/ee/api/chalicelib/core/metrics_exp.py b/ee/api/chalicelib/core/metrics_exp.py index 9a8af012b..c41676d4a 100644 --- a/ee/api/chalicelib/core/metrics_exp.py +++ b/ee/api/chalicelib/core/metrics_exp.py @@ -462,18 +462,18 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1), ch_sub_query_chart = __get_basic_constraints(table_name="resources", round_start=True, data=args) # ch_sub_query_chart.append("events.event_type='RESOURCE'") ch_sub_query_chart.append("resources.type = 'img'") - ch_sub_query_chart.append("resources.url IN %(url)s") + ch_sub_query_chart.append("resources.url_hostpath IN %(url)s") meta_condition = __get_meta_constraint(args) ch_sub_query += meta_condition ch_sub_query_chart += meta_condition with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT resources.url, + ch_query = f"""SELECT resources.url_hostpath AS url, COALESCE(avgOrNull(resources.duration),0) AS avg, COUNT(1) AS count FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources WHERE {" AND ".join(ch_sub_query)} AND resources.duration>0 - GROUP BY resources.url ORDER BY avg DESC LIMIT 10;""" + GROUP BY resources.url_hostpath ORDER BY avg DESC LIMIT 10;""" params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} rows = ch.execute(query=ch_query, params=params) @@ -484,13 +484,13 @@ def get_slowest_images(project_id, 
startTimestamp=TimeUTC.now(delta_days=-1), urls = [row["url"] for row in rows] charts = {} - ch_query = f"""SELECT url, + ch_query = f"""SELECT url_hostpath AS url, toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp, COALESCE(avgOrNull(resources.duration),0) AS avg FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources WHERE {" AND ".join(ch_sub_query_chart)} AND resources.duration>0 - GROUP BY url, timestamp - ORDER BY url, timestamp;""" + GROUP BY url_hostpath, timestamp + ORDER BY url_hostpath, timestamp;""" params["url"] = urls # print(ch.format(query=ch_query, params=params)) u_rows = ch.execute(query=ch_query, params=params) @@ -538,13 +538,13 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi if resources and len(resources) > 0: for r in resources: if r["type"] == "IMG": - img_constraints.append(f"resources.url = %(val_{len(img_constraints)})s") + img_constraints.append(f"resources.url_hostpath = %(val_{len(img_constraints)})s") img_constraints_vals["val_" + str(len(img_constraints) - 1)] = r['value'] elif r["type"] == "LOCATION": location_constraints.append(f"pages.url_path = %(val_{len(location_constraints)})s") location_constraints_vals["val_" + str(len(location_constraints) - 1)] = r['value'] else: - request_constraints.append(f"resources.url = %(val_{len(request_constraints)})s") + request_constraints.append(f"resources.url_hostpath = %(val_{len(request_constraints)})s") request_constraints_vals["val_" + str(len(request_constraints) - 1)] = r['value'] params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp} @@ -891,7 +891,7 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days if type is not None: ch_sub_query_chart.append(f"resources.type = '{__get_resource_db_type_from_type(type)}'") if url is not None: - ch_sub_query_chart.append(f"resources.url = %(value)s") + ch_sub_query_chart.append(f"resources.url_hostpath = %(value)s") meta_condition = __get_meta_constraint(args) ch_sub_query_chart += meta_condition ch_sub_query_chart.append("resources.duration>0") @@ -974,7 +974,7 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1), ch_sub_query_chart.append("isNotNull(resources.duration)") ch_sub_query_chart.append("resources.duration>0") with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT any(url) AS url, any(type) AS type, name, + ch_query = f"""SELECT any(url_hostpath) AS url, any(type) AS type, name, COALESCE(avgOrNull(NULLIF(resources.duration,0)),0) AS avg FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources WHERE {" AND ".join(ch_sub_query)} @@ -2185,7 +2185,7 @@ def get_performance_avg_image_load_time(ch, project_id, startTimestamp=TimeUTC.n if resources and len(resources) > 0: for r in resources: if r["type"] == "IMG": - img_constraints.append(f"resources.url = %(val_{len(img_constraints)})s") + img_constraints.append(f"resources.url_hostpath = %(val_{len(img_constraints)})s") img_constraints_vals["val_" + str(len(img_constraints) - 1)] = r['value'] params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, @@ -2260,7 +2260,7 @@ def get_performance_avg_request_load_time(ch, project_id, startTimestamp=TimeUTC if resources and len(resources) > 0: for r in resources: if r["type"] != "IMG" and r["type"] == "LOCATION": - request_constraints.append(f"resources.url = 
%(val_{len(request_constraints)})s") + request_constraints.append(f"resources.url_hostpath = %(val_{len(request_constraints)})s") request_constraints_vals["val_" + str(len(request_constraints) - 1)] = r['value'] params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp} From f42f9321bdee91998b8738fca4f479c171733cac Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 21 Nov 2022 19:05:07 +0100 Subject: [PATCH 51/70] feat(peers): use peer:v1.0.0-rc.4 --- peers/package-lock.json | 200 ++-------------------------------------- peers/package.json | 2 +- 2 files changed, 9 insertions(+), 193 deletions(-) diff --git a/peers/package-lock.json b/peers/package-lock.json index a903cfd08..ce7c3c1c4 100644 --- a/peers/package-lock.json +++ b/peers/package-lock.json @@ -10,87 +10,7 @@ "license": "Elastic License 2.0 (ELv2)", "dependencies": { "express": "^4.18.1", - "peer": "^0.6.1" - } - }, - "node_modules/@types/body-parser": { - "version": "1.19.2", - "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", - "integrity": "sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==", - "dependencies": { - "@types/connect": "*", - "@types/node": "*" - } - }, - "node_modules/@types/connect": { - "version": "3.4.35", - "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz", - "integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/cors": { - "version": "2.8.12", - "resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.12.tgz", - "integrity": "sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw==" - }, - "node_modules/@types/express": { - "version": "4.17.13", - "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.13.tgz", - "integrity": "sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA==", - "dependencies": { - "@types/body-parser": "*", - "@types/express-serve-static-core": "^4.17.18", - "@types/qs": "*", - "@types/serve-static": "*" - } - }, - "node_modules/@types/express-serve-static-core": { - "version": "4.17.30", - "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.30.tgz", - "integrity": "sha512-gstzbTWro2/nFed1WXtf+TtrpwxH7Ggs4RLYTLbeVgIkUQOI3WG/JKjgeOU1zXDvezllupjrf8OPIdvTbIaVOQ==", - "dependencies": { - "@types/node": "*", - "@types/qs": "*", - "@types/range-parser": "*" - } - }, - "node_modules/@types/mime": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@types/mime/-/mime-3.0.1.tgz", - "integrity": "sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==" - }, - "node_modules/@types/node": { - "version": "18.7.16", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.7.16.tgz", - "integrity": "sha512-EQHhixfu+mkqHMZl1R2Ovuvn47PUw18azMJOTwSZr9/fhzHNGXAJ0ma0dayRVchprpCj0Kc1K1xKoWaATWF1qg==" - }, - "node_modules/@types/qs": { - "version": "6.9.7", - "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", - "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" - }, - "node_modules/@types/range-parser": { - "version": "1.2.4", - "resolved": 
"https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", - "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" - }, - "node_modules/@types/serve-static": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.0.tgz", - "integrity": "sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg==", - "dependencies": { - "@types/mime": "*", - "@types/node": "*" - } - }, - "node_modules/@types/ws": { - "version": "7.4.7", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-7.4.7.tgz", - "integrity": "sha512-JQbbmxZTZehdc2iszGKs5oC3NFnjeay7mtAWrdt7qNtAVK0g19muApzAy4bm9byz79xa2ZnO/BOBC2R8RC5Lww==", - "dependencies": { - "@types/node": "*" + "peer": "^v1.0.0-rc.4" } }, "node_modules/accepts": { @@ -655,17 +575,12 @@ "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, "node_modules/peer": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/peer/-/peer-0.6.1.tgz", - "integrity": "sha512-zPJSPoZvo+83sPJNrW8o93QTktx7dKk67965RRDDNAIelWw1ZwE6ZmmhsvRrdNRlK0knQb3rR8GBdZlbWzCYJw==", + "version": "1.0.0-rc.4", + "resolved": "https://registry.npmjs.org/peer/-/peer-1.0.0-rc.4.tgz", + "integrity": "sha512-xaNIDm3yWR5m8cuijK7jEFAMOWqNJDGSVJ0+Y3qKW5XTNYsNWEdqtg/Btq9eznGxTTeqQZGNw/SxwyrCVdmmDg==", "dependencies": { - "@types/cors": "^2.8.6", - "@types/express": "^4.17.3", - "@types/ws": "^7.2.3", - "body-parser": "^1.19.0", "cors": "^2.8.5", "express": "^4.17.1", - "uuid": "^3.4.0", "ws": "^7.2.3", "yargs": "^15.3.1" }, @@ -673,7 +588,7 @@ "peerjs": "bin/peerjs" }, "engines": { - "node": ">=10" + "node": ">=14" } }, "node_modules/proxy-addr": { @@ -894,15 +809,6 @@ "node": ">= 0.4.0" } }, - "node_modules/uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", - "bin": { - "uuid": "bin/uuid" - } - }, "node_modules/vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", @@ -989,86 +895,6 @@ } }, "dependencies": { - "@types/body-parser": { - "version": "1.19.2", - "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", - "integrity": "sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==", - "requires": { - "@types/connect": "*", - "@types/node": "*" - } - }, - "@types/connect": { - "version": "3.4.35", - "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz", - "integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==", - "requires": { - "@types/node": "*" - } - }, - "@types/cors": { - "version": "2.8.12", - "resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.12.tgz", - "integrity": "sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw==" - }, - "@types/express": { - "version": "4.17.13", - "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.13.tgz", - "integrity": "sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA==", - "requires": { - "@types/body-parser": "*", - "@types/express-serve-static-core": "^4.17.18", - "@types/qs": "*", - "@types/serve-static": "*" - } - }, - "@types/express-serve-static-core": { - "version": "4.17.30", - "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.30.tgz", - "integrity": "sha512-gstzbTWro2/nFed1WXtf+TtrpwxH7Ggs4RLYTLbeVgIkUQOI3WG/JKjgeOU1zXDvezllupjrf8OPIdvTbIaVOQ==", - "requires": { - "@types/node": "*", - "@types/qs": "*", - "@types/range-parser": "*" - } - }, - "@types/mime": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@types/mime/-/mime-3.0.1.tgz", - "integrity": "sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==" - }, - "@types/node": { - "version": "18.7.16", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.7.16.tgz", - "integrity": "sha512-EQHhixfu+mkqHMZl1R2Ovuvn47PUw18azMJOTwSZr9/fhzHNGXAJ0ma0dayRVchprpCj0Kc1K1xKoWaATWF1qg==" - }, - "@types/qs": { - "version": "6.9.7", - "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", - "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" - }, - "@types/range-parser": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", - "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" - }, - "@types/serve-static": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.0.tgz", - "integrity": "sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg==", - "requires": { - "@types/mime": "*", - "@types/node": "*" - } - }, - "@types/ws": { - "version": "7.4.7", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-7.4.7.tgz", - "integrity": "sha512-JQbbmxZTZehdc2iszGKs5oC3NFnjeay7mtAWrdt7qNtAVK0g19muApzAy4bm9byz79xa2ZnO/BOBC2R8RC5Lww==", - "requires": { - "@types/node": "*" - } - }, "accepts": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", @@ -1482,17 +1308,12 @@ "integrity": 
"sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, "peer": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/peer/-/peer-0.6.1.tgz", - "integrity": "sha512-zPJSPoZvo+83sPJNrW8o93QTktx7dKk67965RRDDNAIelWw1ZwE6ZmmhsvRrdNRlK0knQb3rR8GBdZlbWzCYJw==", + "version": "1.0.0-rc.4", + "resolved": "https://registry.npmjs.org/peer/-/peer-1.0.0-rc.4.tgz", + "integrity": "sha512-xaNIDm3yWR5m8cuijK7jEFAMOWqNJDGSVJ0+Y3qKW5XTNYsNWEdqtg/Btq9eznGxTTeqQZGNw/SxwyrCVdmmDg==", "requires": { - "@types/cors": "^2.8.6", - "@types/express": "^4.17.3", - "@types/ws": "^7.2.3", - "body-parser": "^1.19.0", "cors": "^2.8.5", "express": "^4.17.1", - "uuid": "^3.4.0", "ws": "^7.2.3", "yargs": "^15.3.1" } @@ -1655,11 +1476,6 @@ "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==" }, - "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" - }, "vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", diff --git a/peers/package.json b/peers/package.json index 51f37b5fa..a38ad3343 100644 --- a/peers/package.json +++ b/peers/package.json @@ -19,6 +19,6 @@ "homepage": "https://github.com/openreplay/openreplay#readme", "dependencies": { "express": "^4.18.1", - "peer": "^0.6.1" + "peer": "^v1.0.0-rc.4" } } From 5a1cd27ebca08886afc44a1b3c2bc8c482e5e795 Mon Sep 17 00:00:00 2001 From: Alexander Date: Mon, 21 Nov 2022 19:22:10 +0100 Subject: [PATCH 52/70] [Sink] Async session writer (#826) * feat(backend): implemented async session writer module --- backend/cmd/sink/main.go | 22 +-- backend/internal/sink/oswriter/oswriter.go | 164 ------------------ .../internal/sink/sessionwriter/session.go | 73 ++++++++ backend/internal/sink/sessionwriter/types.go | 8 + backend/internal/sink/sessionwriter/writer.go | 157 +++++++++++++++++ 5 files changed, 245 insertions(+), 179 deletions(-) delete mode 100644 backend/internal/sink/oswriter/oswriter.go create mode 100644 backend/internal/sink/sessionwriter/session.go create mode 100644 backend/internal/sink/sessionwriter/types.go create mode 100644 backend/internal/sink/sessionwriter/writer.go diff --git a/backend/cmd/sink/main.go b/backend/cmd/sink/main.go index 675d965c9..d6ebc0abc 100644 --- a/backend/cmd/sink/main.go +++ b/backend/cmd/sink/main.go @@ -10,7 +10,7 @@ import ( "openreplay/backend/internal/config/sink" "openreplay/backend/internal/sink/assetscache" - "openreplay/backend/internal/sink/oswriter" + "openreplay/backend/internal/sink/sessionwriter" "openreplay/backend/internal/storage" "openreplay/backend/pkg/messages" "openreplay/backend/pkg/monitoring" @@ -32,7 +32,7 @@ func main() { log.Fatalf("%v doesn't exist. 
%v", cfg.FsDir, err) } - writer := oswriter.NewWriter(cfg.FsUlimit, cfg.FsDir) + writer := sessionwriter.NewWriter(cfg.FsUlimit, cfg.FsDir) producer := queue.NewProducer(cfg.MessageSizeLimit, true) defer producer.Close(cfg.ProducerCloseTimeout) @@ -63,9 +63,7 @@ func main() { if err := producer.Produce(cfg.TopicTrigger, msg.SessionID(), msg.Encode()); err != nil { log.Printf("can't send SessionEnd to trigger topic: %s; sessID: %d", err, msg.SessionID()) } - if err := writer.Close(msg.SessionID()); err != nil { - log.Printf("can't close session file: %s", err) - } + writer.Close(msg.SessionID()) return } @@ -139,9 +137,9 @@ func main() { select { case sig := <-sigchan: log.Printf("Caught signal %v: terminating\n", sig) - if err := writer.CloseAll(); err != nil { - log.Printf("closeAll error: %v\n", err) - } + // Sync and stop writer + writer.Stop() + // Commit and stop consumer if err := consumer.Commit(); err != nil { log.Printf("can't commit messages: %s", err) } @@ -149,16 +147,10 @@ func main() { os.Exit(0) case <-tick: counter.Print() - s := time.Now() - if err := writer.SyncAll(); err != nil { - log.Fatalf("sync error: %v\n", err) - } - dur := time.Now().Sub(s).Milliseconds() - s = time.Now() if err := consumer.Commit(); err != nil { log.Printf("can't commit messages: %s", err) } - log.Printf("sync: %d, commit: %d, writer: %s", dur, time.Now().Sub(s).Milliseconds(), writer.Info()) + log.Printf("writer: %s", writer.Info()) default: err := consumer.ConsumeNext() if err != nil { diff --git a/backend/internal/sink/oswriter/oswriter.go b/backend/internal/sink/oswriter/oswriter.go deleted file mode 100644 index ec42d7668..000000000 --- a/backend/internal/sink/oswriter/oswriter.go +++ /dev/null @@ -1,164 +0,0 @@ -package oswriter - -import ( - "fmt" - "math" - "os" - "strconv" - "time" -) - -type FileType int - -const ( - DOM FileType = 1 - DEV FileType = 2 -) - -type Writer struct { - ulimit int - dir string - files map[uint64]*os.File - devtools map[uint64]*os.File - atimes map[uint64]int64 -} - -func NewWriter(ulimit uint16, dir string) *Writer { - return &Writer{ - ulimit: int(ulimit), - dir: dir + "/", - files: make(map[uint64]*os.File, 1024), - devtools: make(map[uint64]*os.File, 1024), - atimes: make(map[uint64]int64, 1024), - } -} - -func (w *Writer) open(key uint64, mode FileType) (*os.File, error) { - if mode == DOM { - file, ok := w.files[key] - if ok { - return file, nil - } - } else { - file, ok := w.devtools[key] - if ok { - return file, nil - } - } - - if len(w.atimes) >= w.ulimit { - var m_k uint64 - var m_t int64 = math.MaxInt64 - for k, t := range w.atimes { - if t < m_t { - m_k = k - m_t = t - } - } - if err := w.Close(m_k); err != nil { - return nil, err - } - } - filePath := w.dir + strconv.FormatUint(key, 10) - if mode == DEV { - filePath += "devtools" - } - file, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) - if err != nil { - return nil, err - } - if mode == DOM { - w.files[key] = file - } else { - w.devtools[key] = file - } - w.atimes[key] = time.Now().Unix() - return file, nil -} - -func (w *Writer) Close(key uint64) error { - // Close dom file - file := w.files[key] - if file == nil { - return nil - } - if err := file.Sync(); err != nil { - return err - } - if err := file.Close(); err != nil { - return err - } - delete(w.files, key) - delete(w.atimes, key) - // Close dev file - file = w.devtools[key] - if file == nil { - return nil - } - if err := file.Sync(); err != nil { - return err - } - if err := file.Close(); err != nil { - return 
err - } - delete(w.devtools, key) - return nil -} - -func (w *Writer) WriteDOM(key uint64, data []byte) error { - return w.Write(key, DOM, data) -} - -func (w *Writer) WriteDEV(key uint64, data []byte) error { - return w.Write(key, DEV, data) -} - -func (w *Writer) Write(key uint64, mode FileType, data []byte) error { - file, err := w.open(key, mode) - if err != nil { - return err - } - _, err = file.Write(data) - return err -} - -func (w *Writer) SyncAll() error { - for _, file := range w.files { - if err := file.Sync(); err != nil { - return err - } - } - for _, file := range w.devtools { - if err := file.Sync(); err != nil { - return err - } - } - return nil -} - -func (w *Writer) CloseAll() error { - for _, file := range w.files { - if err := file.Sync(); err != nil { - return err - } - if err := file.Close(); err != nil { - return err - } - } - w.files = nil - for _, file := range w.devtools { - if err := file.Sync(); err != nil { - return err - } - if err := file.Close(); err != nil { - return err - } - } - w.devtools = nil - w.atimes = nil - return nil -} - -func (w *Writer) Info() string { - return fmt.Sprintf("dom: %d, dev: %d", len(w.files), len(w.devtools)) -} diff --git a/backend/internal/sink/sessionwriter/session.go b/backend/internal/sink/sessionwriter/session.go new file mode 100644 index 000000000..110da588e --- /dev/null +++ b/backend/internal/sink/sessionwriter/session.go @@ -0,0 +1,73 @@ +package sessionwriter + +import ( + "fmt" + "os" + "strconv" + "sync" +) + +type Session struct { + lock *sync.Mutex + dom *os.File + dev *os.File +} + +func NewSession(dir string, id uint64) (*Session, error) { + if id == 0 { + return nil, fmt.Errorf("wrong session id") + } + + filePath := dir + strconv.FormatUint(id, 10) + domFile, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) + if err != nil { + return nil, err + } + filePath += "devtools" + devFile, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644) + if err != nil { + domFile.Close() // should close first file descriptor + return nil, err + } + + return &Session{ + lock: &sync.Mutex{}, + dom: domFile, + dev: devFile, + }, nil +} + +func (s *Session) Lock() { + s.lock.Lock() +} + +func (s *Session) Unlock() { + s.lock.Unlock() +} + +func (s *Session) Write(mode FileType, data []byte) (err error) { + if mode == DOM { + _, err = s.dom.Write(data) + } else { + _, err = s.dev.Write(data) + } + return err +} + +func (s *Session) Sync() error { + domErr := s.dom.Sync() + devErr := s.dev.Sync() + if domErr == nil && devErr == nil { + return nil + } + return fmt.Errorf("dom: %s, dev: %s", domErr, devErr) +} + +func (s *Session) Close() error { + domErr := s.dom.Close() + devErr := s.dev.Close() + if domErr == nil && devErr == nil { + return nil + } + return fmt.Errorf("dom: %s, dev: %s", domErr, devErr) +} diff --git a/backend/internal/sink/sessionwriter/types.go b/backend/internal/sink/sessionwriter/types.go new file mode 100644 index 000000000..a20f61375 --- /dev/null +++ b/backend/internal/sink/sessionwriter/types.go @@ -0,0 +1,8 @@ +package sessionwriter + +type FileType int + +const ( + DOM FileType = 1 + DEV FileType = 2 +) diff --git a/backend/internal/sink/sessionwriter/writer.go b/backend/internal/sink/sessionwriter/writer.go new file mode 100644 index 000000000..1883b4c40 --- /dev/null +++ b/backend/internal/sink/sessionwriter/writer.go @@ -0,0 +1,157 @@ +package sessionwriter + +import ( + "fmt" + "log" + "math" + "sync" + "time" +) + +type SessionWriter struct { + ulimit int + dir 
string + lock *sync.Mutex + sessions *sync.Map + meta map[uint64]int64 + count int + done chan struct{} + stopped chan struct{} +} + +func NewWriter(ulimit uint16, dir string) *SessionWriter { + w := &SessionWriter{ + ulimit: int(ulimit), + dir: dir + "/", + lock: &sync.Mutex{}, + sessions: &sync.Map{}, + meta: make(map[uint64]int64, ulimit), + done: make(chan struct{}), + stopped: make(chan struct{}), + } + go w.synchronizer() + return w +} + +func (w *SessionWriter) WriteDOM(sid uint64, data []byte) error { + return w.write(sid, DOM, data) +} + +func (w *SessionWriter) WriteDEV(sid uint64, data []byte) error { + return w.write(sid, DEV, data) +} + +func (w *SessionWriter) Close(sid uint64) { + w.close(sid) +} + +func (w *SessionWriter) Stop() { + w.done <- struct{}{} + <-w.stopped +} + +func (w *SessionWriter) Info() string { + w.lock.Lock() + count := w.count + w.lock.Unlock() + return fmt.Sprintf("%d files", count) +} + +func (w *SessionWriter) write(sid uint64, mode FileType, data []byte) error { + var ( + sess *Session + err error + ) + + sessObj, ok := w.sessions.Load(sid) + if !ok { + sess, err = NewSession(w.dir, sid) + if err != nil { + return fmt.Errorf("can't write to session: %d, err: %s", sid, err) + } + sess.Lock() + defer sess.Unlock() + w.sessions.Store(sid, sess) + + // Check opened files limit + w.meta[sid] = time.Now().Unix() + if len(w.meta) >= w.ulimit { + var oldSessID uint64 + var minTimestamp int64 = math.MaxInt64 + for sessID, timestamp := range w.meta { + if timestamp < minTimestamp { + oldSessID = sessID + minTimestamp = timestamp + } + } + delete(w.meta, oldSessID) + if err := w.close(oldSessID); err != nil { + log.Printf("can't close session: %s", err) + } + } + } else { + sess = sessObj.(*Session) + sess.Lock() + defer sess.Unlock() + } + + // Update info + w.lock.Lock() + w.count = len(w.meta) + w.lock.Unlock() + + // Write data to session + return sess.Write(mode, data) +} + +func (w *SessionWriter) sync(sid uint64) error { + sessObj, ok := w.sessions.Load(sid) + if !ok { + return fmt.Errorf("can't sync, session: %d not found", sid) + } + sess := sessObj.(*Session) + sess.Lock() + defer sess.Unlock() + + return sess.Sync() +} + +func (w *SessionWriter) close(sid uint64) error { + sessObj, ok := w.sessions.LoadAndDelete(sid) + if !ok { + return fmt.Errorf("can't close, session: %d not found", sid) + } + sess := sessObj.(*Session) + sess.Lock() + defer sess.Unlock() + + if err := sess.Sync(); err != nil { + log.Printf("can't sync session: %d, err: %s", sid, err) + } + err := sess.Close() + return err +} + +func (w *SessionWriter) synchronizer() { + tick := time.Tick(2 * time.Second) + for { + select { + case <-tick: + w.sessions.Range(func(sid, lockObj any) bool { + if err := w.sync(sid.(uint64)); err != nil { + log.Printf("can't sync file descriptor: %s", err) + } + return true + }) + case <-w.done: + w.sessions.Range(func(sid, lockObj any) bool { + if err := w.close(sid.(uint64)); err != nil { + log.Printf("can't close file descriptor: %s", err) + } + return true + }) + w.stopped <- struct{}{} + return + } + } +} From e67c3ec876209068e410cbfc35b41e8dc9e81098 Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 22 Nov 2022 11:53:21 +0100 Subject: [PATCH 53/70] [Sink] Zombie session killer feature (#829) * feat(backend): added zombie session killer feature --- backend/cmd/sink/main.go | 2 +- backend/internal/config/sink/config.go | 3 +- .../internal/sink/sessionwriter/session.go | 20 +++-- backend/internal/sink/sessionwriter/writer.go | 76 ++++++++++++------- 
4 files changed, 66 insertions(+), 35 deletions(-) diff --git a/backend/cmd/sink/main.go b/backend/cmd/sink/main.go index d6ebc0abc..84520dd33 100644 --- a/backend/cmd/sink/main.go +++ b/backend/cmd/sink/main.go @@ -32,7 +32,7 @@ func main() { log.Fatalf("%v doesn't exist. %v", cfg.FsDir, err) } - writer := sessionwriter.NewWriter(cfg.FsUlimit, cfg.FsDir) + writer := sessionwriter.NewWriter(cfg.FsUlimit, cfg.FsDir, cfg.DeadSessionTimeout) producer := queue.NewProducer(cfg.MessageSizeLimit, true) defer producer.Close(cfg.ProducerCloseTimeout) diff --git a/backend/internal/config/sink/config.go b/backend/internal/config/sink/config.go index a7481f93a..a8703a596 100644 --- a/backend/internal/config/sink/config.go +++ b/backend/internal/config/sink/config.go @@ -9,6 +9,7 @@ type Config struct { common.Config FsDir string `env:"FS_DIR,required"` FsUlimit uint16 `env:"FS_ULIMIT,required"` + DeadSessionTimeout int64 `env:"DEAD_SESSION_TIMEOUT,default=600"` GroupSink string `env:"GROUP_SINK,required"` TopicRawWeb string `env:"TOPIC_RAW_WEB,required"` TopicRawIOS string `env:"TOPIC_RAW_IOS,required"` @@ -17,7 +18,7 @@ type Config struct { CacheAssets bool `env:"CACHE_ASSETS,required"` AssetsOrigin string `env:"ASSETS_ORIGIN,required"` ProducerCloseTimeout int `env:"PRODUCER_CLOSE_TIMEOUT,default=15000"` - CacheThreshold int64 `env:"CACHE_THRESHOLD,default=75"` + CacheThreshold int64 `env:"CACHE_THRESHOLD,default=5"` CacheExpiration int64 `env:"CACHE_EXPIRATION,default=120"` } diff --git a/backend/internal/sink/sessionwriter/session.go b/backend/internal/sink/sessionwriter/session.go index 110da588e..f107c387b 100644 --- a/backend/internal/sink/sessionwriter/session.go +++ b/backend/internal/sink/sessionwriter/session.go @@ -5,12 +5,14 @@ import ( "os" "strconv" "sync" + "time" ) type Session struct { - lock *sync.Mutex - dom *os.File - dev *os.File + lock *sync.Mutex + dom *os.File + dev *os.File + lastUpdate time.Time } func NewSession(dir string, id uint64) (*Session, error) { @@ -31,9 +33,10 @@ func NewSession(dir string, id uint64) (*Session, error) { } return &Session{ - lock: &sync.Mutex{}, - dom: domFile, - dev: devFile, + lock: &sync.Mutex{}, + dom: domFile, + dev: devFile, + lastUpdate: time.Now(), }, nil } @@ -51,9 +54,14 @@ func (s *Session) Write(mode FileType, data []byte) (err error) { } else { _, err = s.dev.Write(data) } + s.lastUpdate = time.Now() return err } +func (s *Session) LastUpdate() time.Time { + return s.lastUpdate +} + func (s *Session) Sync() error { domErr := s.dom.Sync() devErr := s.dev.Sync() diff --git a/backend/internal/sink/sessionwriter/writer.go b/backend/internal/sink/sessionwriter/writer.go index 1883b4c40..94ff5dd66 100644 --- a/backend/internal/sink/sessionwriter/writer.go +++ b/backend/internal/sink/sessionwriter/writer.go @@ -9,25 +9,26 @@ import ( ) type SessionWriter struct { - ulimit int - dir string - lock *sync.Mutex - sessions *sync.Map - meta map[uint64]int64 - count int - done chan struct{} - stopped chan struct{} + ulimit int + dir string + zombieSessionTimeout float64 + lock *sync.Mutex + sessions *sync.Map + meta map[uint64]int64 + done chan struct{} + stopped chan struct{} } -func NewWriter(ulimit uint16, dir string) *SessionWriter { +func NewWriter(ulimit uint16, dir string, zombieSessionTimeout int64) *SessionWriter { w := &SessionWriter{ - ulimit: int(ulimit), - dir: dir + "/", - lock: &sync.Mutex{}, - sessions: &sync.Map{}, - meta: make(map[uint64]int64, ulimit), - done: make(chan struct{}), - stopped: make(chan struct{}), + ulimit: 
int(ulimit) / 2, // should divide by 2 because each session has 2 files + dir: dir + "/", + zombieSessionTimeout: float64(zombieSessionTimeout), + lock: &sync.Mutex{}, + sessions: &sync.Map{}, + meta: make(map[uint64]int64, ulimit), + done: make(chan struct{}), + stopped: make(chan struct{}), } go w.synchronizer() return w @@ -51,10 +52,25 @@ func (w *SessionWriter) Stop() { } func (w *SessionWriter) Info() string { + return fmt.Sprintf("%d sessions", w.numberOfSessions()) +} + +func (w *SessionWriter) addSession(sid uint64) { w.lock.Lock() - count := w.count + w.meta[sid] = time.Now().Unix() w.lock.Unlock() - return fmt.Sprintf("%d files", count) +} + +func (w *SessionWriter) deleteSession(sid uint64) { + w.lock.Lock() + delete(w.meta, sid) + w.lock.Unlock() +} + +func (w *SessionWriter) numberOfSessions() int { + w.lock.Lock() + defer w.lock.Unlock() + return len(w.meta) } func (w *SessionWriter) write(sid uint64, mode FileType, data []byte) error { @@ -71,10 +87,8 @@ func (w *SessionWriter) write(sid uint64, mode FileType, data []byte) error { } sess.Lock() defer sess.Unlock() - w.sessions.Store(sid, sess) // Check opened files limit - w.meta[sid] = time.Now().Unix() if len(w.meta) >= w.ulimit { var oldSessID uint64 var minTimestamp int64 = math.MaxInt64 @@ -84,22 +98,20 @@ func (w *SessionWriter) write(sid uint64, mode FileType, data []byte) error { minTimestamp = timestamp } } - delete(w.meta, oldSessID) if err := w.close(oldSessID); err != nil { log.Printf("can't close session: %s", err) } } + + // Add new session to manager + w.sessions.Store(sid, sess) + w.addSession(sid) } else { sess = sessObj.(*Session) sess.Lock() defer sess.Unlock() } - // Update info - w.lock.Lock() - w.count = len(w.meta) - w.lock.Unlock() - // Write data to session return sess.Write(mode, data) } @@ -113,7 +125,16 @@ func (w *SessionWriter) sync(sid uint64) error { sess.Lock() defer sess.Unlock() - return sess.Sync() + err := sess.Sync() + if time.Now().Sub(sess.LastUpdate()).Seconds() > w.zombieSessionTimeout { + if err != nil { + log.Printf("can't sync session: %d, err: %s", sid, err) + } + // Close "zombie" session + err = sess.Close() + w.deleteSession(sid) + } + return err } func (w *SessionWriter) close(sid uint64) error { @@ -129,6 +150,7 @@ func (w *SessionWriter) close(sid uint64) error { log.Printf("can't sync session: %d, err: %s", sid, err) } err := sess.Close() + w.deleteSession(sid) return err } From 5d37d2da1eff49ec6d4ae31c8e1a776a02f3544a Mon Sep 17 00:00:00 2001 From: Dayan Graham Date: Tue, 22 Nov 2022 12:10:01 +0100 Subject: [PATCH 54/70] =?UTF-8?q?feat(backend):=20enable=20Kerberos=20auth?= =?UTF-8?q?entication=20for=20Kafka=20communication=E2=80=A6=20(#807)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(backend): enable Kerberos authentication for Kafka communications between services for EnterpriseEdition * feat(backend): put default value for KAFKA_USE_KERBEROS * feat(backend): Add Kerberos auth for Kafka - Update with comments for the envvars that configure the Kerberos auth --- backend/Dockerfile | 18 +++++++++++++++--- backend/Dockerfile.bundle | 18 +++++++++++++++--- ee/backend/pkg/kafka/consumer.go | 10 ++++++++++ ee/backend/pkg/kafka/producer.go | 9 +++++++++ 4 files changed, 49 insertions(+), 6 deletions(-) diff --git a/backend/Dockerfile b/backend/Dockerfile index 4e0064e9d..0d7cad075 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -1,6 +1,6 @@ FROM golang:1.18-alpine3.15 AS prepare -RUN apk add --no-cache git 
openssh openssl-dev pkgconf gcc g++ make libc-dev bash
+RUN apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash librdkafka-dev cyrus-sasl cyrus-sasl-gssapiv2 krb5

 WORKDIR /root

@@ -15,11 +15,11 @@ COPY pkg pkg
 COPY internal internal

 ARG SERVICE_NAME
-RUN CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o service -tags musl openreplay/backend/cmd/$SERVICE_NAME
+RUN CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o service -tags dynamic openreplay/backend/cmd/$SERVICE_NAME

 FROM alpine AS entrypoint
-RUN apk add --no-cache ca-certificates
+RUN apk add --no-cache ca-certificates librdkafka-dev cyrus-sasl cyrus-sasl-gssapiv2 krb5
 RUN adduser -u 1001 openreplay -D

 ENV TZ=UTC \
@@ -29,6 +29,18 @@ ENV TZ=UTC \
     UAPARSER_FILE=/home/openreplay/regexes.yaml \
     HTTP_PORT=8080 \
    KAFKA_USE_SSL=true \
+    # KAFKA_USE_KERBEROS should be set true if you wish to use Kerberos auth for Kafka
+    KAFKA_USE_KERBEROS=false \
+    # KERBEROS_SERVICE_NAME is the primary name of the Brokers configured in the Broker JAAS file
+    KERBEROS_SERVICE_NAME="" \
+    # KERBEROS_PRINCIPAL is this client's principal name
+    KERBEROS_PRINCIPAL="" \
+    # KERBEROS_KEYTAB_LOCATION is the absolute path to the keytab to be used for authentication
+    KERBEROS_KEYTAB_LOCATION="" \
+    # KAFKA_SSL_KEY is the absolute path to the client's private key (PEM) used for authentication
+    KAFKA_SSL_KEY="" \
+    # KAFKA_SSL_CERT is the absolute path to the client's certificate (PEM) used for authentication
+    KAFKA_SSL_CERT="" \
     KAFKA_MAX_POLL_INTERVAL_MS=400000 \
     REDIS_STREAMS_MAX_LEN=10000 \
     TOPIC_RAW_WEB=raw \
diff --git a/backend/Dockerfile.bundle b/backend/Dockerfile.bundle
index 407a7b9d8..19c3b325c 100644
--- a/backend/Dockerfile.bundle
+++ b/backend/Dockerfile.bundle
@@ -1,6 +1,6 @@
 FROM golang:1.18-alpine3.15 AS prepare

-RUN apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash
+RUN apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash librdkafka-dev cyrus-sasl-gssapi cyrus-sasl-devel

 WORKDIR /root

@@ -14,11 +14,11 @@ COPY cmd cmd
 COPY pkg pkg
 COPY internal internal

-RUN for name in assets db ender http integrations sink storage;do CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o bin/$name -tags musl openreplay/backend/cmd/$name; done
+RUN for name in assets db ender http integrations sink storage;do CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o bin/$name -tags dynamic openreplay/backend/cmd/$name; done

 FROM alpine AS entrypoint
 #FROM pygmy/alpine-tini:latest
-RUN apk add --no-cache ca-certificates
+RUN apk add --no-cache ca-certificates librdkafka-dev cyrus-sasl-gssapi cyrus-sasl-devel pkgconf

 ENV TZ=UTC \
     FS_ULIMIT=1000 \
@@ -28,6 +28,18 @@ ENV TZ=UTC \
     HTTP_PORT=80 \
     BEACON_SIZE_LIMIT=7000000 \
     KAFKA_USE_SSL=true \
+    # KAFKA_USE_KERBEROS should be set true if you wish to use Kerberos auth for Kafka
+    KAFKA_USE_KERBEROS=false \
+    # KERBEROS_SERVICE_NAME is the primary name of the Brokers configured in the Broker JAAS file
+    KERBEROS_SERVICE_NAME="" \
+    # KERBEROS_PRINCIPAL is this client's principal name
+    KERBEROS_PRINCIPAL="" \
+    # KERBEROS_KEYTAB_LOCATION is the absolute path to the keytab to be used for authentication
+    KERBEROS_KEYTAB_LOCATION="" \
+    # KAFKA_SSL_KEY is the absolute path to the client's private key (PEM) used for authentication
+    KAFKA_SSL_KEY="" \
+    # KAFKA_SSL_CERT is the absolute path to the client's certificate (PEM) used for authentication
+    KAFKA_SSL_CERT="" \
     KAFKA_MAX_POLL_INTERVAL_MS=400000 \
     REDIS_STREAMS_MAX_LEN=3000 \
     TOPIC_RAW_WEB=raw \
diff --git a/ee/backend/pkg/kafka/consumer.go 
b/ee/backend/pkg/kafka/consumer.go index b951fcd9c..14f8d5a68 100644 --- a/ee/backend/pkg/kafka/consumer.go +++ b/ee/backend/pkg/kafka/consumer.go @@ -47,6 +47,16 @@ func NewConsumer( kafkaConfig.SetKey("ssl.key.location", os.Getenv("KAFKA_SSL_KEY")) kafkaConfig.SetKey("ssl.certificate.location", os.Getenv("KAFKA_SSL_CERT")) } + + // Apply Kerberos configuration + if env.Bool("KAFKA_USE_KERBEROS") { + kafkaConfig.SetKey("security.protocol", "sasl_plaintext") + kafkaConfig.SetKey("sasl.mechanisms", "GSSAPI") + kafkaConfig.SetKey("sasl.kerberos.service.name", os.Getenv("KERBEROS_SERVICE_NAME")) + kafkaConfig.SetKey("sasl.kerberos.principal", os.Getenv("KERBEROS_PRINCIPAL")) + kafkaConfig.SetKey("sasl.kerberos.keytab", os.Getenv("KERBEROS_KEYTAB_LOCATION")) + } + c, err := kafka.NewConsumer(kafkaConfig) if err != nil { log.Fatalln(err) diff --git a/ee/backend/pkg/kafka/producer.go b/ee/backend/pkg/kafka/producer.go index 6fb893b7a..f895241a7 100644 --- a/ee/backend/pkg/kafka/producer.go +++ b/ee/backend/pkg/kafka/producer.go @@ -30,6 +30,15 @@ func NewProducer(messageSizeLimit int, useBatch bool) *Producer { kafkaConfig.SetKey("ssl.key.location", os.Getenv("KAFKA_SSL_KEY")) kafkaConfig.SetKey("ssl.certificate.location", os.Getenv("KAFKA_SSL_CERT")) } + // Apply Kerberos configuration + if env.Bool("KAFKA_USE_KERBEROS") { + kafkaConfig.SetKey("security.protocol", "sasl_plaintext") + kafkaConfig.SetKey("sasl.mechanisms", "GSSAPI") + kafkaConfig.SetKey("sasl.kerberos.service.name", os.Getenv("KERBEROS_SERVICE_NAME")) + kafkaConfig.SetKey("sasl.kerberos.principal", os.Getenv("KERBEROS_PRINCIPAL")) + kafkaConfig.SetKey("sasl.kerberos.keytab", os.Getenv("KERBEROS_KEYTAB_LOCATION")) + } + producer, err := kafka.NewProducer(kafkaConfig) if err != nil { log.Fatalln(err) From 1b5b04d122a82c5a6439da69aeb10776cce704b5 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Tue, 22 Nov 2022 12:38:12 +0100 Subject: [PATCH 55/70] chore(actions): skip nothing to build Signed-off-by: rjshrjndrn --- .github/workflows/workers-ee.yaml | 7 ++++++- .github/workflows/workers.yaml | 8 +++++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/.github/workflows/workers-ee.yaml b/.github/workflows/workers-ee.yaml index b2b202e93..e434d2716 100644 --- a/.github/workflows/workers-ee.yaml +++ b/.github/workflows/workers-ee.yaml @@ -86,7 +86,11 @@ jobs: ;; esac - [[ $(cat /tmp/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 0) + if [[ $(cat /tmp/images_to_build.txt) == "" ]]; then + echo "Nothing to build here" + touch /tmp/nothing-to-build-here + exit 0 + fi # # Pushing image to registry # @@ -118,6 +122,7 @@ jobs: # Deploying image to environment. # set -x + [[ -f /tmp/nothing-to-build-here ]] && exit 0 cd scripts/helmcharts/ ## Update secerts diff --git a/.github/workflows/workers.yaml b/.github/workflows/workers.yaml index 4283adef2..e222e00fb 100644 --- a/.github/workflows/workers.yaml +++ b/.github/workflows/workers.yaml @@ -86,7 +86,11 @@ jobs: ;; esac - [[ $(cat /tmp/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 0) + if [[ $(cat /tmp/images_to_build.txt) == "" ]]; then + echo "Nothing to build here" + touch /tmp/nothing-to-build-here + exit 0 + fi # # Pushing image to registry # @@ -116,6 +120,8 @@ jobs: # # Deploying image to environment. 
# + set -x + [[ -f /tmp/nothing-to-build-here ]] && exit 0 cd scripts/helmcharts/ ## Update secerts From abe780cb971b87de963da63a6d16d5cf3571e37f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 22 Nov 2022 15:12:21 +0100 Subject: [PATCH 56/70] feat(chalice): upgraded dependencies feat(alerts): upgraded dependencies feat(crons): upgraded dependencies --- api/requirements-alerts.txt | 10 +++++----- api/requirements.txt | 10 +++++----- ee/api/requirements-alerts.txt | 10 +++++----- ee/api/requirements-crons.txt | 10 +++++----- ee/api/requirements.txt | 10 +++++----- 5 files changed, 25 insertions(+), 25 deletions(-) diff --git a/api/requirements-alerts.txt b/api/requirements-alerts.txt index b30e65988..ff36f3099 100644 --- a/api/requirements-alerts.txt +++ b/api/requirements-alerts.txt @@ -1,15 +1,15 @@ requests==2.28.1 urllib3==1.26.12 -boto3==1.26.4 +boto3==1.26.14 pyjwt==2.6.0 psycopg2-binary==2.9.5 -elasticsearch==8.5.0 +elasticsearch==8.5.1 jira==3.4.1 -fastapi==0.86.0 -uvicorn[standard]==0.19.0 +fastapi==0.87.0 +uvicorn[standard]==0.20.0 python-decouple==3.6 pydantic[email]==1.10.2 -apscheduler==3.9.1 \ No newline at end of file +apscheduler==3.9.1.post1 \ No newline at end of file diff --git a/api/requirements.txt b/api/requirements.txt index b30e65988..ff36f3099 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -1,15 +1,15 @@ requests==2.28.1 urllib3==1.26.12 -boto3==1.26.4 +boto3==1.26.14 pyjwt==2.6.0 psycopg2-binary==2.9.5 -elasticsearch==8.5.0 +elasticsearch==8.5.1 jira==3.4.1 -fastapi==0.86.0 -uvicorn[standard]==0.19.0 +fastapi==0.87.0 +uvicorn[standard]==0.20.0 python-decouple==3.6 pydantic[email]==1.10.2 -apscheduler==3.9.1 \ No newline at end of file +apscheduler==3.9.1.post1 \ No newline at end of file diff --git a/ee/api/requirements-alerts.txt b/ee/api/requirements-alerts.txt index fce0ba6cc..02042a778 100644 --- a/ee/api/requirements-alerts.txt +++ b/ee/api/requirements-alerts.txt @@ -1,18 +1,18 @@ requests==2.28.1 urllib3==1.26.12 -boto3==1.26.4 +boto3==1.26.14 pyjwt==2.6.0 psycopg2-binary==2.9.5 -elasticsearch==8.5.0 +elasticsearch==8.5.1 jira==3.4.1 -fastapi==0.86.0 -uvicorn[standard]==0.19.0 +fastapi==0.87.0 +uvicorn[standard]==0.20.0 python-decouple==3.6 pydantic[email]==1.10.2 -apscheduler==3.9.1 +apscheduler==3.9.1.post1 clickhouse-driver==0.2.4 python-multipart==0.0.5 \ No newline at end of file diff --git a/ee/api/requirements-crons.txt b/ee/api/requirements-crons.txt index fce0ba6cc..02042a778 100644 --- a/ee/api/requirements-crons.txt +++ b/ee/api/requirements-crons.txt @@ -1,18 +1,18 @@ requests==2.28.1 urllib3==1.26.12 -boto3==1.26.4 +boto3==1.26.14 pyjwt==2.6.0 psycopg2-binary==2.9.5 -elasticsearch==8.5.0 +elasticsearch==8.5.1 jira==3.4.1 -fastapi==0.86.0 -uvicorn[standard]==0.19.0 +fastapi==0.87.0 +uvicorn[standard]==0.20.0 python-decouple==3.6 pydantic[email]==1.10.2 -apscheduler==3.9.1 +apscheduler==3.9.1.post1 clickhouse-driver==0.2.4 python-multipart==0.0.5 \ No newline at end of file diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index 23fc32fe7..ac4f27a9d 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -1,18 +1,18 @@ requests==2.28.1 urllib3==1.26.12 -boto3==1.26.4 +boto3==1.26.14 pyjwt==2.6.0 psycopg2-binary==2.9.5 -elasticsearch==8.5.0 +elasticsearch==8.5.1 jira==3.4.1 -fastapi==0.86.0 -uvicorn[standard]==0.19.0 +fastapi==0.87.0 +uvicorn[standard]==0.20.0 python-decouple==3.6 pydantic[email]==1.10.2 -apscheduler==3.9.1 +apscheduler==3.9.1.post1 clickhouse-driver==0.2.4 
python3-saml==1.14.0 From a2a956b4d454ae3cf74e2a7cd35e80ce96dba02d Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Tue, 22 Nov 2022 19:10:17 +0100 Subject: [PATCH 57/70] change(ui) - wip --- .../DevTools/BottomBlock/BottomBlock.js | 24 +- .../DevTools/ConsolePanel/ConsolePanel.tsx | 1 - .../shared/DevTools/JumpButton/JumpButton.tsx | 4 +- .../DevTools/NetworkPanel/NetworkPanel.tsx | 208 ++++++++++-------- .../shared/DevTools/TimeTable/TimeTable.tsx | 73 +++--- frontend/app/mstore/sessionStore.ts | 140 +++++++----- .../MessageDistributor/MessageDistributor.ts | 1 - frontend/app/styles/general.css | 4 + frontend/package.json | 1 + 9 files changed, 267 insertions(+), 189 deletions(-) diff --git a/frontend/app/components/shared/DevTools/BottomBlock/BottomBlock.js b/frontend/app/components/shared/DevTools/BottomBlock/BottomBlock.js index 069757e60..8b7826755 100644 --- a/frontend/app/components/shared/DevTools/BottomBlock/BottomBlock.js +++ b/frontend/app/components/shared/DevTools/BottomBlock/BottomBlock.js @@ -1,17 +1,29 @@ -import React from 'react'; +import React, { useEffect } from 'react'; import cn from 'classnames'; import stl from './bottomBlock.module.css'; +let timer = null; const BottomBlock = ({ children = null, className = '', additionalHeight = 0, + onMouseEnter = () => {}, + onMouseLeave = () => {}, ...props -}) => ( -
- { children } -
-); +}) => { + useEffect(() => {}, []); + + return ( +
+ {children} +
+ ); +}; BottomBlock.displayName = 'BottomBlock'; diff --git a/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx b/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx index 320f76341..8f5835cfa 100644 --- a/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx +++ b/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx @@ -4,7 +4,6 @@ import Log from 'Types/session/log'; import BottomBlock from '../BottomBlock'; import { LEVEL } from 'Types/session/log'; import { Tabs, Input, Icon, NoContent } from 'UI'; -// import Autoscroll from 'App/components/Session_/Autoscroll'; import cn from 'classnames'; import ConsoleRow from '../ConsoleRow'; import { getRE } from 'App/utils'; diff --git a/frontend/app/components/shared/DevTools/JumpButton/JumpButton.tsx b/frontend/app/components/shared/DevTools/JumpButton/JumpButton.tsx index c52b0cffd..31307fd9b 100644 --- a/frontend/app/components/shared/DevTools/JumpButton/JumpButton.tsx +++ b/frontend/app/components/shared/DevTools/JumpButton/JumpButton.tsx @@ -6,10 +6,10 @@ interface Props { tooltip?: string; } function JumpButton(props: Props) { - const { tooltip = '' } = props; + const { tooltip } = props; return (
- +
{ diff --git a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx index e62bf25ff..8c087d868 100644 --- a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx +++ b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx @@ -1,5 +1,5 @@ -import React, { useState } from 'react'; -import { QuestionMarkHint, Tooltip, Tabs, Input, NoContent, Icon, Toggler, Button } from 'UI'; +import React, { useEffect, useMemo, useRef, useState } from 'react'; +import { Tooltip, Tabs, Input, NoContent, Icon, Toggler } from 'UI'; import { getRE } from 'App/utils'; import Resource, { TYPES } from 'Types/session/resource'; import { formatBytes } from 'App/utils'; @@ -12,6 +12,11 @@ import { Duration } from 'luxon'; import { connectPlayer, jump } from 'Player'; import { useModal } from 'App/components/Modal'; import FetchDetailsModal from 'Shared/FetchDetailsModal'; +import { useStore } from 'App/mstore'; +import { useObserver } from 'mobx-react-lite'; + +const INDEX_KEY = 'networkIndex'; +const INDEX_KEY_ACTIVE = 'networkActive'; const ALL = 'ALL'; const XHR = 'xhr'; @@ -67,37 +72,6 @@ export function renderStart(r: any) { ); } -// const renderXHRText = () => ( -// -// {XHR} -// -// Use our{' '} -// -// Fetch plugin -// -// {' to capture HTTP requests and responses, including status codes and bodies.'}
-// We also provide{' '} -// -// support for GraphQL -// -// {' for easy debugging of your queries.'} -// -// } -// className="ml-1" -// /> -//
-// ); - function renderSize(r: any) { if (r.responseBodySize) return formatBytes(r.responseBodySize); let triggerText; @@ -160,45 +134,76 @@ interface Props { loadTime: any; playing: boolean; domBuildingTime: any; - currentIndex: any; time: any; } function NetworkPanel(props: Props) { - const { - resources, - time, - currentIndex, - domContentLoadedTime, - loadTime, - playing, - domBuildingTime, - fetchList, - } = props; - const { showModal, hideModal } = useModal(); + const { resources, time, domContentLoadedTime, loadTime, domBuildingTime, fetchList } = props; + const { showModal } = useModal(); const [activeTab, setActiveTab] = useState(ALL); const [sortBy, setSortBy] = useState('time'); const [sortAscending, setSortAscending] = useState(true); const [filter, setFilter] = useState(''); + const [filteredList, setFilteredList] = useState([]); const [showOnlyErrors, setShowOnlyErrors] = useState(false); - const [activeRequest, setActiveRequest] = useState(false ) const onTabClick = (activeTab: any) => setActiveTab(activeTab); const onFilterChange = ({ target: { value } }: any) => setFilter(value); const additionalHeight = 0; const fetchPresented = fetchList.length > 0; + const { + sessionStore: { devTools }, + } = useStore(); - const resourcesSize = resources.reduce( - (sum: any, { decodedBodySize }: any) => sum + (decodedBodySize || 0), - 0 - ); + const activeIndex = useObserver(() => devTools[INDEX_KEY]); + const activeClick = useObserver(() => devTools[INDEX_KEY_ACTIVE]); + const [pauseSync, setPauseSync] = useState(!!activeClick); + const synRef: any = useRef({}); - const transferredSize = resources.reduce( - (sum: any, { headerSize, encodedBodySize }: any) => - sum + (headerSize || 0) + (encodedBodySize || 0), - 0 - ); + synRef.current = { + pauseSync, + activeIndex, + activeClick, + }; - const filterRE = getRE(filter, 'i'); - let filtered = React.useMemo(() => { + useEffect(() => { + if (!!activeClick) { + setPauseSync(true); + devTools.update(INDEX_KEY, activeClick); + console.log('mounting at: ', activeClick); + } + return () => { + if (synRef.current.pauseSync) { + console.log('unmouting at: ', synRef.current.activeIndex); + devTools.update(INDEX_KEY_ACTIVE, synRef.current.activeIndex); + } + }; + }, []); + + useEffect(() => { + const lastIndex = filteredList.filter((item: any) => item.time <= time).length - 1; + if (lastIndex !== activeIndex && !pauseSync) { + devTools.update(INDEX_KEY, lastIndex); + } + }, [time]); + + const { resourcesSize, transferredSize } = useMemo(() => { + const resourcesSize = resources.reduce( + (sum: any, { decodedBodySize }: any) => sum + (decodedBodySize || 0), + 0 + ); + + const transferredSize = resources.reduce( + (sum: any, { headerSize, encodedBodySize }: any) => + sum + (headerSize || 0) + (encodedBodySize || 0), + 0 + ); + return { + resourcesSize, + transferredSize, + }; + }, [resources]); + + useEffect(() => { + const filterRE = getRE(filter, 'i'); let list = resources; fetchList.forEach( (fetchCall: any) => @@ -209,9 +214,9 @@ function NetworkPanel(props: Props) { return compare(a, b, sortBy); }); - if (!sortAscending) { - list = list.reverse(); - } + // if (!sortAscending) { + // list = list.reverse(); + // } list = list.filter( ({ type, name, status, success }: any) => @@ -219,41 +224,53 @@ function NetworkPanel(props: Props) { (activeTab === ALL || type === TAB_TO_TYPE_MAP[activeTab]) && (showOnlyErrors ? 
parseInt(status) >= 400 || !success : true) ); - return list; - }, [filter, sortBy, sortAscending, showOnlyErrors, activeTab]); + setFilteredList(list); + }, [resources, filter, sortBy, sortAscending, showOnlyErrors, activeTab]); - // const lastIndex = currentIndex || filtered.filter((item: any) => item.time <= time).length - 1; - const referenceLines = []; - if (domContentLoadedTime != null) { - referenceLines.push({ - time: domContentLoadedTime.time, - color: DOM_LOADED_TIME_COLOR, - }); - } - if (loadTime != null) { - referenceLines.push({ - time: loadTime.time, - color: LOAD_TIME_COLOR, - }); - } + const referenceLines = useMemo(() => { + const arr = []; + + if (domContentLoadedTime != null) { + arr.push({ + time: domContentLoadedTime.time, + color: DOM_LOADED_TIME_COLOR, + }); + } + if (loadTime != null) { + arr.push({ + time: loadTime.time, + color: LOAD_TIME_COLOR, + }); + } + + return arr; + }, []); const onRowClick = (row: any) => { - showModal(, { - right: true, - }); + showModal( + , + { + right: true, + } + ); + devTools.update(INDEX_KEY, filteredList.indexOf(row)); + setPauseSync(true); }; const handleSort = (sortKey: string) => { if (sortKey === sortBy) { setSortAscending(!sortAscending); - // setSortBy('time'); } setSortBy(sortKey); }; return ( - + setPauseSync(true)} + >
Network @@ -287,7 +304,7 @@ function NetworkPanel(props: Props) { />
- + } size="small" - show={filtered.length === 0} + show={filteredList.length === 0} > { + setPauseSync(true); + devTools.update(INDEX_KEY, filteredList.indexOf(row)); + jump(row.time); + }} sortBy={sortBy} sortAscending={sortAscending} - // activeIndex={lastIndex} + activeIndex={activeIndex} > {[ // { @@ -348,28 +369,28 @@ function NetworkPanel(props: Props) { label: 'Status', dataKey: 'status', width: 70, - onClick: handleSort, + // onClick: handleSort, }, { label: 'Type', dataKey: 'type', width: 90, render: renderType, - onClick: handleSort, + // onClick: handleSort, }, { label: 'Name', width: 240, dataKey: 'name', render: renderName, - onClick: handleSort, + // onClick: handleSort, }, { label: 'Size', width: 80, dataKey: 'decodedBodySize', render: renderSize, - onClick: handleSort, + // onClick: handleSort, hidden: activeTab === XHR, }, { @@ -377,7 +398,7 @@ function NetworkPanel(props: Props) { width: 80, dataKey: 'duration', render: renderDuration, - onClick: handleSort, + // onClick: handleSort, }, ]} @@ -391,9 +412,12 @@ function NetworkPanel(props: Props) { export default connectPlayer((state: any) => ({ location: state.location, resources: state.resourceList, - fetchList: state.fetchList.map((i: any) => Resource({ ...i.toJS(), type: TYPES.XHR })), + fetchList: state.fetchList.map((i: any) => + Resource({ ...i.toJS(), type: TYPES.XHR, time: i.time < 0 ? 0 : i.time }) + ), domContentLoadedTime: state.domContentLoadedTime, loadTime: state.loadTime, + time: state.time, playing: state.playing, domBuildingTime: state.domBuildingTime, }))(NetworkPanel); diff --git a/frontend/app/components/shared/DevTools/TimeTable/TimeTable.tsx b/frontend/app/components/shared/DevTools/TimeTable/TimeTable.tsx index 2b242f331..1a224d968 100644 --- a/frontend/app/components/shared/DevTools/TimeTable/TimeTable.tsx +++ b/frontend/app/components/shared/DevTools/TimeTable/TimeTable.tsx @@ -145,8 +145,19 @@ export default class TimeTable extends React.PureComponent { scroller = React.createRef(); autoScroll = true; - componentDidMount() { - if (this.scroller.current) { + // componentDidMount() { + // if (this.scroller.current) { + // this.scroller.current.scrollToRow(this.props.activeIndex); + // } + // } + + adjustScroll(prevActiveIndex: number) { + if ( + this.props.activeIndex && + this.props.activeIndex >= 0 && + prevActiveIndex !== this.props.activeIndex && + this.scroller.current + ) { this.scroller.current.scrollToRow(this.props.activeIndex); } } @@ -161,14 +172,8 @@ export default class TimeTable extends React.PureComponent { ...computeTimeLine(this.props.rows, this.state.firstVisibleRowIndex, this.visibleCount), }); } - if ( - this.props.activeIndex && - this.props.activeIndex >= 0 && - prevProps.activeIndex !== this.props.activeIndex && - this.scroller.current - ) { - this.scroller.current.scrollToRow(this.props.activeIndex); - } + + // this.adjustScroll(prevProps.activeIndex); } onScroll = ({ @@ -190,7 +195,7 @@ export default class TimeTable extends React.PureComponent { onJump = (index: any) => { if (this.props.onJump) { - this.props.onJump(this.props.rows[index].time); + this.props.onJump(this.props.rows[index]); } }; @@ -203,23 +208,29 @@ export default class TimeTable extends React.PureComponent {
activeIndex, - })} + className={cn( + 'dev-row border-b border-color-gray-light-shade group items-center', + stl.row, + { + [stl.hoverable]: hoverable, + 'error color-red': !!row.isRed && row.isRed(), + 'cursor-pointer': typeof onRowClick === 'function', + [stl.activeRow]: activeIndex === index, + // [stl.inactiveRow]: !activeIndex || index > activeIndex, + } + )} onClick={typeof onRowClick === 'function' ? () => onRowClick(row, index) : undefined} id="table-row" > - {columns.filter((i: any) => !i.hidden).map(({ dataKey, render, width }) => ( -
- {render - ? render(row) - : row[dataKey || ''] || {'empty'}} -
- ))} + {columns + .filter((i: any) => !i.hidden) + .map(({ dataKey, render, width }) => ( +
+ {render + ? render(row) + : row[dataKey || ''] || {'empty'}} +
+ ))}
@@ -324,10 +335,15 @@ export default class TimeTable extends React.PureComponent { 'cursor-pointer': typeof onClick === 'function', })} style={{ width: `${width}px` }} - onClick={() => this.onColumnClick(dataKey, onClick)} + // onClick={() => this.onColumnClick(dataKey, onClick)} > {label} - {!!sortBy && sortBy === dataKey && } + {!!sortBy && sortBy === dataKey && ( + + )}
))}
@@ -360,6 +376,7 @@ export default class TimeTable extends React.PureComponent { {({ width }: { width: number }) => ( { rowHeight={ROW_HEIGHT} rowRenderer={this.renderRow} onScroll={this.onScroll} - scrollToAlignment="start" + scrollToAlignment="center" forceUpdateProp={timestart | timewidth | (activeIndex || 0)} /> )} diff --git a/frontend/app/mstore/sessionStore.ts b/frontend/app/mstore/sessionStore.ts index 98a7061e6..ec72c19ba 100644 --- a/frontend/app/mstore/sessionStore.ts +++ b/frontend/app/mstore/sessionStore.ts @@ -5,75 +5,97 @@ import Session from './types/session'; import Record, { LAST_7_DAYS } from 'Types/app/period'; class UserFilter { - endDate: number = new Date().getTime(); - startDate: number = new Date().getTime() - 24 * 60 * 60 * 1000; - rangeName: string = LAST_7_DAYS; - filters: any = []; - page: number = 1; - limit: number = 10; - period: any = Record({ rangeName: LAST_7_DAYS }); + endDate: number = new Date().getTime(); + startDate: number = new Date().getTime() - 24 * 60 * 60 * 1000; + rangeName: string = LAST_7_DAYS; + filters: any = []; + page: number = 1; + limit: number = 10; + period: any = Record({ rangeName: LAST_7_DAYS }); - constructor() { - makeAutoObservable(this, { - page: observable, - update: action, - }); + constructor() { + makeAutoObservable(this, { + page: observable, + update: action, + }); + } + + update(key: string, value: any) { + // @ts-ignore + this[key] = value; + + if (key === 'period') { + this.startDate = this.period.start; + this.endDate = this.period.end; } + } - update(key: string, value: any) { - this[key] = value; + setFilters(filters: any[]) { + this.filters = filters; + } - if (key === 'period') { - this.startDate = this.period.start; - this.endDate = this.period.end; - } - } + setPage(page: number) { + this.page = page; + } - setFilters(filters: any[]) { - this.filters = filters; - } + toJson() { + return { + endDate: this.period.end, + startDate: this.period.start, + filters: this.filters.map(filterMap), + page: this.page, + limit: this.limit, + }; + } +} - setPage(page: number) { - this.page = page; - } +class DevTools { + networkIndex: 0; + consoleIndex: 0; + eventsIndex: 0; + networkActive: null; + consoleActive: null; + eventsActive: null; + constructor() { + makeAutoObservable(this, { + update: action, + }); + } - toJson() { - return { - endDate: this.period.end, - startDate: this.period.start, - filters: this.filters.map(filterMap), - page: this.page, - limit: this.limit, - }; - } + update(key: string, value: any) { + // @ts-ignore + this[key] = value; + } } export default class SessionStore { - userFilter: UserFilter = new UserFilter(); + userFilter: UserFilter = new UserFilter(); + devTools: DevTools = new DevTools(); - constructor() { - makeAutoObservable(this, { - userFilter: observable, + constructor() { + makeAutoObservable(this, { + userFilter: observable, + devTools: observable, + }); + } + + resetUserFilter() { + this.userFilter = new UserFilter(); + } + + getSessions(filter: any): Promise { + return new Promise((resolve, reject) => { + sessionService + .getSessions(filter.toJson()) + .then((response: any) => { + resolve({ + sessions: response.sessions.map((session: any) => new Session().fromJson(session)), + total: response.total, + }); + }) + .catch((error: any) => { + reject(error); }); - } - - resetUserFilter() { - this.userFilter = new UserFilter(); - } - - getSessions(filter: any): Promise { - return new Promise((resolve, reject) => { - sessionService - .getSessions(filter.toJson()) - .then((response: any) 
=> { - resolve({ - sessions: response.sessions.map((session: any) => new Session().fromJson(session)), - total: response.total, - }); - }) - .catch((error: any) => { - reject(error); - }); - }); - } + }); + } } diff --git a/frontend/app/player/MessageDistributor/MessageDistributor.ts b/frontend/app/player/MessageDistributor/MessageDistributor.ts index a80676a61..e1b59940a 100644 --- a/frontend/app/player/MessageDistributor/MessageDistributor.ts +++ b/frontend/app/player/MessageDistributor/MessageDistributor.ts @@ -132,7 +132,6 @@ export default class MessageDistributor extends StatedScreen { exceptions: session.errors.toJSON(), }) - /* === */ this.loadMessages(); } diff --git a/frontend/app/styles/general.css b/frontend/app/styles/general.css index cce982514..a21cfe239 100644 --- a/frontend/app/styles/general.css +++ b/frontend/app/styles/general.css @@ -355,4 +355,8 @@ p { width: 80px; height: 80px; transform: rotate(45deg); +} + +.dev-row { + transition: all 0.5s; } \ No newline at end of file diff --git a/frontend/package.json b/frontend/package.json index af78fbdaf..c4f0a68de 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -91,6 +91,7 @@ "@types/react-dom": "^18.0.4", "@types/react-redux": "^7.1.24", "@types/react-router-dom": "^5.3.3", + "@types/react-virtualized": "^9.21.21", "@typescript-eslint/eslint-plugin": "^5.24.0", "@typescript-eslint/parser": "^5.24.0", "autoprefixer": "^10.4.7", From cf2c0ef4a5a48f7e4b17ae9aa4d57164ce1a47aa Mon Sep 17 00:00:00 2001 From: Malik Ahmed Date: Tue, 22 Nov 2022 13:40:48 -0500 Subject: [PATCH 58/70] Fix logs (verbose) CLI option (#808) --- sourcemap-uploader/cli.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sourcemap-uploader/cli.js b/sourcemap-uploader/cli.js index 7085f1345..28c167ea3 100755 --- a/sourcemap-uploader/cli.js +++ b/sourcemap-uploader/cli.js @@ -55,10 +55,10 @@ dir.addArgument(['-u', '--js-dir-url'], { // TODO: exclude in dir -const { command, api_key, project_key, server, verbose, ...args } = +const { command, api_key, project_key, server, logs, ...args } = parser.parseArgs(); -global._VERBOSE = !!verbose; +global._VERBOSE = !!logs; (command === 'file' ? 
uploadFile( From 3c848593f3934d1d66408484925d674510e0ac27 Mon Sep 17 00:00:00 2001 From: Alex Kaminskii Date: Tue, 22 Nov 2022 19:45:08 +0100 Subject: [PATCH 59/70] fix(sourcemap-uploader): version inc (--logs option fix) --- sourcemap-uploader/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sourcemap-uploader/package.json b/sourcemap-uploader/package.json index e8da522a1..ee495605e 100644 --- a/sourcemap-uploader/package.json +++ b/sourcemap-uploader/package.json @@ -1,6 +1,6 @@ { "name": "@openreplay/sourcemap-uploader", - "version": "3.0.6", + "version": "3.0.7", "description": "NPM module to upload your JS sourcemaps files to OpenReplay", "bin": "cli.js", "main": "index.js", From 871a9b18266e0d1cb4a37a467389f442cfb9f731 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Wed, 23 Nov 2022 16:31:05 +0100 Subject: [PATCH 60/70] change(ui) - network with state --- .../DevTools/NetworkPanel/NetworkPanel.tsx | 81 +++++++++++++------ .../FetchDetailsModal/FetchDetailsModal.tsx | 6 ++ frontend/app/mstore/sessionStore.ts | 9 ++- 3 files changed, 67 insertions(+), 29 deletions(-) diff --git a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx index 8c087d868..6be39a77a 100644 --- a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx +++ b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx @@ -15,8 +15,7 @@ import FetchDetailsModal from 'Shared/FetchDetailsModal'; import { useStore } from 'App/mstore'; import { useObserver } from 'mobx-react-lite'; -const INDEX_KEY = 'networkIndex'; -const INDEX_KEY_ACTIVE = 'networkActive'; +const INDEX_KEY = 'network'; const ALL = 'ALL'; const XHR = 'xhr'; @@ -126,6 +125,9 @@ export function renderDuration(r: any) { ); } +let timeOut: any = null; +const TIMEOUT_DURATION = 5000; + interface Props { location: any; resources: any; @@ -139,49 +141,70 @@ interface Props { function NetworkPanel(props: Props) { const { resources, time, domContentLoadedTime, loadTime, domBuildingTime, fetchList } = props; const { showModal } = useModal(); - const [activeTab, setActiveTab] = useState(ALL); + const [sortBy, setSortBy] = useState('time'); const [sortAscending, setSortAscending] = useState(true); - const [filter, setFilter] = useState(''); + const [filteredList, setFilteredList] = useState([]); const [showOnlyErrors, setShowOnlyErrors] = useState(false); - const onTabClick = (activeTab: any) => setActiveTab(activeTab); - const onFilterChange = ({ target: { value } }: any) => setFilter(value); + const [isDetailsModalActive, setIsDetailsModalActive] = useState(false); const additionalHeight = 0; const fetchPresented = fetchList.length > 0; const { sessionStore: { devTools }, } = useStore(); - - const activeIndex = useObserver(() => devTools[INDEX_KEY]); - const activeClick = useObserver(() => devTools[INDEX_KEY_ACTIVE]); - const [pauseSync, setPauseSync] = useState(!!activeClick); + // const [filter, setFilter] = useState(devTools[INDEX_KEY].filter); + // const [activeTab, setActiveTab] = useState(ALL); + const filter = useObserver(() => devTools[INDEX_KEY].filter); + const activeTab = useObserver(() => devTools[INDEX_KEY].activeTab); + const activeIndex = useObserver(() => devTools[INDEX_KEY].index); + const [pauseSync, setPauseSync] = useState(activeIndex > 0); const synRef: any = useRef({}); + const onTabClick = (activeTab: any) => devTools.update(INDEX_KEY, { activeTab });; + const onFilterChange = ({ target: { value 
} }: any) => { + devTools.update(INDEX_KEY, { filter: value }); + }; + synRef.current = { pauseSync, activeIndex, - activeClick, + }; + + const removePause = () => { + clearTimeout(timeOut); + timeOut = setTimeout(() => { + devTools.update(INDEX_KEY, { index: getCurrentIndex() }); + setPauseSync(false); + }, TIMEOUT_DURATION); + }; + + const onMouseLeave = () => { + if (isDetailsModalActive) return; + removePause(); }; useEffect(() => { - if (!!activeClick) { - setPauseSync(true); - devTools.update(INDEX_KEY, activeClick); - console.log('mounting at: ', activeClick); + if (pauseSync) { + removePause(); } + return () => { - if (synRef.current.pauseSync) { - console.log('unmouting at: ', synRef.current.activeIndex); - devTools.update(INDEX_KEY_ACTIVE, synRef.current.activeIndex); + clearTimeout(timeOut); + if (!synRef.current.pauseSync) { + devTools.update(INDEX_KEY, { index: 0 }); } }; }, []); + const getCurrentIndex = () => { + return filteredList.filter((item: any) => item.time <= time).length - 1; + }; + useEffect(() => { - const lastIndex = filteredList.filter((item: any) => item.time <= time).length - 1; - if (lastIndex !== activeIndex && !pauseSync) { - devTools.update(INDEX_KEY, lastIndex); + const currentIndex = getCurrentIndex(); + if (currentIndex !== activeIndex && !pauseSync) { + devTools.update(INDEX_KEY, { index: currentIndex }); } }, [time]); @@ -246,14 +269,16 @@ function NetworkPanel(props: Props) { return arr; }, []); - const onRowClick = (row: any) => { + const showDetailsModal = (row: any) => { + setIsDetailsModalActive(true); showModal( , { right: true, + onClose: removePause, } ); - devTools.update(INDEX_KEY, filteredList.indexOf(row)); + devTools.update(INDEX_KEY, { index: filteredList.indexOf(row) }); setPauseSync(true); }; @@ -264,12 +289,17 @@ function NetworkPanel(props: Props) { setSortBy(sortKey); }; + useEffect(() => { + devTools.update(INDEX_KEY, { filter, activeTab }); + }, [filter, activeTab]); + return ( setPauseSync(true)} + onMouseLeave={onMouseLeave} >
@@ -291,6 +321,7 @@ function NetworkPanel(props: Props) { onChange={onFilterChange} height={28} width={230} + value={filter} /> @@ -348,11 +379,11 @@ function NetworkPanel(props: Props) { rows={filteredList} referenceLines={referenceLines} renderPopup - onRowClick={onRowClick} + onRowClick={showDetailsModal} additionalHeight={additionalHeight} onJump={(row: any) => { setPauseSync(true); - devTools.update(INDEX_KEY, filteredList.indexOf(row)); + devTools.update(INDEX_KEY, { index: filteredList.indexOf(row) }); jump(row.time); }} sortBy={sortBy} diff --git a/frontend/app/components/shared/FetchDetailsModal/FetchDetailsModal.tsx b/frontend/app/components/shared/FetchDetailsModal/FetchDetailsModal.tsx index 1ab311bfa..bcee5f5b9 100644 --- a/frontend/app/components/shared/FetchDetailsModal/FetchDetailsModal.tsx +++ b/frontend/app/components/shared/FetchDetailsModal/FetchDetailsModal.tsx @@ -4,6 +4,7 @@ import { Button } from 'UI'; import FetchPluginMessage from './components/FetchPluginMessage'; import { TYPES } from 'Types/session/resource'; import FetchTabs from './components/FetchTabs/FetchTabs'; +import { useStore } from 'App/mstore'; interface Props { resource: any; @@ -16,6 +17,9 @@ function FetchDetailsModal(props: Props) { const [first, setFirst] = useState(false); const [last, setLast] = useState(false); const isXHR = resource.type === TYPES.XHR || resource.type === TYPES.FETCH; + const { + sessionStore: { devTools }, + } = useStore(); useEffect(() => { const index = rows.indexOf(resource); @@ -28,6 +32,7 @@ function FetchDetailsModal(props: Props) { const index = rows.indexOf(resource); if (index > 0) { setResource(rows[index - 1]); + devTools.update('network', { index: index - 1 }) } }; @@ -35,6 +40,7 @@ function FetchDetailsModal(props: Props) { const index = rows.indexOf(resource); if (index < rows.length - 1) { setResource(rows[index + 1]); + devTools.update('network', { index: index + 1 }) } }; diff --git a/frontend/app/mstore/sessionStore.ts b/frontend/app/mstore/sessionStore.ts index ec72c19ba..b947bd01a 100644 --- a/frontend/app/mstore/sessionStore.ts +++ b/frontend/app/mstore/sessionStore.ts @@ -51,12 +51,13 @@ class UserFilter { class DevTools { networkIndex: 0; + network: any; + consoleIndex: 0; eventsIndex: 0; - networkActive: null; - consoleActive: null; - eventsActive: null; + constructor() { + this.network = { index: 0, search: '', activeTab: 'ALL', isError: false }; makeAutoObservable(this, { update: action, }); @@ -64,7 +65,7 @@ class DevTools { update(key: string, value: any) { // @ts-ignore - this[key] = value; + this[key] = Object.assign(this[key], value); } } From 76804f0cd66b076377573b84464188fd99295935 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Wed, 23 Nov 2022 18:35:45 +0100 Subject: [PATCH 61/70] change(ui) - events virutalize and sync --- .../app/components/Session_/Player/Player.js | 4 +- .../DevTools/ConsolePanel/ConsolePanel.tsx | 2 +- .../DevTools/NetworkPanel/NetworkPanel.tsx | 6 +- .../StackEventPanel/StackEventPanel.tsx | 183 ++++++++++++++++++ .../shared/DevTools/StackEventPanel/index.ts | 1 + .../DevTools/StackEventRow/StackEventRow.tsx | 8 +- frontend/app/mstore/sessionStore.ts | 17 +- 7 files changed, 208 insertions(+), 13 deletions(-) create mode 100644 frontend/app/components/shared/DevTools/StackEventPanel/StackEventPanel.tsx create mode 100644 frontend/app/components/shared/DevTools/StackEventPanel/index.ts diff --git a/frontend/app/components/Session_/Player/Player.js b/frontend/app/components/Session_/Player/Player.js index 
48d881c29..96a4859cf 100644 --- a/frontend/app/components/Session_/Player/Player.js +++ b/frontend/app/components/Session_/Player/Player.js @@ -42,6 +42,7 @@ import { updateLastPlayedSession } from 'Duck/sessions'; import OverviewPanel from '../OverviewPanel'; import ConsolePanel from 'Shared/DevTools/ConsolePanel'; import ProfilerPanel from 'Shared/DevTools/ProfilerPanel'; +import StackEventPanel from 'Shared/DevTools/StackEventPanel'; @connectPlayer((state) => ({ live: state.live, @@ -115,7 +116,8 @@ export default class Player extends React.PureComponent { // )} - {bottomBlock === STACKEVENTS && } + {/* {bottomBlock === STACKEVENTS && } */} + {bottomBlock === STACKEVENTS && } {bottomBlock === STORAGE && } {bottomBlock === PROFILER && } {bottomBlock === PERFORMANCE && } diff --git a/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx b/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx index 8f5835cfa..efc4f735a 100644 --- a/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx +++ b/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx @@ -95,7 +95,7 @@ function ConsolePanel(props: Props) { ); }; - let filtered = React.useMemo(() => { + const filtered = React.useMemo(() => { const filterRE = getRE(filter, 'i'); let list = logs; diff --git a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx index 6be39a77a..ee0ce8a55 100644 --- a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx +++ b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx @@ -233,9 +233,9 @@ function NetworkPanel(props: Props) { (list = list.filter((networkCall: any) => networkCall.url !== fetchCall.url)) ); list = list.concat(fetchList); - list = list.sort((a: any, b: any) => { - return compare(a, b, sortBy); - }); + // list = list.sort((a: any, b: any) => { + // return compare(a, b, sortBy); + // }); // if (!sortAscending) { // list = list.reverse(); diff --git a/frontend/app/components/shared/DevTools/StackEventPanel/StackEventPanel.tsx b/frontend/app/components/shared/DevTools/StackEventPanel/StackEventPanel.tsx new file mode 100644 index 000000000..57abe1808 --- /dev/null +++ b/frontend/app/components/shared/DevTools/StackEventPanel/StackEventPanel.tsx @@ -0,0 +1,183 @@ +import React, { useEffect, useMemo, useRef, useState } from 'react'; +import { hideHint } from 'Duck/components/player'; +import { Tooltip, Tabs, Input, NoContent, Icon, Toggler } from 'UI'; +import { getRE } from 'App/utils'; +import { List, CellMeasurer, CellMeasurerCache, AutoSizer } from 'react-virtualized'; + +import TimeTable from '../TimeTable'; +import BottomBlock from '../BottomBlock'; +import { connectPlayer, jump } from 'Player'; +import { useModal } from 'App/components/Modal'; +import { useStore } from 'App/mstore'; +import { useObserver } from 'mobx-react-lite'; +import { DATADOG, SENTRY, STACKDRIVER, typeList } from 'Types/session/stackEvent'; +import { connect } from 'react-redux'; +import StackEventRow from 'Shared/DevTools/StackEventRow'; + +let timeOut: any = null; +const TIMEOUT_DURATION = 5000; +const INDEX_KEY = 'stackEvent'; +const ALL = 'ALL'; +const TABS = [ALL, ...typeList].map((tab) => ({ text: tab, key: tab })); + +interface Props { + list: any; + hideHint: any; + time: any; +} +function StackEventPanel(props: Props) { + const { list, time } = props; + const additionalHeight = 0; + const { + sessionStore: { devTools }, + } = 
useStore(); + const [isDetailsModalActive, setIsDetailsModalActive] = useState(false); + const [filteredList, setFilteredList] = useState([]); + const filter = useObserver(() => devTools[INDEX_KEY].filter); + const activeTab = useObserver(() => devTools[INDEX_KEY].activeTab); + const activeIndex = useObserver(() => devTools[INDEX_KEY].index); + const [pauseSync, setPauseSync] = useState(activeIndex > 0); + const synRef: any = useRef({}); + synRef.current = { + pauseSync, + activeIndex, + }; + const _list = React.useRef(); + + const onTabClick = (activeTab: any) => devTools.update(INDEX_KEY, { activeTab }); + const onFilterChange = ({ target: { value } }: any) => { + devTools.update(INDEX_KEY, { filter: value }); + }; + + const getCurrentIndex = () => { + return filteredList.filter((item: any) => item.time <= time).length - 1; + }; + + const removePause = () => { + clearTimeout(timeOut); + timeOut = setTimeout(() => { + devTools.update(INDEX_KEY, { index: getCurrentIndex() }); + setPauseSync(false); + }, TIMEOUT_DURATION); + }; + + useEffect(() => { + const currentIndex = getCurrentIndex(); + if (currentIndex !== activeIndex && !pauseSync) { + devTools.update(INDEX_KEY, { index: currentIndex }); + } + }, [time]); + + const onMouseLeave = () => { + if (isDetailsModalActive) return; + removePause(); + }; + + React.useMemo(() => { + const filterRE = getRE(filter, 'i'); + let list = props.list; + + list = list.filter( + ({ name, source }: any) => + (!!filter ? filterRE.test(name) : true) && (activeTab === ALL || activeTab === source) + ); + + setFilteredList(list); + }, [filter, activeTab]); + + const tabs = useMemo(() => { + return TABS.filter(({ key }) => key === ALL || list.some(({ source }: any) => key === source)); + }, []); + + const cache = new CellMeasurerCache({ + fixedWidth: true, + keyMapper: (index: number) => filteredList[index], + }); + + const _rowRenderer = ({ index, key, parent, style }: any) => { + const item = filteredList[index]; + + return ( + // @ts-ignore + + {() => ( + jump(item.time)} + /> + )} + + ); + }; + + return ( + setPauseSync(true)} + onMouseLeave={onMouseLeave} + > + +
+ Stack Events + +
+ +
+ + + + No Data +
+ } + size="small" + show={filteredList.length === 0} + > + + {({ height, width }: any) => ( + + )} + + + +
+ ); +} + +export default connect( + (state: any) => ({ + hintIsHidden: + state.getIn(['components', 'player', 'hiddenHints', 'stack']) || + !state.getIn(['site', 'list']).some((s: any) => s.stackIntegrations), + }), + { hideHint } +)( + connectPlayer((state: any) => ({ + list: state.stackList, + time: state.time, + }))(StackEventPanel) +); diff --git a/frontend/app/components/shared/DevTools/StackEventPanel/index.ts b/frontend/app/components/shared/DevTools/StackEventPanel/index.ts new file mode 100644 index 000000000..bb0ca8cb6 --- /dev/null +++ b/frontend/app/components/shared/DevTools/StackEventPanel/index.ts @@ -0,0 +1 @@ +export { default } from './StackEventPanel'; diff --git a/frontend/app/components/shared/DevTools/StackEventRow/StackEventRow.tsx b/frontend/app/components/shared/DevTools/StackEventRow/StackEventRow.tsx index b6b1a8a6f..e5af72207 100644 --- a/frontend/app/components/shared/DevTools/StackEventRow/StackEventRow.tsx +++ b/frontend/app/components/shared/DevTools/StackEventRow/StackEventRow.tsx @@ -9,9 +9,11 @@ import StackEventModal from '../StackEventModal'; interface Props { event: any; onJump: any; + style?: any; + isActive?: boolean; } function StackEventRow(props: Props) { - const { event, onJump } = props; + const { event, onJump, style, isActive } = props; let message = event.payload[0] || ''; message = typeof message === 'string' ? message : JSON.stringify(message); const onClickDetails = () => { @@ -30,11 +32,13 @@ function StackEventRow(props: Props) { return (
diff --git a/frontend/app/mstore/sessionStore.ts b/frontend/app/mstore/sessionStore.ts index b947bd01a..f19f747fd 100644 --- a/frontend/app/mstore/sessionStore.ts +++ b/frontend/app/mstore/sessionStore.ts @@ -49,15 +49,20 @@ class UserFilter { } } -class DevTools { - networkIndex: 0; - network: any; +interface BaseDevState { + index: number; + filter: string; + activeTab: string; + isError: boolean; +} - consoleIndex: 0; - eventsIndex: 0; +class DevTools { + network: BaseDevState; + stackEvent: BaseDevState; constructor() { - this.network = { index: 0, search: '', activeTab: 'ALL', isError: false }; + this.network = { index: 0, filter: '', activeTab: 'ALL', isError: false }; + this.stackEvent = { index: 0, filter: '', activeTab: 'ALL', isError: false }; makeAutoObservable(this, { update: action, }); From 276d2bd10025cd8205d002952e2b3e11b17cded4 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Wed, 23 Nov 2022 18:51:01 +0100 Subject: [PATCH 62/70] change(ui) - events virutalize and sync --- .../DevTools/NetworkPanel/NetworkPanel.tsx | 5 +++-- .../StackEventPanel/StackEventPanel.tsx | 18 ++++++++++++++++-- .../DevTools/StackEventRow/StackEventRow.tsx | 9 ++------- 3 files changed, 21 insertions(+), 11 deletions(-) diff --git a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx index ee0ce8a55..1acb51ac2 100644 --- a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx +++ b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx @@ -141,7 +141,7 @@ interface Props { function NetworkPanel(props: Props) { const { resources, time, domContentLoadedTime, loadTime, domBuildingTime, fetchList } = props; const { showModal } = useModal(); - + const [sortBy, setSortBy] = useState('time'); const [sortAscending, setSortAscending] = useState(true); @@ -161,7 +161,7 @@ function NetworkPanel(props: Props) { const [pauseSync, setPauseSync] = useState(activeIndex > 0); const synRef: any = useRef({}); - const onTabClick = (activeTab: any) => devTools.update(INDEX_KEY, { activeTab });; + const onTabClick = (activeTab: any) => devTools.update(INDEX_KEY, { activeTab }); const onFilterChange = ({ target: { value } }: any) => { devTools.update(INDEX_KEY, { filter: value }); }; @@ -172,6 +172,7 @@ function NetworkPanel(props: Props) { }; const removePause = () => { + setIsDetailsModalActive(false); clearTimeout(timeOut); timeOut = setTimeout(() => { devTools.update(INDEX_KEY, { index: getCurrentIndex() }); diff --git a/frontend/app/components/shared/DevTools/StackEventPanel/StackEventPanel.tsx b/frontend/app/components/shared/DevTools/StackEventPanel/StackEventPanel.tsx index 57abe1808..929a0e283 100644 --- a/frontend/app/components/shared/DevTools/StackEventPanel/StackEventPanel.tsx +++ b/frontend/app/components/shared/DevTools/StackEventPanel/StackEventPanel.tsx @@ -4,7 +4,6 @@ import { Tooltip, Tabs, Input, NoContent, Icon, Toggler } from 'UI'; import { getRE } from 'App/utils'; import { List, CellMeasurer, CellMeasurerCache, AutoSizer } from 'react-virtualized'; -import TimeTable from '../TimeTable'; import BottomBlock from '../BottomBlock'; import { connectPlayer, jump } from 'Player'; import { useModal } from 'App/components/Modal'; @@ -13,6 +12,7 @@ import { useObserver } from 'mobx-react-lite'; import { DATADOG, SENTRY, STACKDRIVER, typeList } from 'Types/session/stackEvent'; import { connect } from 'react-redux'; import StackEventRow from 'Shared/DevTools/StackEventRow'; +import 
StackEventModal from '../StackEventModal'; let timeOut: any = null; const TIMEOUT_DURATION = 5000; @@ -31,6 +31,7 @@ function StackEventPanel(props: Props) { const { sessionStore: { devTools }, } = useStore(); + const { showModal } = useModal(); const [isDetailsModalActive, setIsDetailsModalActive] = useState(false); const [filteredList, setFilteredList] = useState([]); const filter = useObserver(() => devTools[INDEX_KEY].filter); @@ -55,6 +56,7 @@ function StackEventPanel(props: Props) { const removePause = () => { clearTimeout(timeOut); + setIsDetailsModalActive(false); timeOut = setTimeout(() => { devTools.update(INDEX_KEY, { index: getCurrentIndex() }); setPauseSync(false); @@ -94,6 +96,13 @@ function StackEventPanel(props: Props) { keyMapper: (index: number) => filteredList[index], }); + const showDetails = (item: any) => { + setIsDetailsModalActive(true); + showModal(, { right: true, onClose: removePause }); + devTools.update(INDEX_KEY, { index: filteredList.indexOf(item) }); + setPauseSync(true); + }; + const _rowRenderer = ({ index, key, parent, style }: any) => { const item = filteredList[index]; @@ -106,7 +115,12 @@ function StackEventPanel(props: Props) { style={style} key={item.key} event={item} - onJump={() => jump(item.time)} + onJump={() => { + setPauseSync(true); + devTools.update(INDEX_KEY, { index: filteredList.indexOf(item) }); + jump(item.time); + }} + onClick={() => showDetails(item)} /> )} diff --git a/frontend/app/components/shared/DevTools/StackEventRow/StackEventRow.tsx b/frontend/app/components/shared/DevTools/StackEventRow/StackEventRow.tsx index e5af72207..0d2eeb554 100644 --- a/frontend/app/components/shared/DevTools/StackEventRow/StackEventRow.tsx +++ b/frontend/app/components/shared/DevTools/StackEventRow/StackEventRow.tsx @@ -3,23 +3,18 @@ import JumpButton from '../JumpButton'; import { Icon } from 'UI'; import cn from 'classnames'; import { OPENREPLAY, SENTRY, DATADOG, STACKDRIVER } from 'Types/session/stackEvent'; -import { useModal } from 'App/components/Modal'; -import StackEventModal from '../StackEventModal'; interface Props { event: any; onJump: any; style?: any; isActive?: boolean; + onClick?: any; } function StackEventRow(props: Props) { const { event, onJump, style, isActive } = props; let message = event.payload[0] || ''; message = typeof message === 'string' ? message : JSON.stringify(message); - const onClickDetails = () => { - showModal(, { right: true }); - }; - const { showModal } = useModal(); const iconProps: any = React.useMemo(() => { const { source } = event; @@ -34,7 +29,7 @@ function StackEventRow(props: Props) {
Date: Wed, 23 Nov 2022 19:47:10 +0100 Subject: [PATCH 63/70] change(ui) - console sync --- .../DevTools/ConsolePanel/ConsolePanel.tsx | 129 +++++++++++++++--- .../shared/DevTools/ConsoleRow/ConsoleRow.tsx | 26 ++-- .../DevTools/NetworkPanel/NetworkPanel.tsx | 26 +--- .../StackEventPanel/StackEventPanel.tsx | 8 +- .../shared/DevTools/TimeTable/TimeTable.tsx | 10 -- frontend/app/mstore/sessionStore.ts | 2 + 6 files changed, 131 insertions(+), 70 deletions(-) diff --git a/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx b/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx index efc4f735a..38714b92d 100644 --- a/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx +++ b/frontend/app/components/shared/DevTools/ConsolePanel/ConsolePanel.tsx @@ -1,4 +1,4 @@ -import React, { useState } from 'react'; +import React, { useEffect, useRef, useState } from 'react'; import { connectPlayer, jump } from 'Player'; import Log from 'Types/session/log'; import BottomBlock from '../BottomBlock'; @@ -7,12 +7,11 @@ import { Tabs, Input, Icon, NoContent } from 'UI'; import cn from 'classnames'; import ConsoleRow from '../ConsoleRow'; import { getRE } from 'App/utils'; -import { - List, - CellMeasurer, - CellMeasurerCache, - AutoSizer, -} from 'react-virtualized'; +import { List, CellMeasurer, CellMeasurerCache, AutoSizer } from 'react-virtualized'; +import { useObserver } from 'mobx-react-lite'; +import { useStore } from 'App/mstore'; +import ErrorDetailsModal from 'App/components/Dashboard/components/Errors/ErrorDetailsModal'; +import { useModal } from 'App/components/Modal'; const ALL = 'ALL'; const INFO = 'INFO'; @@ -57,26 +56,97 @@ const getIconProps = (level: any) => { return null; }; +const INDEX_KEY = 'console'; +let timeOut: any = null; +const TIMEOUT_DURATION = 5000; interface Props { logs: any; exceptions: any; + time: any; } function ConsolePanel(props: Props) { - const { logs } = props; + const { logs, time } = props; const additionalHeight = 0; - const [activeTab, setActiveTab] = useState(ALL); - const [filter, setFilter] = useState(''); + // const [activeTab, setActiveTab] = useState(ALL); + // const [filter, setFilter] = useState(''); + const { + sessionStore: { devTools }, + } = useStore(); + const [isDetailsModalActive, setIsDetailsModalActive] = useState(false); + const [filteredList, setFilteredList] = useState([]); + const filter = useObserver(() => devTools[INDEX_KEY].filter); + const activeTab = useObserver(() => devTools[INDEX_KEY].activeTab); + const activeIndex = useObserver(() => devTools[INDEX_KEY].index); + const [pauseSync, setPauseSync] = useState(activeIndex > 0); + const synRef: any = useRef({}); + const { showModal } = useModal(); + + const onTabClick = (activeTab: any) => devTools.update(INDEX_KEY, { activeTab }); + const onFilterChange = ({ target: { value } }: any) => { + devTools.update(INDEX_KEY, { filter: value }); + }; + + synRef.current = { + pauseSync, + activeIndex, + }; + + const removePause = () => { + setIsDetailsModalActive(false); + clearTimeout(timeOut); + timeOut = setTimeout(() => { + devTools.update(INDEX_KEY, { index: getCurrentIndex() }); + setPauseSync(false); + }, TIMEOUT_DURATION); + }; + + const onMouseLeave = () => { + if (isDetailsModalActive) return; + removePause(); + }; + + useEffect(() => { + if (pauseSync) { + removePause(); + } + + return () => { + clearTimeout(timeOut); + if (!synRef.current.pauseSync) { + devTools.update(INDEX_KEY, { index: 0 }); + } + }; + }, []); + + const getCurrentIndex 
= () => { + return filteredList.filter((item: any) => item.time <= time).length - 1; + }; + + useEffect(() => { + const currentIndex = getCurrentIndex(); + if (currentIndex !== activeIndex && !pauseSync) { + devTools.update(INDEX_KEY, { index: currentIndex }); + } + }, [time]); const cache = new CellMeasurerCache({ fixedWidth: true, - keyMapper: (index: number) => filtered[index], + keyMapper: (index: number) => filteredList[index], }); const _list = React.useRef(); + const showDetails = (log: any) => { + setIsDetailsModalActive(true); + showModal(, { right: true, onClose: removePause }); + devTools.update(INDEX_KEY, { index: filteredList.indexOf(log) }); + setPauseSync(true); + }; + const _rowRenderer = ({ index, key, parent, style }: any) => { - const item = filtered[index]; + const item = filteredList[index]; return ( + // @ts-ignore {({ measure }: any) => ( showDetails(item)} recalcHeight={() => { measure(); (_list as any).current.recomputeRowHeights(index); @@ -95,7 +166,7 @@ function ConsolePanel(props: Props) { ); }; - const filtered = React.useMemo(() => { + React.useMemo(() => { const filterRE = getRE(filter, 'i'); let list = logs; @@ -104,14 +175,23 @@ function ConsolePanel(props: Props) { (!!filter ? filterRE.test(value) : true) && (activeTab === ALL || activeTab === LEVEL_TAB[level]) ); - return list; - }, [filter, activeTab]); + setFilteredList(list); + }, [logs, filter, activeTab]); - const onTabClick = (activeTab: any) => setActiveTab(activeTab); - const onFilterChange = ({ target: { value } }: any) => setFilter(value); + useEffect(() => { + if (_list.current) { + // @ts-ignore + _list.current.scrollToRow(activeIndex); + } + }, [activeIndex]); return ( - + setPauseSync(true)} + onMouseLeave={onMouseLeave} + > + {/* @ts-ignore */}
Console @@ -125,8 +205,11 @@ function ConsolePanel(props: Props) { name="filter" height={28} onChange={onFilterChange} + value={filter} /> + {/* @ts-ignore */} + {/* @ts-ignore */} } size="small" - show={filtered.length === 0} + show={filteredList.length === 0} > + {/* @ts-ignore */} {({ height, width }: any) => ( + // @ts-ignore )} + {/* @ts-ignore */} ); @@ -170,6 +258,7 @@ export default connectPlayer((state: any) => { }) ); return { + time: state.time, logs: logs.concat(logExceptions), }; })(ConsolePanel); diff --git a/frontend/app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx b/frontend/app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx index aae911d42..83929cbed 100644 --- a/frontend/app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx +++ b/frontend/app/components/shared/DevTools/ConsoleRow/ConsoleRow.tsx @@ -2,8 +2,6 @@ import React, { useState } from 'react'; import cn from 'classnames'; import { Icon } from 'UI'; import JumpButton from 'Shared/DevTools/JumpButton'; -import { useModal } from 'App/components/Modal'; -import ErrorDetailsModal from 'App/components/Dashboard/components/Errors/ErrorDetailsModal'; interface Props { log: any; @@ -12,24 +10,20 @@ interface Props { renderWithNL?: any; style?: any; recalcHeight?: () => void; + onClick: () => void; } function ConsoleRow(props: Props) { const { log, iconProps, jump, renderWithNL, style, recalcHeight } = props; - const { showModal } = useModal(); const [expanded, setExpanded] = useState(false); const lines = log.value.split('\n').filter((l: any) => !!l); const canExpand = lines.length > 1; const clickable = canExpand || !!log.errorId; - const onErrorClick = () => { - showModal(, { right: true }); - }; - const toggleExpand = () => { - setExpanded(!expanded) - setTimeout(() => recalcHeight(), 0) - } + setExpanded(!expanded); + setTimeout(() => recalcHeight(), 0); + }; return (
(!!log.errorId ? onErrorClick() : toggleExpand()) : () => {} - } + onClick={clickable ? () => (!!log.errorId ? props.onClick() : toggleExpand()) : () => {}} >
@@ -57,7 +49,13 @@ function ConsoleRow(props: Props) { )} {renderWithNL(lines.pop())}
- {canExpand && expanded && lines.map((l: string, i: number) =>
{l}
)} + {canExpand && + expanded && + lines.map((l: string, i: number) => ( +
+ {l} +
+ ))}
jump(log.time)} />
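The two ConsoleRow hunks above move error handling out of the row: it no longer imports useModal/ErrorDetailsModal, it simply calls the onClick handed down by ConsolePanel, which opens the modal and pauses the playhead sync until the modal closes and the mouse leaves the panel. A minimal sketch of that pause/resume timing, with names taken from the diffs above (the helper itself is hypothetical, not part of the patch):

import React from 'react';

// Hypothetical helper condensing the ConsolePanel logic above; TIMEOUT_DURATION is 5000 ms in the diff.
function usePausableSync(resyncIndex: () => void, timeoutMs = 5000) {
  const [paused, setPaused] = React.useState(false);
  const timer = React.useRef<ReturnType<typeof setTimeout> | null>(null);

  const pause = () => {                              // onMouseEnter / row click
    if (timer.current) clearTimeout(timer.current);
    setPaused(true);
  };
  const scheduleResume = (modalOpen: boolean) => {   // onMouseLeave / modal onClose
    if (modalOpen) return;                           // stay paused while the details modal is open
    if (timer.current) clearTimeout(timer.current);
    timer.current = setTimeout(() => {
      resyncIndex();                                 // e.g. devTools.update('console', { index: getCurrentIndex() })
      setPaused(false);
    }, timeoutMs);
  };
  return { paused, pause, scheduleResume };
}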
diff --git a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx index 1acb51ac2..634aa9bae 100644 --- a/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx +++ b/frontend/app/components/shared/DevTools/NetworkPanel/NetworkPanel.tsx @@ -142,9 +142,6 @@ function NetworkPanel(props: Props) { const { resources, time, domContentLoadedTime, loadTime, domBuildingTime, fetchList } = props; const { showModal } = useModal(); - const [sortBy, setSortBy] = useState('time'); - const [sortAscending, setSortAscending] = useState(true); - const [filteredList, setFilteredList] = useState([]); const [showOnlyErrors, setShowOnlyErrors] = useState(false); const [isDetailsModalActive, setIsDetailsModalActive] = useState(false); @@ -234,13 +231,6 @@ function NetworkPanel(props: Props) { (list = list.filter((networkCall: any) => networkCall.url !== fetchCall.url)) ); list = list.concat(fetchList); - // list = list.sort((a: any, b: any) => { - // return compare(a, b, sortBy); - // }); - - // if (!sortAscending) { - // list = list.reverse(); - // } list = list.filter( ({ type, name, status, success }: any) => @@ -249,7 +239,7 @@ function NetworkPanel(props: Props) { (showOnlyErrors ? parseInt(status) >= 400 || !success : true) ); setFilteredList(list); - }, [resources, filter, sortBy, sortAscending, showOnlyErrors, activeTab]); + }, [resources, filter, showOnlyErrors, activeTab]); const referenceLines = useMemo(() => { const arr = []; @@ -283,13 +273,6 @@ function NetworkPanel(props: Props) { setPauseSync(true); }; - const handleSort = (sortKey: string) => { - if (sortKey === sortBy) { - setSortAscending(!sortAscending); - } - setSortBy(sortKey); - }; - useEffect(() => { devTools.update(INDEX_KEY, { filter, activeTab }); }, [filter, activeTab]); @@ -387,8 +370,6 @@ function NetworkPanel(props: Props) { devTools.update(INDEX_KEY, { index: filteredList.indexOf(row) }); jump(row.time); }} - sortBy={sortBy} - sortAscending={sortAscending} activeIndex={activeIndex} > {[ @@ -401,28 +382,24 @@ function NetworkPanel(props: Props) { label: 'Status', dataKey: 'status', width: 70, - // onClick: handleSort, }, { label: 'Type', dataKey: 'type', width: 90, render: renderType, - // onClick: handleSort, }, { label: 'Name', width: 240, dataKey: 'name', render: renderName, - // onClick: handleSort, }, { label: 'Size', width: 80, dataKey: 'decodedBodySize', render: renderSize, - // onClick: handleSort, hidden: activeTab === XHR, }, { @@ -430,7 +407,6 @@ function NetworkPanel(props: Props) { width: 80, dataKey: 'duration', render: renderDuration, - // onClick: handleSort, }, ]} diff --git a/frontend/app/components/shared/DevTools/StackEventPanel/StackEventPanel.tsx b/frontend/app/components/shared/DevTools/StackEventPanel/StackEventPanel.tsx index 929a0e283..557c72172 100644 --- a/frontend/app/components/shared/DevTools/StackEventPanel/StackEventPanel.tsx +++ b/frontend/app/components/shared/DevTools/StackEventPanel/StackEventPanel.tsx @@ -127,6 +127,13 @@ function StackEventPanel(props: Props) { ); }; + useEffect(() => { + if (_list.current) { + // @ts-ignore + _list.current.scrollToRow(activeIndex); + } + }, [activeIndex]); + return ( )} diff --git a/frontend/app/components/shared/DevTools/TimeTable/TimeTable.tsx b/frontend/app/components/shared/DevTools/TimeTable/TimeTable.tsx index 1a224d968..8271a6561 100644 --- a/frontend/app/components/shared/DevTools/TimeTable/TimeTable.tsx +++ 
b/frontend/app/components/shared/DevTools/TimeTable/TimeTable.tsx @@ -72,8 +72,6 @@ type Props = { hoverable?: boolean; onRowClick?: (row: any, index: number) => void; onJump?: (time: any) => void; - sortBy?: string; - sortAscending?: boolean; }; type TimeLineInfo = { @@ -281,8 +279,6 @@ export default class TimeTable extends React.PureComponent { referenceLines = [], additionalHeight = 0, activeIndex, - sortBy = '', - sortAscending = true, } = this.props; const columns = this.props.children.filter((i: any) => !i.hidden); const { timewidth, timestart } = this.state; @@ -338,12 +334,6 @@ export default class TimeTable extends React.PureComponent { // onClick={() => this.onColumnClick(dataKey, onClick)} > {label} - {!!sortBy && sortBy === dataKey && ( - - )}
))}
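The sessionStore diff below adds a console slice alongside network and stackEvent, so each dev-tools panel keeps its row index, filter and active tab in the same MobX store and can follow the playhead. In outline (an illustrative sketch, not the store's actual code), the shared state and the follow-the-index effect look like this:

import { useEffect, useRef } from 'react';
import type { List } from 'react-virtualized';

// Shape mirrored from sessionStore.ts below; one slice per dev-tools panel.
interface BaseDevState {
  index: number;      // row the player is currently at
  filter: string;     // text filter typed in the panel
  activeTab: string;  // ALL / ERRORS / ...
  isError: boolean;
}

// When the store index changes (playhead moved or a row was clicked),
// scroll the virtualized list so the active row stays visible.
function useFollowActiveIndex(activeIndex: number) {
  const listRef = useRef<List | null>(null);
  useEffect(() => {
    listRef.current?.scrollToRow(activeIndex); // react-virtualized List API
  }, [activeIndex]);
  return listRef;
}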
diff --git a/frontend/app/mstore/sessionStore.ts b/frontend/app/mstore/sessionStore.ts index f19f747fd..d055a9aa8 100644 --- a/frontend/app/mstore/sessionStore.ts +++ b/frontend/app/mstore/sessionStore.ts @@ -59,10 +59,12 @@ interface BaseDevState { class DevTools { network: BaseDevState; stackEvent: BaseDevState; + console: BaseDevState; constructor() { this.network = { index: 0, filter: '', activeTab: 'ALL', isError: false }; this.stackEvent = { index: 0, filter: '', activeTab: 'ALL', isError: false }; + this.console = { index: 0, filter: '', activeTab: 'ALL', isError: false }; makeAutoObservable(this, { update: action, }); From 66a3c5b4861f99c965367be97816cc15f7bea24a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 23 Nov 2022 20:42:50 +0100 Subject: [PATCH 64/70] feat(chalice): funnel optimizations --- api/chalicelib/core/significance.py | 113 +++++++++++++--------------- 1 file changed, 53 insertions(+), 60 deletions(-) diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py index 2abd87cf7..1522dc94a 100644 --- a/api/chalicelib/core/significance.py +++ b/api/chalicelib/core/significance.py @@ -181,9 +181,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: values=s["value"], value_key=f"value{i + 1}") n_stages_query.append(f""" (SELECT main.session_id, - {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp, - '{event_type}' AS type, - '{s["operator"]}' AS operator + {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp FROM {next_table} AS main {" ".join(extra_from)} WHERE main.timestamp >= {f"T{i}.stage{i}_timestamp" if i > 0 else "%(startTimestamp)s"} {f"AND main.session_id=T1.session_id" if i > 0 else ""} @@ -199,7 +197,8 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: n_stages_query += ") AS stages_t" n_stages_query = f""" - SELECT stages_and_issues_t.*, sessions.user_uuid FROM ( + SELECT stages_and_issues_t.*, sessions.user_uuid + FROM ( SELECT * FROM ( SELECT * FROM {n_stages_query} @@ -297,7 +296,21 @@ def pearson_corr(x: list, y: list): return r, confidence, False -def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_with_context, first_stage, last_stage): +# def tuple_or(t: tuple): +# x = 0 +# for el in t: +# x |= el # | is for bitwise OR +# return x +# +# The following function is correct optimization of the previous function because t is a list of 0,1 +def tuple_or(t: tuple): + for el in t: + if el > 0: + return 1 + return 0 + + +def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues, first_stage, last_stage): """ Returns two lists with binary values 0/1: @@ -316,12 +329,6 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_ transitions = [] n_sess_affected = 0 errors = {} - for issue in all_issues_with_context: - split = issue.split('__^__') - errors[issue] = { - "errors": [], - "issue_type": split[0], - "context": split[1]} for row in rows: t = 0 @@ -329,38 +336,28 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_ last_ts = row[f'stage{last_stage}_timestamp'] if first_ts is None: continue - elif first_ts is not None and last_ts is not None: + elif last_ts is not None: t = 1 transitions.append(t) ic_present = False - for issue_type_with_context in errors: + for error_id in all_issues: + if error_id not in errors: + errors[error_id] = [] ic = 0 - issue_type = 
errors[issue_type_with_context]["issue_type"] - context = errors[issue_type_with_context]["context"] + issue_type = all_issues[error_id]["issue_type"] + context = all_issues[error_id]["context"] if row['issue_type'] is not None: if last_ts is None or (first_ts < row['issue_timestamp'] < last_ts): context_in_row = row['issue_context'] if row['issue_context'] is not None else '' if issue_type == row['issue_type'] and context == context_in_row: ic = 1 ic_present = True - errors[issue_type_with_context]["errors"].append(ic) + errors[error_id].append(ic) if ic_present and t: n_sess_affected += 1 - # def tuple_or(t: tuple): - # x = 0 - # for el in t: - # x |= el - # return x - def tuple_or(t: tuple): - for el in t: - if el > 0: - return 1 - return 0 - - errors = {key: errors[key]["errors"] for key in errors} all_errors = [tuple_or(t) for t in zip(*errors.values())] return transitions, errors, all_errors, n_sess_affected @@ -376,10 +373,9 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage): """ affected_users = defaultdict(lambda: set()) affected_sessions = defaultdict(lambda: set()) - contexts = defaultdict(lambda: None) + all_issues = {} n_affected_users_dict = defaultdict(lambda: None) n_affected_sessions_dict = defaultdict(lambda: None) - all_issues_with_context = set() n_issues_dict = defaultdict(lambda: 0) issues_by_session = defaultdict(lambda: 0) @@ -396,14 +392,13 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage): if iss is not None and (row[f'stage{last_stage}_timestamp'] is None or (row[f'stage{first_stage}_timestamp'] < iss_ts < row[f'stage{last_stage}_timestamp'])): context_string = row['issue_context'] if row['issue_context'] is not None else '' - issue_with_context = iss + '__^__' + context_string - contexts[issue_with_context] = {"context": context_string, "id": row["issue_id"]} - all_issues_with_context.add(issue_with_context) - n_issues_dict[issue_with_context] += 1 + if row["issue_id"] not in all_issues: + all_issues[row["issue_id"]] = {"context": context_string, "issue_type": row["issue_type"]} + n_issues_dict[row["issue_id"]] += 1 if row['user_uuid'] is not None: - affected_users[issue_with_context].add(row['user_uuid']) + affected_users[row["issue_id"]].add(row['user_uuid']) - affected_sessions[issue_with_context].add(row['session_id']) + affected_sessions[row["issue_id"]].add(row['session_id']) issues_by_session[row[f'session_id']] += 1 if len(affected_users) > 0: @@ -414,29 +409,28 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage): n_affected_sessions_dict.update({ iss: len(affected_sessions[iss]) for iss in affected_sessions }) - return all_issues_with_context, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict, contexts + return all_issues, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict def count_sessions(rows, n_stages): session_counts = {i: set() for i in range(1, n_stages + 1)} - for ind, row in enumerate(rows): + for row in rows: for i in range(1, n_stages + 1): if row[f"stage{i}_timestamp"] is not None: session_counts[i].add(row[f"session_id"]) + session_counts = {i: len(session_counts[i]) for i in session_counts} return session_counts def count_users(rows, n_stages): - users_in_stages = defaultdict(lambda: set()) - - for ind, row in enumerate(rows): + users_in_stages = {i: set() for i in range(1, n_stages + 1)} + for row in rows: for i in range(1, n_stages + 1): if row[f"stage{i}_timestamp"] is not None: users_in_stages[i].add(row["user_uuid"]) users_count = {i: 
len(users_in_stages[i]) for i in range(1, n_stages + 1)} - return users_count @@ -489,18 +483,18 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False) last_stage = n_stages n_critical_issues = 0 - issues_dict = dict({"significant": [], - "insignificant": []}) + issues_dict = {"significant": [], + "insignificant": []} session_counts = count_sessions(rows, n_stages) drop = session_counts[first_stage] - session_counts[last_stage] - all_issues_with_context, n_issues_dict, affected_users_dict, affected_sessions, contexts = get_affected_users_for_all_issues( + all_issues, n_issues_dict, affected_users_dict, affected_sessions = get_affected_users_for_all_issues( rows, first_stage, last_stage) transitions, errors, all_errors, n_sess_affected = get_transitions_and_issues_of_each_type(rows, - all_issues_with_context, + all_issues, first_stage, last_stage) - # print("len(transitions) =", len(transitions)) + del rows if any(all_errors): total_drop_corr, conf, is_sign = pearson_corr(transitions, all_errors) @@ -513,33 +507,32 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False) if drop_only: return total_drop_due_to_issues - for issue in all_issues_with_context: + for issue_id in all_issues: - if not any(errors[issue]): + if not any(errors[issue_id]): continue - r, confidence, is_sign = pearson_corr(transitions, errors[issue]) + r, confidence, is_sign = pearson_corr(transitions, errors[issue_id]) if r is not None and drop is not None and is_sign: - lost_conversions = int(r * affected_sessions[issue]) + lost_conversions = int(r * affected_sessions[issue_id]) else: lost_conversions = None if r is None: r = 0 - split = issue.split('__^__') issues_dict['significant' if is_sign else 'insignificant'].append({ - "type": split[0], - "title": helper.get_issue_title(split[0]), - "affected_sessions": affected_sessions[issue], - "unaffected_sessions": session_counts[1] - affected_sessions[issue], + "type": all_issues[issue_id]["issue_type"], + "title": helper.get_issue_title(all_issues[issue_id]["issue_type"]), + "affected_sessions": affected_sessions[issue_id], + "unaffected_sessions": session_counts[1] - affected_sessions[issue_id], "lost_conversions": lost_conversions, - "affected_users": affected_users_dict[issue], + "affected_users": affected_users_dict[issue_id], "conversion_impact": round(r * 100), - "context_string": contexts[issue]["context"], - "issue_id": contexts[issue]["id"] + "context_string": all_issues[issue_id]["context"], + "issue_id": issue_id }) if is_sign: - n_critical_issues += n_issues_dict[issue] + n_critical_issues += n_issues_dict[issue_id] return n_critical_issues, issues_dict, total_drop_due_to_issues From 5581f3d541ffd5a250a27f64184f797da546009b Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 24 Nov 2022 13:21:14 +0100 Subject: [PATCH 65/70] fix(ui) - wrong param check --- .../SessionSearchQueryParamHandler.tsx | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/frontend/app/components/shared/SessionSearchQueryParamHandler/SessionSearchQueryParamHandler.tsx b/frontend/app/components/shared/SessionSearchQueryParamHandler/SessionSearchQueryParamHandler.tsx index 647ee68bd..f2624b460 100644 --- a/frontend/app/components/shared/SessionSearchQueryParamHandler/SessionSearchQueryParamHandler.tsx +++ b/frontend/app/components/shared/SessionSearchQueryParamHandler/SessionSearchQueryParamHandler.tsx @@ -49,10 +49,12 @@ const SessionSearchQueryParamHandler = React.memo((props: Props) => { } else { const 
_filters: any = { ...filtersMap }; const _filter = _filters[key]; - _filter.value = valueArr; - _filter.operator = operator; - _filter.source = sourceArr; - props.addFilter(_filter); + if (!!_filter) { + _filter.value = valueArr; + _filter.operator = operator; + _filter.source = sourceArr; + props.addFilter(_filter); + } } } }; From fbc8012c08f5228141ee8437e3a817aea08c2371 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Thu, 24 Nov 2022 13:23:07 +0100 Subject: [PATCH 66/70] change(ui) - checking for fullScreen param and hide header and footer in assist --- frontend/app/components/Session/LivePlayer.js | 87 +++++++++++-------- .../app/components/Session_/Player/Player.js | 3 +- .../app/components/Session_/PlayerBlock.js | 40 ++++----- 3 files changed, 68 insertions(+), 62 deletions(-) diff --git a/frontend/app/components/Session/LivePlayer.js b/frontend/app/components/Session/LivePlayer.js index 7e0f09145..ca9f40f28 100644 --- a/frontend/app/components/Session/LivePlayer.js +++ b/frontend/app/components/Session/LivePlayer.js @@ -3,24 +3,19 @@ import { useEffect, useState } from 'react'; import { connect } from 'react-redux'; import { Loader } from 'UI'; import { toggleFullscreen, closeBottomBlock } from 'Duck/components/player'; -import { withRequest } from 'HOCs' -import { - PlayerProvider, - connectPlayer, - init as initPlayer, - clean as cleanPlayer, -} from 'Player'; +import { withRequest } from 'HOCs'; +import { PlayerProvider, connectPlayer, init as initPlayer, clean as cleanPlayer } from 'Player'; import withPermissions from 'HOCs/withPermissions'; import PlayerBlockHeader from '../Session_/PlayerBlockHeader'; import PlayerBlock from '../Session_/PlayerBlock'; import styles from '../Session_/session.module.css'; -const InitLoader = connectPlayer(state => ({ - loading: !state.initialized +const InitLoader = connectPlayer((state) => ({ + loading: !state.initialized, }))(Loader); -function LivePlayer ({ +function LivePlayer({ session, toggleFullscreen, closeBottomBlock, @@ -30,68 +25,88 @@ function LivePlayer ({ request, isEnterprise, userEmail, - userName + userName, }) { + const [fullView, setFullView] = useState(false); useEffect(() => { if (!loadingCredentials) { - const sessionWithAgentData = { ...session.toJS(), agentInfo: { email: userEmail, name: userName, }, - } + }; initPlayer(sessionWithAgentData, assistCredendials, true); } - return () => cleanPlayer() - }, [ session.sessionId, loadingCredentials, assistCredendials ]); + return () => cleanPlayer(); + }, [session.sessionId, loadingCredentials, assistCredendials]); // LAYOUT (TODO: local layout state - useContext or something..) useEffect(() => { + const queryParams = new URLSearchParams(window.location.search); + if (queryParams.has('fullScreen') && queryParams.get('fullScreen') === 'true') { + setFullView(true); + } + if (isEnterprise) { request(); } return () => { toggleFullscreen(false); closeBottomBlock(); - } - }, []) + }; + }, []); const TABS = { EVENTS: 'User Steps', HEATMAPS: 'Click Map', - } + }; const [activeTab, setActiveTab] = useState(''); return ( - -
- + {!fullView && ( + + )} +
+
); -}; +} export default withRequest({ initialData: null, endpoint: '/assist/credentials', - dataWrapper: data => data, + dataWrapper: (data) => data, dataName: 'assistCredendials', loadingName: 'loadingCredentials', -})(withPermissions(['ASSIST_LIVE'], '', true)(connect( - state => { - return { - session: state.getIn([ 'sessions', 'current' ]), - showAssist: state.getIn([ 'sessions', 'showChatWindow' ]), - fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]), - isEnterprise: state.getIn([ 'user', 'account', 'edition' ]) === 'ee', - userEmail: state.getIn(['user', 'account', 'email']), - userName: state.getIn(['user', 'account', 'name']), - } - }, - { toggleFullscreen, closeBottomBlock }, -)(LivePlayer))); +})( + withPermissions( + ['ASSIST_LIVE'], + '', + true + )( + connect( + (state) => { + return { + session: state.getIn(['sessions', 'current']), + showAssist: state.getIn(['sessions', 'showChatWindow']), + fullscreen: state.getIn(['components', 'player', 'fullscreen']), + isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee', + userEmail: state.getIn(['user', 'account', 'email']), + userName: state.getIn(['user', 'account', 'name']), + }; + }, + { toggleFullscreen, closeBottomBlock } + )(LivePlayer) + ) +); diff --git a/frontend/app/components/Session_/Player/Player.js b/frontend/app/components/Session_/Player/Player.js index 96a4859cf..83748dc0d 100644 --- a/frontend/app/components/Session_/Player/Player.js +++ b/frontend/app/components/Session_/Player/Player.js @@ -95,6 +95,7 @@ export default class Player extends React.PureComponent { closedLive, bottomBlock, activeTab, + fullView = false, } = this.props; const maxWidth = activeTab ? 'calc(100vw - 270px)' : '100vw'; @@ -127,7 +128,7 @@ export default class Player extends React.PureComponent { {bottomBlock === INSPECTOR && }
)} - + {!fullView && }
); } diff --git a/frontend/app/components/Session_/PlayerBlock.js b/frontend/app/components/Session_/PlayerBlock.js index 68d2c51c8..54130adf5 100644 --- a/frontend/app/components/Session_/PlayerBlock.js +++ b/frontend/app/components/Session_/PlayerBlock.js @@ -1,46 +1,36 @@ import React from 'react'; -import cn from "classnames"; +import cn from 'classnames'; import { connect } from 'react-redux'; -import { - NONE, -} from 'Duck/components/player'; +import { NONE } from 'Duck/components/player'; import Player from './Player'; import SubHeader from './Subheader'; import styles from './playerBlock.module.css'; -@connect(state => ({ - fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]), - bottomBlock: state.getIn([ 'components', 'player', 'bottomBlock' ]), - sessionId: state.getIn([ 'sessions', 'current', 'sessionId' ]), - disabled: state.getIn([ 'components', 'targetDefiner', 'inspectorMode' ]), - jiraConfig: state.getIn([ 'issues', 'list' ]).first(), +@connect((state) => ({ + fullscreen: state.getIn(['components', 'player', 'fullscreen']), + bottomBlock: state.getIn(['components', 'player', 'bottomBlock']), + sessionId: state.getIn(['sessions', 'current', 'sessionId']), + disabled: state.getIn(['components', 'targetDefiner', 'inspectorMode']), + jiraConfig: state.getIn(['issues', 'list']).first(), })) export default class PlayerBlock extends React.PureComponent { render() { - const { - fullscreen, - bottomBlock, - sessionId, - disabled, - activeTab, - jiraConfig, - } = this.props; + const { fullscreen, bottomBlock, sessionId, disabled, activeTab, jiraConfig, fullView = false } = this.props; return ( -
- {!fullscreen && } +
+ {!fullscreen && !fullView && ( + + )}
); From 3c8d5d9239c2c2c666cea0e460baf8ed197d32f2 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 24 Nov 2022 16:29:39 +0100 Subject: [PATCH 67/70] feat(chalice): funnel optimizations --- api/chalicelib/core/significance.py | 28 +++-- ee/api/chalicelib/core/significance.py | 152 ++++++++++++------------- 2 files changed, 89 insertions(+), 91 deletions(-) diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py index 1522dc94a..21e701157 100644 --- a/api/chalicelib/core/significance.py +++ b/api/chalicelib/core/significance.py @@ -189,9 +189,10 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: {(" AND " + " AND ".join(stage_constraints)) if len(stage_constraints) > 0 else ""} {(" AND " + " AND ".join(first_stage_extra_constraints)) if len(first_stage_extra_constraints) > 0 and i == 0 else ""} GROUP BY main.session_id) - AS T{i + 1} {"USING (session_id)" if i > 0 else ""} + AS T{i + 1} {"ON (TRUE)" if i > 0 else ""} """) - if len(n_stages_query) == 0: + n_stages=len(n_stages_query) + if n_stages == 0: return [] n_stages_query = " LEFT JOIN LATERAL ".join(n_stages_query) n_stages_query += ") AS stages_t" @@ -200,20 +201,20 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: SELECT stages_and_issues_t.*, sessions.user_uuid FROM ( SELECT * FROM ( - SELECT * FROM - {n_stages_query} + SELECT T1.session_id, {",".join([f"stage{i + 1}_timestamp" for i in range(n_stages)])} + FROM {n_stages_query} LEFT JOIN LATERAL - ( SELECT ISE.session_id, - ISS.type as issue_type, + ( SELECT ISS.type as issue_type, ISE.timestamp AS issue_timestamp, - ISS.context_string as issue_context, + COALESCE(ISS.context_string,'') as issue_context, ISS.issue_id as issue_id FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id) WHERE ISE.timestamp >= stages_t.stage1_timestamp AND ISE.timestamp <= stages_t.stage{i + 1}_timestamp AND ISS.project_id=%(project_id)s + AND ISE.session_id = stages_t.session_id {"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""} - ) AS issues_t USING (session_id) + ) AS issues_t ON (TRUE) ) AS stages_and_issues_t INNER JOIN sessions USING(session_id); """ @@ -345,12 +346,10 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues, if error_id not in errors: errors[error_id] = [] ic = 0 - issue_type = all_issues[error_id]["issue_type"] - context = all_issues[error_id]["context"] - if row['issue_type'] is not None: + row_issue_id=row['issue_id'] + if row_issue_id is not None: if last_ts is None or (first_ts < row['issue_timestamp'] < last_ts): - context_in_row = row['issue_context'] if row['issue_context'] is not None else '' - if issue_type == row['issue_type'] and context == context_in_row: + if error_id == row_issue_id: ic = 1 ic_present = True errors[error_id].append(ic) @@ -391,9 +390,8 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage): # check that the issue exists and belongs to subfunnel: if iss is not None and (row[f'stage{last_stage}_timestamp'] is None or (row[f'stage{first_stage}_timestamp'] < iss_ts < row[f'stage{last_stage}_timestamp'])): - context_string = row['issue_context'] if row['issue_context'] is not None else '' if row["issue_id"] not in all_issues: - all_issues[row["issue_id"]] = {"context": context_string, "issue_type": row["issue_type"]} + all_issues[row["issue_id"]] = {"context": row['issue_context'], "issue_type": row["issue_type"]} n_issues_dict[row["issue_id"]] += 1 if row['user_uuid'] is not None: 
affected_users[row["issue_id"]].add(row['user_uuid']) diff --git a/ee/api/chalicelib/core/significance.py b/ee/api/chalicelib/core/significance.py index 59f773c9e..b669be2fb 100644 --- a/ee/api/chalicelib/core/significance.py +++ b/ee/api/chalicelib/core/significance.py @@ -188,9 +188,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: values=s["value"], value_key=f"value{i + 1}") n_stages_query.append(f""" (SELECT main.session_id, - {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp, - '{event_type}' AS type, - '{s["operator"]}' AS operator + {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp FROM {next_table} AS main {" ".join(extra_from)} WHERE main.timestamp >= {f"T{i}.stage{i}_timestamp" if i > 0 else "%(startTimestamp)s"} {f"AND main.session_id=T1.session_id" if i > 0 else ""} @@ -198,45 +196,53 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: {(" AND " + " AND ".join(stage_constraints)) if len(stage_constraints) > 0 else ""} {(" AND " + " AND ".join(first_stage_extra_constraints)) if len(first_stage_extra_constraints) > 0 and i == 0 else ""} GROUP BY main.session_id) - AS T{i + 1} {"USING (session_id)" if i > 0 else ""} + AS T{i + 1} {"ON (TRUE)" if i > 0 else ""} """) - if len(n_stages_query) == 0: + n_stages=len(n_stages_query) + if n_stages == 0: return [] n_stages_query = " LEFT JOIN LATERAL ".join(n_stages_query) n_stages_query += ") AS stages_t" n_stages_query = f""" - SELECT stages_and_issues_t.*,sessions.session_id, sessions.user_uuid FROM ( + SELECT stages_and_issues_t.*, sessions.user_uuid + FROM ( SELECT * FROM ( - SELECT * FROM - {n_stages_query} + SELECT T1.session_id, {",".join([f"stage{i + 1}_timestamp" for i in range(n_stages)])} + FROM {n_stages_query} LEFT JOIN LATERAL - ( - SELECT * FROM - (SELECT ISE.session_id, - ISS.type as issue_type, + ( SELECT ISS.type as issue_type, ISE.timestamp AS issue_timestamp, - ISS.context_string as issue_context, + COALESCE(ISS.context_string,'') as issue_context, ISS.issue_id as issue_id FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id) WHERE ISE.timestamp >= stages_t.stage1_timestamp AND ISE.timestamp <= stages_t.stage{i + 1}_timestamp AND ISS.project_id=%(project_id)s - {"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}) AS base_t - ) AS issues_t - USING (session_id)) AS stages_and_issues_t - inner join sessions USING(session_id); + AND ISE.session_id = stages_t.session_id + {"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""} + ) AS issues_t ON (TRUE) + ) AS stages_and_issues_t INNER JOIN sessions USING(session_id); """ # LIMIT 10000 params = {"project_id": project_id, "startTimestamp": filter_d["startDate"], "endTimestamp": filter_d["endDate"], "issueTypes": tuple(filter_issues), **values} with pg_client.PostgresClient() as cur: + query = cur.mogrify(n_stages_query, params) # print("---------------------------------------------------") - # print(cur.mogrify(n_stages_query, params)) + # print(query) # print("---------------------------------------------------") - cur.execute(cur.mogrify(n_stages_query, params)) - rows = cur.fetchall() + try: + cur.execute(query) + rows = cur.fetchall() + except Exception as err: + print("--------- FUNNEL SEARCH QUERY EXCEPTION -----------") + print(query.decode('UTF-8')) + print("--------- PAYLOAD -----------") + print(filter_d) + print("--------------------") + raise err return rows @@ -298,7 
+304,21 @@ def pearson_corr(x: list, y: list): return r, confidence, False -def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_with_context, first_stage, last_stage): +# def tuple_or(t: tuple): +# x = 0 +# for el in t: +# x |= el # | is for bitwise OR +# return x +# +# The following function is correct optimization of the previous function because t is a list of 0,1 +def tuple_or(t: tuple): + for el in t: + if el > 0: + return 1 + return 0 + + +def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues, first_stage, last_stage): """ Returns two lists with binary values 0/1: @@ -317,12 +337,6 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_ transitions = [] n_sess_affected = 0 errors = {} - for issue in all_issues_with_context: - split = issue.split('__^__') - errors[issue] = { - "errors": [], - "issue_type": split[0], - "context": split[1]} for row in rows: t = 0 @@ -330,38 +344,28 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_ last_ts = row[f'stage{last_stage}_timestamp'] if first_ts is None: continue - elif first_ts is not None and last_ts is not None: + elif last_ts is not None: t = 1 transitions.append(t) ic_present = False - for issue_type_with_context in errors: + for error_id in all_issues: + if error_id not in errors: + errors[error_id] = [] ic = 0 - issue_type = errors[issue_type_with_context]["issue_type"] - context = errors[issue_type_with_context]["context"] + issue_type = all_issues[error_id]["issue_type"] + context = all_issues[error_id]["context"] if row['issue_type'] is not None: if last_ts is None or (first_ts < row['issue_timestamp'] < last_ts): context_in_row = row['issue_context'] if row['issue_context'] is not None else '' if issue_type == row['issue_type'] and context == context_in_row: ic = 1 ic_present = True - errors[issue_type_with_context]["errors"].append(ic) + errors[error_id].append(ic) if ic_present and t: n_sess_affected += 1 - # def tuple_or(t: tuple): - # x = 0 - # for el in t: - # x |= el - # return x - def tuple_or(t: tuple): - for el in t: - if el > 0: - return 1 - return 0 - - errors = {key: errors[key]["errors"] for key in errors} all_errors = [tuple_or(t) for t in zip(*errors.values())] return transitions, errors, all_errors, n_sess_affected @@ -377,10 +381,9 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage): """ affected_users = defaultdict(lambda: set()) affected_sessions = defaultdict(lambda: set()) - contexts = defaultdict(lambda: None) + all_issues = {} n_affected_users_dict = defaultdict(lambda: None) n_affected_sessions_dict = defaultdict(lambda: None) - all_issues_with_context = set() n_issues_dict = defaultdict(lambda: 0) issues_by_session = defaultdict(lambda: 0) @@ -397,14 +400,13 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage): if iss is not None and (row[f'stage{last_stage}_timestamp'] is None or (row[f'stage{first_stage}_timestamp'] < iss_ts < row[f'stage{last_stage}_timestamp'])): context_string = row['issue_context'] if row['issue_context'] is not None else '' - issue_with_context = iss + '__^__' + context_string - contexts[issue_with_context] = {"context": context_string, "id": row["issue_id"]} - all_issues_with_context.add(issue_with_context) - n_issues_dict[issue_with_context] += 1 + if row["issue_id"] not in all_issues: + all_issues[row["issue_id"]] = {"context": context_string, "issue_type": row["issue_type"]} + n_issues_dict[row["issue_id"]] += 1 if 
row['user_uuid'] is not None: - affected_users[issue_with_context].add(row['user_uuid']) + affected_users[row["issue_id"]].add(row['user_uuid']) - affected_sessions[issue_with_context].add(row['session_id']) + affected_sessions[row["issue_id"]].add(row['session_id']) issues_by_session[row[f'session_id']] += 1 if len(affected_users) > 0: @@ -415,29 +417,28 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage): n_affected_sessions_dict.update({ iss: len(affected_sessions[iss]) for iss in affected_sessions }) - return all_issues_with_context, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict, contexts + return all_issues, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict def count_sessions(rows, n_stages): session_counts = {i: set() for i in range(1, n_stages + 1)} - for ind, row in enumerate(rows): + for row in rows: for i in range(1, n_stages + 1): if row[f"stage{i}_timestamp"] is not None: session_counts[i].add(row[f"session_id"]) + session_counts = {i: len(session_counts[i]) for i in session_counts} return session_counts def count_users(rows, n_stages): - users_in_stages = defaultdict(lambda: set()) - - for ind, row in enumerate(rows): + users_in_stages = {i: set() for i in range(1, n_stages + 1)} + for row in rows: for i in range(1, n_stages + 1): if row[f"stage{i}_timestamp"] is not None: users_in_stages[i].add(row["user_uuid"]) users_count = {i: len(users_in_stages[i]) for i in range(1, n_stages + 1)} - return users_count @@ -490,18 +491,18 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False) last_stage = n_stages n_critical_issues = 0 - issues_dict = dict({"significant": [], - "insignificant": []}) + issues_dict = {"significant": [], + "insignificant": []} session_counts = count_sessions(rows, n_stages) drop = session_counts[first_stage] - session_counts[last_stage] - all_issues_with_context, n_issues_dict, affected_users_dict, affected_sessions, contexts = get_affected_users_for_all_issues( + all_issues, n_issues_dict, affected_users_dict, affected_sessions = get_affected_users_for_all_issues( rows, first_stage, last_stage) transitions, errors, all_errors, n_sess_affected = get_transitions_and_issues_of_each_type(rows, - all_issues_with_context, + all_issues, first_stage, last_stage) - # print("len(transitions) =", len(transitions)) + del rows if any(all_errors): total_drop_corr, conf, is_sign = pearson_corr(transitions, all_errors) @@ -514,33 +515,32 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False) if drop_only: return total_drop_due_to_issues - for issue in all_issues_with_context: + for issue_id in all_issues: - if not any(errors[issue]): + if not any(errors[issue_id]): continue - r, confidence, is_sign = pearson_corr(transitions, errors[issue]) + r, confidence, is_sign = pearson_corr(transitions, errors[issue_id]) if r is not None and drop is not None and is_sign: - lost_conversions = int(r * affected_sessions[issue]) + lost_conversions = int(r * affected_sessions[issue_id]) else: lost_conversions = None if r is None: r = 0 - split = issue.split('__^__') issues_dict['significant' if is_sign else 'insignificant'].append({ - "type": split[0], - "title": helper.get_issue_title(split[0]), - "affected_sessions": affected_sessions[issue], - "unaffected_sessions": session_counts[1] - affected_sessions[issue], + "type": all_issues[issue_id]["issue_type"], + "title": helper.get_issue_title(all_issues[issue_id]["issue_type"]), + "affected_sessions": affected_sessions[issue_id], + 
"unaffected_sessions": session_counts[1] - affected_sessions[issue_id], "lost_conversions": lost_conversions, - "affected_users": affected_users_dict[issue], + "affected_users": affected_users_dict[issue_id], "conversion_impact": round(r * 100), - "context_string": contexts[issue]["context"], - "issue_id": contexts[issue]["id"] + "context_string": all_issues[issue_id]["context"], + "issue_id": issue_id }) if is_sign: - n_critical_issues += n_issues_dict[issue] + n_critical_issues += n_issues_dict[issue_id] return n_critical_issues, issues_dict, total_drop_due_to_issues From f174cbedace722d97e5c100ece2b1680ab8a90c8 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 24 Nov 2022 16:59:34 +0100 Subject: [PATCH 68/70] feat(chalice): funnel optimizations LIMIT --- api/chalicelib/core/significance.py | 1 + ee/api/chalicelib/core/significance.py | 12 +++++------- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py index 21e701157..a38dc82d1 100644 --- a/api/chalicelib/core/significance.py +++ b/api/chalicelib/core/significance.py @@ -214,6 +214,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: AND ISS.project_id=%(project_id)s AND ISE.session_id = stages_t.session_id {"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""} + LIMIT 20 -- remove the limit to get exact stats ) AS issues_t ON (TRUE) ) AS stages_and_issues_t INNER JOIN sessions USING(session_id); """ diff --git a/ee/api/chalicelib/core/significance.py b/ee/api/chalicelib/core/significance.py index b669be2fb..75df1cd94 100644 --- a/ee/api/chalicelib/core/significance.py +++ b/ee/api/chalicelib/core/significance.py @@ -221,6 +221,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: AND ISS.project_id=%(project_id)s AND ISE.session_id = stages_t.session_id {"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""} + LIMIT 20 -- remove the limit to get exact stats ) AS issues_t ON (TRUE) ) AS stages_and_issues_t INNER JOIN sessions USING(session_id); """ @@ -353,12 +354,10 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues, if error_id not in errors: errors[error_id] = [] ic = 0 - issue_type = all_issues[error_id]["issue_type"] - context = all_issues[error_id]["context"] - if row['issue_type'] is not None: + row_issue_id=row['issue_id'] + if row_issue_id is not None: if last_ts is None or (first_ts < row['issue_timestamp'] < last_ts): - context_in_row = row['issue_context'] if row['issue_context'] is not None else '' - if issue_type == row['issue_type'] and context == context_in_row: + if error_id == row_issue_id: ic = 1 ic_present = True errors[error_id].append(ic) @@ -399,9 +398,8 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage): # check that the issue exists and belongs to subfunnel: if iss is not None and (row[f'stage{last_stage}_timestamp'] is None or (row[f'stage{first_stage}_timestamp'] < iss_ts < row[f'stage{last_stage}_timestamp'])): - context_string = row['issue_context'] if row['issue_context'] is not None else '' if row["issue_id"] not in all_issues: - all_issues[row["issue_id"]] = {"context": context_string, "issue_type": row["issue_type"]} + all_issues[row["issue_id"]] = {"context": row['issue_context'], "issue_type": row["issue_type"]} n_issues_dict[row["issue_id"]] += 1 if row['user_uuid'] is not None: affected_users[row["issue_id"]].add(row['user_uuid']) From 41df26a1e2c2b24dcb1173494ef987d2329cd65b Mon Sep 
17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 24 Nov 2022 18:42:26 +0100 Subject: [PATCH 69/70] feat(chalice): return session_id for sessions' search --- api/chalicelib/core/sessions.py | 35 +++++++++++++++++---------- api/routers/core.py | 8 +++++++ ee/api/chalicelib/core/sessions.py | 38 ++++++++++++++++++------------ 3 files changed, 53 insertions(+), 28 deletions(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index fcea8621d..805109709 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -177,7 +177,7 @@ def _isUndefined_operator(op: schemas.SearchEventOperator): # This function executes the query and return result def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, - error_status=schemas.ErrorStatus.all, count_only=False, issue=None): + error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False): if data.bookmarked: data.startDate, data.endDate = sessions_favorite.get_start_end_timestamp(project_id, user_id) @@ -185,9 +185,11 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ favorite_only=data.bookmarked, issue=issue, project_id=project_id, user_id=user_id) if data.limit is not None and data.page is not None: + full_args["sessions_limit"] = data.limit full_args["sessions_limit_s"] = (data.page - 1) * data.limit full_args["sessions_limit_e"] = data.page * data.limit else: + full_args["sessions_limit"] = 200 full_args["sessions_limit_s"] = 1 full_args["sessions_limit_e"] = 200 @@ -243,17 +245,24 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) sort = helper.key_to_snake_case(data.sort) - meta_keys = metadata.get(project_id=project_id) - main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, - COALESCE(JSONB_AGG(full_sessions) - FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn - FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} - {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} - {query_part} - ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", - full_args) + if ids_only: + main_query = cur.mogrify(f"""SELECT DISTINCT ON(s.session_id) s.session_id + {query_part} + ORDER BY s.session_id desc + LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""", + full_args) + else: + meta_keys = metadata.get(project_id=project_id) + main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, + COALESCE(JSONB_AGG(full_sessions) + FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions + FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn + FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} + {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} + {query_part} + ORDER BY s.session_id desc) AS filtred_sessions + ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", + full_args) # print("--------------------") # print(main_query) # print("--------------------") @@ -266,7 +275,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ print(data.json()) 
print("--------------------") raise err - if errors_only: + if errors_only or ids_only: return helper.list_to_camel_case(cur.fetchall()) sessions = cur.fetchone() diff --git a/api/routers/core.py b/api/routers/core.py index 80f2b6296..7ee8364e7 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -56,6 +56,14 @@ def sessions_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchem return {'data': data} +@app.post('/{projectId}/sessions/search/ids', tags=["sessions"]) +@app.post('/{projectId}/sessions/search2/ids', tags=["sessions"]) +def session_ids_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True) + return {'data': data} + + @app.get('/{projectId}/events/search', tags=["events"]) def events_search(projectId: int, q: str, type: Union[schemas.FilterType, schemas.EventType, diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py index 8c9eaf006..0d885dc9b 100644 --- a/ee/api/chalicelib/core/sessions.py +++ b/ee/api/chalicelib/core/sessions.py @@ -107,8 +107,7 @@ def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, fu session_id=session_id, user_id=context.user_id) data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) - data['live'] = live and assist.is_live(project_id=project_id, - session_id=session_id, + data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, project_key=data["projectKey"]) data["inDB"] = True return data @@ -181,7 +180,7 @@ def _isUndefined_operator(op: schemas.SearchEventOperator): # This function executes the query and return result def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, - error_status=schemas.ErrorStatus.all, count_only=False, issue=None): + error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False): if data.bookmarked: data.startDate, data.endDate = sessions_favorite.get_start_end_timestamp(project_id, user_id) @@ -189,9 +188,11 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ favorite_only=data.bookmarked, issue=issue, project_id=project_id, user_id=user_id) if data.limit is not None and data.page is not None: + full_args["sessions_limit"] = data.limit full_args["sessions_limit_s"] = (data.page - 1) * data.limit full_args["sessions_limit_e"] = data.page * data.limit else: + full_args["sessions_limit"] = 200 full_args["sessions_limit_s"] = 1 full_args["sessions_limit_e"] = 200 @@ -247,17 +248,24 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) sort = helper.key_to_snake_case(data.sort) - meta_keys = metadata.get(project_id=project_id) - main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, - COALESCE(JSONB_AGG(full_sessions) - FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn - FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} - {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} - {query_part} - ORDER BY s.session_id 
desc) AS filtred_sessions - ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", - full_args) + if ids_only: + main_query = cur.mogrify(f"""SELECT DISTINCT ON(s.session_id) s.session_id + {query_part} + ORDER BY s.session_id desc + LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""", + full_args) + else: + meta_keys = metadata.get(project_id=project_id) + main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, + COALESCE(JSONB_AGG(full_sessions) + FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions + FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn + FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} + {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} + {query_part} + ORDER BY s.session_id desc) AS filtred_sessions + ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", + full_args) # print("--------------------") # print(main_query) # print("--------------------") @@ -270,7 +278,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ print(data.json()) print("--------------------") raise err - if errors_only: + if errors_only or ids_only: return helper.list_to_camel_case(cur.fetchall()) sessions = cur.fetchone() From f65af8b72402d2bd50eb716c8b06ef26373485b8 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 24 Nov 2022 19:11:34 +0100 Subject: [PATCH 70/70] feat(chalice): return session_id for sessions' search --- api/chalicelib/core/sessions.py | 36 ++++++++++++++---------------- ee/api/chalicelib/core/sessions.py | 36 ++++++++++++++---------------- 2 files changed, 34 insertions(+), 38 deletions(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 805109709..91efb967f 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -237,6 +237,12 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ GROUP BY user_id ) AS users_sessions;""", full_args) + elif ids_only: + main_query = cur.mogrify(f"""SELECT DISTINCT ON(s.session_id) s.session_id + {query_part} + ORDER BY s.session_id desc + LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""", + full_args) else: if data.order is None: data.order = schemas.SortOrderType.desc @@ -244,25 +250,17 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ if data.sort is not None and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) sort = helper.key_to_snake_case(data.sort) - - if ids_only: - main_query = cur.mogrify(f"""SELECT DISTINCT ON(s.session_id) s.session_id - {query_part} - ORDER BY s.session_id desc - LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""", - full_args) - else: - meta_keys = metadata.get(project_id=project_id) - main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, - COALESCE(JSONB_AGG(full_sessions) - FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn - FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} - {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} - {query_part} - ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", - full_args) + meta_keys = 
metadata.get(project_id=project_id) + main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, + COALESCE(JSONB_AGG(full_sessions) + FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions + FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn + FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} + {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} + {query_part} + ORDER BY s.session_id desc) AS filtred_sessions + ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", + full_args) # print("--------------------") # print(main_query) # print("--------------------") diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py index 0d885dc9b..7d999fe6c 100644 --- a/ee/api/chalicelib/core/sessions.py +++ b/ee/api/chalicelib/core/sessions.py @@ -240,6 +240,12 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ GROUP BY user_id ) AS users_sessions;""", full_args) + elif ids_only: + main_query = cur.mogrify(f"""SELECT DISTINCT ON(s.session_id) s.session_id + {query_part} + ORDER BY s.session_id desc + LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""", + full_args) else: if data.order is None: data.order = schemas.SortOrderType.desc @@ -247,25 +253,17 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ if data.sort is not None and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) sort = helper.key_to_snake_case(data.sort) - - if ids_only: - main_query = cur.mogrify(f"""SELECT DISTINCT ON(s.session_id) s.session_id - {query_part} - ORDER BY s.session_id desc - LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""", - full_args) - else: - meta_keys = metadata.get(project_id=project_id) - main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, - COALESCE(JSONB_AGG(full_sessions) - FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn - FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} - {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} - {query_part} - ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", - full_args) + meta_keys = metadata.get(project_id=project_id) + main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, + COALESCE(JSONB_AGG(full_sessions) + FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions + FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn + FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} + {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} + {query_part} + ORDER BY s.session_id desc) AS filtred_sessions + ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", + full_args) # print("--------------------") # print(main_query) # print("--------------------")
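Patches 69 and 70 expose the ids_only path over HTTP as POST /{projectId}/sessions/search/ids (and /search2/ids), returning only sessionId values and honouring limit/page instead of the full session payload. A hedged sketch of calling it from a TypeScript client (the /api prefix, auth handling and exact filter payload are assumptions, not part of the patch):

// Illustrative client call; the endpoint path comes from routers/core.py above.
async function searchSessionIds(projectId: number, filters: Record<string, unknown>, page = 1, limit = 200) {
  const res = await fetch(`/api/${projectId}/sessions/search/ids`, {   // '/api' prefix is an assumption
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },                   // add auth headers as your deployment requires
    body: JSON.stringify({ ...filters, page, limit }),
  });
  if (!res.ok) throw new Error(`sessions/search/ids failed: ${res.status}`);
  const { data } = await res.json();                                   // rows are camel-cased by the backend
  return data.map((row: { sessionId: number }) => row.sessionId);
}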