Merge remote-tracking branch 'origin/dev' into api-v1.8.2

This commit is contained in:
Taha Yassine Kraiem 2022-10-27 16:28:46 +02:00
commit 23ea07042f
385 changed files with 12084 additions and 10993 deletions

View file

@ -43,7 +43,7 @@ jobs:
id: build-image
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
IMAGE_TAG: ${{ github.sha }}-ee
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
ENVIRONMENT: staging
run: |
cd api
@ -91,7 +91,7 @@ jobs:
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# We're not passing -ee flag, because helm will add that.
IMAGE_TAG: ${{ github.sha }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
# - name: Debug Job

View file

@ -42,7 +42,7 @@ jobs:
id: build-image
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.sha }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
run: |
cd api
@ -90,7 +90,7 @@ jobs:
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f -
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.sha }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
# - name: Debug Job

View file

@ -41,7 +41,7 @@ jobs:
id: build-image
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.sha }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
run: |
set -x
@ -84,7 +84,7 @@ jobs:
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f -
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.sha }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
@ -130,7 +130,7 @@ jobs:
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# We're not passing -ee flag, because helm will add that.
IMAGE_TAG: ${{ github.sha }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
# - name: Debug Job

View file

@ -36,7 +36,7 @@ jobs:
id: build-image
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.sha }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
run: |
cd utilities
@ -53,7 +53,7 @@ jobs:
bash kube-install.sh --app utilities
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.sha }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
# - name: Debug Job

View file

@ -49,7 +49,7 @@ jobs:
id: build-image
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
IMAGE_TAG: ${{ github.sha }}-ee
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
ENVIRONMENT: staging
run: |
#
@ -96,7 +96,7 @@ jobs:
- name: Deploying to kubernetes
env:
# We're not passing -ee flag, because helm will add that.
IMAGE_TAG: ${{ github.sha }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
run: |
#
# Deploying image to environment.

View file

@ -49,7 +49,7 @@ jobs:
id: build-image
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.sha }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
run: |
#
@ -95,7 +95,7 @@ jobs:
- name: Deploying to kubernetes
env:
IMAGE_TAG: ${{ github.sha }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
run: |
#
# Deploying image to environment.

View file

@ -6,6 +6,7 @@
# Default will be OSS build.
# Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh <ee>
set -e
git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)}
envarg="default-foss"
@ -46,4 +47,4 @@ IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_
[[ $1 == "ee" ]] && {
cp ../ee/api/build_crons.sh .
IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_crons.sh $1
}
}

View file

@ -7,6 +7,7 @@
# Example
# Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh <ee>
set -e
git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)}
ee="false"
@ -25,6 +26,7 @@ function build_service() {
[[ $PUSH_IMAGE -eq 1 ]] && {
docker push ${DOCKER_REPO:-'local'}/$image:${git_sha1}
}
echo "Build completed for $image"
return
}
@ -38,6 +40,8 @@ function build_api(){
}
[[ $2 != "" ]] && {
build_service $2
cd ../backend
rm -rf ../_backend
return
}
for image in $(ls cmd);

View file

@ -1 +0,0 @@
GROUP_CACHE=from_file

View file

@ -3,7 +3,6 @@ package main
import (
"context"
"log"
"openreplay/backend/pkg/queue/types"
"os"
"os/signal"
"syscall"
@ -31,40 +30,28 @@ func main() {
log.Printf("can't create assets_total metric: %s", err)
}
consumer := queue.NewMessageConsumer(
msgHandler := func(msg messages.Message) {
switch m := msg.(type) {
case *messages.AssetCache:
cacher.CacheURL(m.SessionID(), m.URL)
totalAssets.Add(context.Background(), 1)
// TODO: connect to "raw" topic in order to listen for JSException
case *messages.JSException:
sourceList, err := assets.ExtractJSExceptionSources(&m.Payload)
if err != nil {
log.Printf("Error on source extraction: %v", err)
return
}
for _, source := range sourceList {
cacher.CacheJSFile(source)
}
}
}
msgConsumer := queue.NewConsumer(
cfg.GroupCache,
[]string{cfg.TopicCache},
func(sessionID uint64, iter messages.Iterator, meta *types.Meta) {
for iter.Next() {
if iter.Type() == messages.MsgAssetCache {
m := iter.Message().Decode()
if m == nil {
return
}
msg := m.(*messages.AssetCache)
cacher.CacheURL(sessionID, msg.URL)
totalAssets.Add(context.Background(), 1)
} else if iter.Type() == messages.MsgErrorEvent {
m := iter.Message().Decode()
if m == nil {
return
}
msg := m.(*messages.ErrorEvent)
if msg.Source != "js_exception" {
continue
}
sourceList, err := assets.ExtractJSExceptionSources(&msg.Payload)
if err != nil {
log.Printf("Error on source extraction: %v", err)
continue
}
for _, source := range sourceList {
cacher.CacheJSFile(source)
}
}
}
iter.Close()
},
messages.NewMessageIterator(msgHandler, []int{messages.MsgAssetCache, messages.MsgJSException}, true),
true,
cfg.MessageSizeLimit,
)
@ -79,15 +66,18 @@ func main() {
select {
case sig := <-sigchan:
log.Printf("Caught signal %v: terminating\n", sig)
consumer.Close()
cacher.Stop()
msgConsumer.Close()
os.Exit(0)
case err := <-cacher.Errors:
log.Printf("Error while caching: %v", err)
// TODO: notify user
case <-tick:
cacher.UpdateTimeouts()
default:
if err := consumer.ConsumeNext(); err != nil {
if !cacher.CanCache() {
continue
}
if err := msgConsumer.ConsumeNext(); err != nil {
log.Fatalf("Error on consumption: %v", err)
}
}

View file

@ -45,10 +45,6 @@ func main() {
// Create handler's aggregator
builderMap := sessions.NewBuilderMap(handlersFabric)
keepMessage := func(tp int) bool {
return tp == messages.MsgMetadata || tp == messages.MsgIssueEvent || tp == messages.MsgSessionStart || tp == messages.MsgSessionEnd || tp == messages.MsgUserID || tp == messages.MsgUserAnonymousID || tp == messages.MsgCustomEvent || tp == messages.MsgClickEvent || tp == messages.MsgInputEvent || tp == messages.MsgPageEvent || tp == messages.MsgErrorEvent || tp == messages.MsgFetchEvent || tp == messages.MsgGraphQLEvent || tp == messages.MsgIntegrationEvent || tp == messages.MsgPerformanceTrackAggr || tp == messages.MsgResourceEvent || tp == messages.MsgLongTask || tp == messages.MsgJSException || tp == messages.MsgResourceTiming || tp == messages.MsgRawCustomEvent || tp == messages.MsgCustomIssue || tp == messages.MsgFetch || tp == messages.MsgGraphQL || tp == messages.MsgStateAction || tp == messages.MsgSetInputTarget || tp == messages.MsgSetInputValue || tp == messages.MsgCreateDocument || tp == messages.MsgMouseClick || tp == messages.MsgSetPageLocation || tp == messages.MsgPageLoadTiming || tp == messages.MsgPageRenderTiming
}
var producer types.Producer = nil
if cfg.UseQuickwit {
producer = queue.NewProducer(cfg.MessageSizeLimit, true)
@ -60,69 +56,66 @@ func main() {
saver.InitStats()
statsLogger := logger.NewQueueStats(cfg.LoggerTimeout)
msgFilter := []int{messages.MsgMetadata, messages.MsgIssueEvent, messages.MsgSessionStart, messages.MsgSessionEnd,
messages.MsgUserID, messages.MsgUserAnonymousID, messages.MsgClickEvent,
messages.MsgIntegrationEvent, messages.MsgPerformanceTrackAggr,
messages.MsgJSException, messages.MsgResourceTiming,
messages.MsgRawCustomEvent, messages.MsgCustomIssue, messages.MsgFetch, messages.MsgGraphQL,
messages.MsgStateAction, messages.MsgSetInputTarget, messages.MsgSetInputValue, messages.MsgCreateDocument,
messages.MsgMouseClick, messages.MsgSetPageLocation, messages.MsgPageLoadTiming, messages.MsgPageRenderTiming}
// Handler logic
handler := func(sessionID uint64, iter messages.Iterator, meta *types.Meta) {
statsLogger.Collect(sessionID, meta)
msgHandler := func(msg messages.Message) {
statsLogger.Collect(msg)
for iter.Next() {
if !keepMessage(iter.Type()) {
continue
// Just save session data into db without additional checks
if err := saver.InsertMessage(msg); err != nil {
if !postgres.IsPkeyViolation(err) {
log.Printf("Message Insertion Error %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg)
}
msg := iter.Message().Decode()
if msg == nil {
return
}
// Just save session data into db without additional checks
if err := saver.InsertMessage(sessionID, msg); err != nil {
if !postgres.IsPkeyViolation(err) {
log.Printf("Message Insertion Error %v, SessionID: %v, Message: %v", err, sessionID, msg)
}
return
}
session, err := pg.GetSession(sessionID)
if session == nil {
if err != nil && !errors.Is(err, cache.NilSessionInCacheError) {
log.Printf("Error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, sessionID, msg)
}
return
}
// Save statistics to db
err = saver.InsertStats(session, msg)
if err != nil {
log.Printf("Stats Insertion Error %v; Session: %v, Message: %v", err, session, msg)
}
// Handle heuristics and save to temporary queue in memory
builderMap.HandleMessage(sessionID, msg, msg.Meta().Index)
// Process saved heuristics messages as usual messages above in the code
builderMap.IterateSessionReadyMessages(sessionID, func(msg messages.Message) {
if err := saver.InsertMessage(sessionID, msg); err != nil {
if !postgres.IsPkeyViolation(err) {
log.Printf("Message Insertion Error %v; Session: %v, Message %v", err, session, msg)
}
return
}
if err := saver.InsertStats(session, msg); err != nil {
log.Printf("Stats Insertion Error %v; Session: %v, Message %v", err, session, msg)
}
})
return
}
iter.Close()
session, err := pg.GetSession(msg.SessionID())
if session == nil {
if err != nil && !errors.Is(err, cache.NilSessionInCacheError) {
log.Printf("Error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg)
}
return
}
// Save statistics to db
err = saver.InsertStats(session, msg)
if err != nil {
log.Printf("Stats Insertion Error %v; Session: %v, Message: %v", err, session, msg)
}
// Handle heuristics and save to temporary queue in memory
builderMap.HandleMessage(msg)
// Process saved heuristics messages as usual messages above in the code
builderMap.IterateSessionReadyMessages(msg.SessionID(), func(msg messages.Message) {
if err := saver.InsertMessage(msg); err != nil {
if !postgres.IsPkeyViolation(err) {
log.Printf("Message Insertion Error %v; Session: %v, Message %v", err, session, msg)
}
return
}
if err := saver.InsertStats(session, msg); err != nil {
log.Printf("Stats Insertion Error %v; Session: %v, Message %v", err, session, msg)
}
})
}
// Init consumer
consumer := queue.NewMessageConsumer(
consumer := queue.NewConsumer(
cfg.GroupDB,
[]string{
cfg.TopicRawWeb,
cfg.TopicAnalytics,
},
handler,
messages.NewMessageIterator(msgHandler, msgFilter, true),
false,
cfg.MessageSizeLimit,
)
@ -133,33 +126,36 @@ func main() {
signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
commitTick := time.Tick(cfg.CommitBatchTimeout)
// Send collected batches to db
commitDBUpdates := func() {
start := time.Now()
pg.CommitBatches()
pgDur := time.Now().Sub(start).Milliseconds()
start = time.Now()
if err := saver.CommitStats(); err != nil {
log.Printf("Error on stats commit: %v", err)
}
chDur := time.Now().Sub(start).Milliseconds()
log.Printf("commit duration(ms), pg: %d, ch: %d", pgDur, chDur)
if err := consumer.Commit(); err != nil {
log.Printf("Error on consumer commit: %v", err)
}
}
for {
select {
case sig := <-sigchan:
log.Printf("Caught signal %v: terminating\n", sig)
log.Printf("Caught signal %s: terminating\n", sig.String())
commitDBUpdates()
consumer.Close()
os.Exit(0)
case <-commitTick:
// Send collected batches to db
start := time.Now()
pg.CommitBatches()
pgDur := time.Now().Sub(start).Milliseconds()
start = time.Now()
if err := saver.CommitStats(consumer.HasFirstPartition()); err != nil {
log.Printf("Error on stats commit: %v", err)
}
chDur := time.Now().Sub(start).Milliseconds()
log.Printf("commit duration(ms), pg: %d, ch: %d", pgDur, chDur)
// TODO: use commit worker to save time each tick
if err := consumer.Commit(); err != nil {
log.Printf("Error on consumer commit: %v", err)
}
commitDBUpdates()
default:
// Handle new message from queue
err := consumer.ConsumeNext()
if err != nil {
if err := consumer.ConsumeNext(); err != nil {
log.Fatalf("Error on consumption: %v", err)
}
}

View file

@ -2,7 +2,7 @@ package main
import (
"log"
"openreplay/backend/pkg/queue/types"
"openreplay/backend/internal/storage"
"os"
"os/signal"
"syscall"
@ -20,42 +20,27 @@ import (
)
func main() {
metrics := monitoring.New("ender")
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
// Load service configuration
metrics := monitoring.New("ender")
cfg := ender.New()
pg := cache.NewPGCache(postgres.NewConn(cfg.Postgres, 0, 0, metrics), cfg.ProjectExpirationTimeoutMs)
defer pg.Close()
// Init all modules
statsLogger := logger.NewQueueStats(cfg.LoggerTimeout)
sessions, err := sessionender.New(metrics, intervals.EVENTS_SESSION_END_TIMEOUT, cfg.PartitionsNumber)
sessions, err := sessionender.New(metrics, intervals.EVENTS_SESSION_END_TIMEOUT, cfg.PartitionsNumber, logger.NewQueueStats(cfg.LoggerTimeout))
if err != nil {
log.Printf("can't init ender service: %s", err)
return
}
producer := queue.NewProducer(cfg.MessageSizeLimit, true)
consumer := queue.NewMessageConsumer(
consumer := queue.NewConsumer(
cfg.GroupEnder,
[]string{
cfg.TopicRawWeb,
},
func(sessionID uint64, iter messages.Iterator, meta *types.Meta) {
for iter.Next() {
if iter.Type() == messages.MsgSessionStart || iter.Type() == messages.MsgSessionEnd {
continue
}
if iter.Message().Meta().Timestamp == 0 {
log.Printf("ZERO TS, sessID: %d, msgType: %d", sessionID, iter.Type())
}
statsLogger.Collect(sessionID, meta)
sessions.UpdateSession(sessionID, meta.Timestamp, iter.Message().Meta().Timestamp)
}
iter.Close()
},
[]string{cfg.TopicRawWeb},
messages.NewMessageIterator(
func(msg messages.Message) { sessions.UpdateSession(msg) },
[]int{messages.MsgTimestamp},
false),
false,
cfg.MessageSizeLimit,
)
@ -94,7 +79,16 @@ func main() {
currDuration, newDuration)
return true
}
if err := producer.Produce(cfg.TopicRawWeb, sessionID, messages.Encode(msg)); err != nil {
if cfg.UseEncryption {
if key := storage.GenerateEncryptionKey(); key != nil {
if err := pg.InsertSessionEncryptionKey(sessionID, key); err != nil {
log.Printf("can't save session encryption key: %s, session will not be encrypted", err)
} else {
msg.EncryptionKey = string(key)
}
}
}
if err := producer.Produce(cfg.TopicRawWeb, sessionID, msg.Encode()); err != nil {
log.Printf("can't send sessionEnd to topic: %s; sessID: %d", err, sessionID)
return false
}

View file

@ -2,7 +2,6 @@ package main
import (
"log"
"openreplay/backend/pkg/queue/types"
"os"
"os/signal"
"syscall"
@ -47,25 +46,18 @@ func main() {
// Init producer and consumer for data bus
producer := queue.NewProducer(cfg.MessageSizeLimit, true)
consumer := queue.NewMessageConsumer(
msgHandler := func(msg messages.Message) {
statsLogger.Collect(msg)
builderMap.HandleMessage(msg)
}
consumer := queue.NewConsumer(
cfg.GroupHeuristics,
[]string{
cfg.TopicRawWeb,
},
func(sessionID uint64, iter messages.Iterator, meta *types.Meta) {
var lastMessageID uint64
for iter.Next() {
statsLogger.Collect(sessionID, meta)
msg := iter.Message().Decode()
if msg == nil {
log.Printf("failed batch, sess: %d, lastIndex: %d", sessionID, lastMessageID)
continue
}
lastMessageID = msg.Meta().Index
builderMap.HandleMessage(sessionID, msg, iter.Message().Meta().Index)
}
iter.Close()
},
messages.NewMessageIterator(msgHandler, nil, true),
false,
cfg.MessageSizeLimit,
)
@ -86,7 +78,7 @@ func main() {
os.Exit(0)
case <-tick:
builderMap.IterateReadyMessages(func(sessionID uint64, readyMsg messages.Message) {
producer.Produce(cfg.TopicAnalytics, sessionID, messages.Encode(readyMsg))
producer.Produce(cfg.TopicAnalytics, sessionID, readyMsg.Encode())
})
producer.Flush(cfg.ProducerTimeout)
consumer.Commit()

View file

@ -13,12 +13,10 @@ import (
"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/intervals"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue"
"openreplay/backend/pkg/token"
)
//
func main() {
metrics := monitoring.New("integrations")
@ -84,7 +82,7 @@ func main() {
}
sessionID = sessData.ID
}
producer.Produce(cfg.TopicAnalytics, sessionID, messages.Encode(event.IntegrationEvent))
producer.Produce(cfg.TopicAnalytics, sessionID, event.IntegrationEvent.Encode())
case err := <-manager.Errors:
log.Printf("Integration error: %v\n", err)
case i := <-manager.RequestDataUpdates:

View file

@ -3,7 +3,7 @@ package main
import (
"context"
"log"
"openreplay/backend/pkg/queue/types"
"openreplay/backend/pkg/pprof"
"os"
"os/signal"
"syscall"
@ -13,13 +13,15 @@ import (
"openreplay/backend/internal/sink/assetscache"
"openreplay/backend/internal/sink/oswriter"
"openreplay/backend/internal/storage"
. "openreplay/backend/pkg/messages"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/monitoring"
"openreplay/backend/pkg/queue"
"openreplay/backend/pkg/url/assets"
)
func main() {
pprof.StartProfilingServer()
metrics := monitoring.New("sink")
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
@ -35,9 +37,10 @@ func main() {
producer := queue.NewProducer(cfg.MessageSizeLimit, true)
defer producer.Close(cfg.ProducerCloseTimeout)
rewriter := assets.NewRewriter(cfg.AssetsOrigin)
assetMessageHandler := assetscache.New(cfg, rewriter, producer)
assetMessageHandler := assetscache.New(cfg, rewriter, producer, metrics)
counter := storage.NewLogCounter()
// Session message metrics
totalMessages, err := metrics.RegisterCounter("messages_total")
if err != nil {
log.Printf("can't create messages_total metric: %s", err)
@ -51,64 +54,75 @@ func main() {
log.Printf("can't create messages_size metric: %s", err)
}
consumer := queue.NewMessageConsumer(
msgHandler := func(msg messages.Message) {
// [METRICS] Increase the number of processed messages
totalMessages.Add(context.Background(), 1)
// Send SessionEnd trigger to storage service
if msg.TypeID() == messages.MsgSessionEnd {
if err := producer.Produce(cfg.TopicTrigger, msg.SessionID(), msg.Encode()); err != nil {
log.Printf("can't send SessionEnd to trigger topic: %s; sessID: %d", err, msg.SessionID())
}
return
}
// Process assets
if msg.TypeID() == messages.MsgSetNodeAttributeURLBased ||
msg.TypeID() == messages.MsgSetCSSDataURLBased ||
msg.TypeID() == messages.MsgCSSInsertRuleURLBased ||
msg.TypeID() == messages.MsgAdoptedSSReplaceURLBased ||
msg.TypeID() == messages.MsgAdoptedSSInsertRuleURLBased {
m := msg.Decode()
if m == nil {
log.Printf("assets decode err, info: %s", msg.Meta().Batch().Info())
return
}
msg = assetMessageHandler.ParseAssets(m)
}
// Filter message
if !messages.IsReplayerType(msg.TypeID()) {
return
}
// If message timestamp is empty, use at least ts of session start
ts := msg.Meta().Timestamp
if ts == 0 {
log.Printf("zero ts; sessID: %d, msgType: %d", msg.SessionID(), msg.TypeID())
} else {
// Log ts of last processed message
counter.Update(msg.SessionID(), time.UnixMilli(ts))
}
// Write encoded message with index to session file
data := msg.EncodeWithIndex()
if data == nil {
log.Printf("can't encode with index, err: %s", err)
return
}
if messages.IsDOMType(msg.TypeID()) {
if err := writer.WriteDOM(msg.SessionID(), data); err != nil {
log.Printf("DOM Writer error: %s, info: %s", err, msg.Meta().Batch().Info())
}
}
if !messages.IsDOMType(msg.TypeID()) || msg.TypeID() == messages.MsgTimestamp {
// TODO: write only necessary timestamps
if err := writer.WriteDEV(msg.SessionID(), data); err != nil {
log.Printf("Devtools Writer error: %s, info: %s", err, msg.Meta().Batch().Info())
}
}
// [METRICS] Increase the number of written to the files messages and the message size
messageSize.Record(context.Background(), float64(len(data)))
savedMessages.Add(context.Background(), 1)
}
consumer := queue.NewConsumer(
cfg.GroupSink,
[]string{
cfg.TopicRawWeb,
},
func(sessionID uint64, iter Iterator, meta *types.Meta) {
for iter.Next() {
// [METRICS] Increase the number of processed messages
totalMessages.Add(context.Background(), 1)
// Send SessionEnd trigger to storage service
if iter.Type() == MsgSessionEnd {
if err := producer.Produce(cfg.TopicTrigger, sessionID, iter.Message().Encode()); err != nil {
log.Printf("can't send SessionEnd to trigger topic: %s; sessID: %d", err, sessionID)
}
continue
}
msg := iter.Message()
// Process assets
if iter.Type() == MsgSetNodeAttributeURLBased ||
iter.Type() == MsgSetCSSDataURLBased ||
iter.Type() == MsgCSSInsertRuleURLBased ||
iter.Type() == MsgAdoptedSSReplaceURLBased ||
iter.Type() == MsgAdoptedSSInsertRuleURLBased {
m := msg.Decode()
if m == nil {
return
}
msg = assetMessageHandler.ParseAssets(sessionID, m) // TODO: filter type only once (use iterator inide or bring ParseAssets out here).
}
// Filter message
if !IsReplayerType(msg.TypeID()) {
continue
}
// If message timestamp is empty, use at least ts of session start
ts := msg.Meta().Timestamp
if ts == 0 {
log.Printf("zero ts; sessID: %d, msgType: %d", sessionID, iter.Type())
} else {
// Log ts of last processed message
counter.Update(sessionID, time.UnixMilli(ts))
}
// Write encoded message with index to session file
data := msg.EncodeWithIndex()
if err := writer.Write(sessionID, data); err != nil {
log.Printf("Writer error: %v\n", err)
}
// [METRICS] Increase the number of written to the files messages and the message size
messageSize.Record(context.Background(), float64(len(data)))
savedMessages.Add(context.Background(), 1)
}
iter.Close()
},
messages.NewMessageIterator(msgHandler, nil, false),
false,
cfg.MessageSizeLimit,
)
@ -122,6 +136,9 @@ func main() {
select {
case sig := <-sigchan:
log.Printf("Caught signal %v: terminating\n", sig)
if err := writer.SyncAll(); err != nil {
log.Printf("sync error: %v\n", err)
}
if err := consumer.Commit(); err != nil {
log.Printf("can't commit messages: %s", err)
}
@ -129,7 +146,7 @@ func main() {
os.Exit(0)
case <-tick:
if err := writer.SyncAll(); err != nil {
log.Fatalf("Sync error: %v\n", err)
log.Fatalf("sync error: %v\n", err)
}
counter.Print()
if err := consumer.Commit(); err != nil {
@ -142,5 +159,4 @@ func main() {
}
}
}
}

View file

@ -2,10 +2,8 @@ package main
import (
"log"
"openreplay/backend/pkg/queue/types"
"os"
"os/signal"
"strconv"
"syscall"
"time"
@ -38,24 +36,24 @@ func main() {
log.Fatalf("can't init sessionFinder module: %s", err)
}
consumer := queue.NewMessageConsumer(
consumer := queue.NewConsumer(
cfg.GroupStorage,
[]string{
cfg.TopicTrigger,
},
func(sessionID uint64, iter messages.Iterator, meta *types.Meta) {
for iter.Next() {
if iter.Type() == messages.MsgSessionEnd {
msg := iter.Message().Decode().(*messages.SessionEnd)
if err := srv.UploadKey(strconv.FormatUint(sessionID, 10), 5); err != nil {
log.Printf("can't find session: %d", sessionID)
sessionFinder.Find(sessionID, msg.Timestamp)
}
// Log timestamp of last processed session
counter.Update(sessionID, time.UnixMilli(meta.Timestamp))
messages.NewMessageIterator(
func(msg messages.Message) {
sesEnd := msg.(*messages.SessionEnd)
if err := srv.UploadSessionFiles(sesEnd); err != nil {
log.Printf("can't find session: %d", msg.SessionID())
sessionFinder.Find(msg.SessionID(), sesEnd.Timestamp)
}
}
},
// Log timestamp of last processed session
counter.Update(msg.SessionID(), time.UnixMilli(msg.Meta().Batch().Timestamp()))
},
[]int{messages.MsgSessionEnd},
true,
),
true,
cfg.MessageSizeLimit,
)

View file

@ -5,6 +5,7 @@ go 1.18
require (
cloud.google.com/go/logging v1.4.2
github.com/ClickHouse/clickhouse-go/v2 v2.2.0
github.com/Masterminds/semver v1.5.0
github.com/aws/aws-sdk-go v1.44.98
github.com/btcsuite/btcutil v1.0.2
github.com/elastic/go-elasticsearch/v7 v7.13.1

View file

@ -64,6 +64,8 @@ github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym
github.com/ClickHouse/clickhouse-go v1.5.4/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI=
github.com/ClickHouse/clickhouse-go/v2 v2.2.0 h1:dj00TDKY+xwuTJdbpspCSmTLFyWzRJerTHwaBxut1C0=
github.com/ClickHouse/clickhouse-go/v2 v2.2.0/go.mod h1:8f2XZUi7XoeU+uPIytSi1cvx8fmJxi7vIgqpvYTF1+o=
github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=
github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=
github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII=

View file

@ -33,6 +33,11 @@ type cacher struct {
sizeLimit int
downloadedAssets syncfloat64.Counter
requestHeaders map[string]string
workers *WorkerPool
}
// CanCache reports whether the download worker pool has free queue
// capacity, i.e. whether the caller may hand the cacher another URL
// without the enqueue blocking.
func (c *cacher) CanCache() bool {
	return c.workers.CanAddTask()
}
func NewCacher(cfg *config.Config, metrics *monitoring.Metrics) *cacher {
@ -44,7 +49,7 @@ func NewCacher(cfg *config.Config, metrics *monitoring.Metrics) *cacher {
if err != nil {
log.Printf("can't create downloaded_assets metric: %s", err)
}
return &cacher{
c := &cacher{
timeoutMap: newTimeoutMap(),
s3: storage.NewS3(cfg.AWSRegion, cfg.S3BucketAssets),
httpClient: &http.Client{
@ -60,47 +65,48 @@ func NewCacher(cfg *config.Config, metrics *monitoring.Metrics) *cacher {
downloadedAssets: downloadedAssets,
requestHeaders: cfg.AssetsRequestHeaders,
}
c.workers = NewPool(64, c.CacheFile)
return c
}
func (c *cacher) cacheURL(requestURL string, sessionID uint64, depth byte, urlContext string, isJS bool) {
var cachePath string
if isJS {
cachePath = assets.GetCachePathForJS(requestURL)
} else {
cachePath = assets.GetCachePathForAssets(sessionID, requestURL)
}
if c.timeoutMap.contains(cachePath) {
return
}
c.timeoutMap.add(cachePath)
crTime := c.s3.GetCreationTime(cachePath)
if crTime != nil && crTime.After(time.Now().Add(-MAX_STORAGE_TIME)) { // recently uploaded
return
}
// CacheFile is the Job callback passed to the worker pool (see NewCacher);
// each worker invokes it once per dequeued download task.
func (c *cacher) CacheFile(task *Task) {
	c.cacheURL(task)
}
req, _ := http.NewRequest("GET", requestURL, nil)
req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 6.1; rv:31.0) Gecko/20100101 Firefox/31.0")
func (c *cacher) cacheURL(t *Task) {
t.retries--
req, _ := http.NewRequest("GET", t.requestURL, nil)
if t.retries%2 == 0 {
req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 6.1; rv:31.0) Gecko/20100101 Firefox/31.0")
}
for k, v := range c.requestHeaders {
req.Header.Set(k, v)
}
res, err := c.httpClient.Do(req)
if err != nil {
c.Errors <- errors.Wrap(err, urlContext)
c.Errors <- errors.Wrap(err, t.urlContext)
return
}
defer res.Body.Close()
if res.StatusCode >= 400 {
// TODO: retry
c.Errors <- errors.Wrap(fmt.Errorf("Status code is %v, ", res.StatusCode), urlContext)
printErr := true
// Retry 403 error
if res.StatusCode == 403 && t.retries > 0 {
c.workers.AddTask(t)
printErr = false
}
if printErr {
c.Errors <- errors.Wrap(fmt.Errorf("Status code is %v, ", res.StatusCode), t.urlContext)
}
return
}
data, err := ioutil.ReadAll(io.LimitReader(res.Body, int64(c.sizeLimit+1)))
if err != nil {
c.Errors <- errors.Wrap(err, urlContext)
c.Errors <- errors.Wrap(err, t.urlContext)
return
}
if len(data) > c.sizeLimit {
c.Errors <- errors.Wrap(errors.New("Maximum size exceeded"), urlContext)
c.Errors <- errors.Wrap(errors.New("Maximum size exceeded"), t.urlContext)
return
}
@ -112,44 +118,94 @@ func (c *cacher) cacheURL(requestURL string, sessionID uint64, depth byte, urlCo
strData := string(data)
if isCSS {
strData = c.rewriter.RewriteCSS(sessionID, requestURL, strData) // TODO: one method for rewrite and return list
strData = c.rewriter.RewriteCSS(t.sessionID, t.requestURL, strData) // TODO: one method for rewrite and return list
}
// TODO: implement in streams
err = c.s3.Upload(strings.NewReader(strData), cachePath, contentType, false)
err = c.s3.Upload(strings.NewReader(strData), t.cachePath, contentType, false)
if err != nil {
c.Errors <- errors.Wrap(err, urlContext)
c.Errors <- errors.Wrap(err, t.urlContext)
return
}
c.downloadedAssets.Add(context.Background(), 1)
if isCSS {
if depth > 0 {
if t.depth > 0 {
for _, extractedURL := range assets.ExtractURLsFromCSS(string(data)) {
if fullURL, cachable := assets.GetFullCachableURL(requestURL, extractedURL); cachable {
go c.cacheURL(fullURL, sessionID, depth-1, urlContext+"\n -> "+fullURL, false)
if fullURL, cachable := assets.GetFullCachableURL(t.requestURL, extractedURL); cachable {
c.checkTask(&Task{
requestURL: fullURL,
sessionID: t.sessionID,
depth: t.depth - 1,
urlContext: t.urlContext + "\n -> " + fullURL,
isJS: false,
retries: setRetries(),
})
}
}
if err != nil {
c.Errors <- errors.Wrap(err, urlContext)
c.Errors <- errors.Wrap(err, t.urlContext)
return
}
} else {
c.Errors <- errors.Wrap(errors.New("Maximum recursion cache depth exceeded"), urlContext)
c.Errors <- errors.Wrap(errors.New("Maximum recursion cache depth exceeded"), t.urlContext)
return
}
}
return
}
// checkTask decides whether the asset described by the task still needs
// downloading and, if so, places it on the worker pool queue. A task is
// skipped when its cache path was handled recently (timeoutMap) or when
// the object already exists in S3 and is younger than MAX_STORAGE_TIME.
func (c *cacher) checkTask(t *Task) {
	// Resolve the storage path: JS files are cached globally, every
	// other asset is cached per session.
	var path string
	if !t.isJS {
		path = assets.GetCachePathForAssets(t.sessionID, t.requestURL)
	} else {
		path = assets.GetCachePathForJS(t.requestURL)
	}
	// De-duplicate: bail out if this path was seen recently.
	if c.timeoutMap.contains(path) {
		return
	}
	c.timeoutMap.add(path)
	// Bail out if the asset was uploaded to S3 not long ago.
	crTime := c.s3.GetCreationTime(path)
	if crTime != nil && crTime.After(time.Now().Add(-MAX_STORAGE_TIME)) {
		return
	}
	// Queue the download.
	t.cachePath = path
	c.workers.AddTask(t)
}
// CacheJSFile schedules a single JavaScript source file for caching.
// JS assets carry no session (sessionID 0) and are not scanned for
// nested resources (depth 0).
func (c *cacher) CacheJSFile(sourceURL string) {
	task := &Task{
		requestURL: sourceURL,
		urlContext: sourceURL,
		isJS:       true,
		retries:    setRetries(),
	}
	c.checkTask(task)
}
// CacheURL schedules a session asset for caching, allowing recursive
// caching of resources referenced from CSS up to MAX_CACHE_DEPTH levels.
func (c *cacher) CacheURL(sessionID uint64, fullURL string) {
	task := &Task{
		requestURL: fullURL,
		sessionID:  sessionID,
		depth:      MAX_CACHE_DEPTH,
		urlContext: fullURL,
		retries:    setRetries(),
	}
	c.checkTask(task)
}
// UpdateTimeouts removes outdated entries from the de-duplication map so
// that previously seen cache paths become eligible for caching again.
func (c *cacher) UpdateTimeouts() {
	c.timeoutMap.deleteOutdated()
}
// Stop shuts down the cacher's download worker pool and waits for all
// workers to exit (see WorkerPool.Stop).
func (c *cacher) Stop() {
	c.workers.Stop()
}
// setRetries returns the number of download attempts granted to a new
// cache task; cacheURL decrements it and re-queues the task on HTTP 403
// responses while attempts remain.
func setRetries() int {
	// Named instead of a bare magic number; value unchanged.
	const maxDownloadAttempts = 10
	return maxDownloadAttempts
}

View file

@ -0,0 +1,80 @@
package cacher
import (
"log"
"sync"
)
// Task describes a single asset-download job handled by the worker pool.
type Task struct {
	requestURL string // URL to download
	sessionID  uint64 // owning session; 0 for session-independent (JS) assets
	depth      byte   // remaining recursion depth for resources nested in CSS
	urlContext string // breadcrumb of URLs used in error messages
	isJS       bool   // true when the asset is a JS file (cached globally)
	cachePath  string // destination path in cache storage
	retries    int    // remaining download attempts
}

// WorkerPool runs a fixed number of goroutines that execute the same Job
// callback for every queued Task.
type WorkerPool struct {
	tasks chan *Task // buffered task queue
	wg    sync.WaitGroup
	done  chan struct{} // closed exactly once to tell workers to exit
	term  sync.Once     // guards closing done
	size  int           // number of worker goroutines
	job   Job           // callback executed for each task
}

// CanAddTask reports whether the task queue has spare capacity, i.e. an
// AddTask call would not block right now.
func (p *WorkerPool) CanAddTask() bool {
	// Return the comparison directly instead of if/return true/return false.
	return len(p.tasks) < cap(p.tasks)
}

// Job is the callback a worker invokes for each task it receives.
type Job func(task *Task)

// NewPool creates a pool of size workers executing job and starts the
// workers immediately. The task queue capacity is fixed at 128.
func NewPool(size int, job Job) *WorkerPool {
	newPool := &WorkerPool{
		tasks: make(chan *Task, 128),
		done:  make(chan struct{}),
		size:  size,
		job:   job,
	}
	newPool.init()
	return newPool
}

// init launches the worker goroutines and registers them on the WaitGroup.
func (p *WorkerPool) init() {
	p.wg.Add(p.size)
	for i := 0; i < p.size; i++ {
		go p.worker()
	}
}

// worker processes tasks until the pool's done channel is closed.
func (p *WorkerPool) worker() {
	for {
		select {
		case newTask := <-p.tasks:
			p.job(newTask)
		case <-p.done:
			p.wg.Done()
			return
		}
	}
}

// AddTask enqueues a task for processing. Tasks whose retries are
// exhausted are silently dropped. Blocks when the queue is full — check
// CanAddTask first to avoid blocking.
func (p *WorkerPool) AddTask(task *Task) {
	if task.retries <= 0 {
		return
	}
	p.tasks <- task
}

// Stop signals all workers to exit and waits for them to finish. Tasks
// still sitting in the queue when Stop is called are not processed.
// Safe to call more than once (close is guarded by sync.Once).
func (p *WorkerPool) Stop() {
	log.Printf("stopping workers")
	p.term.Do(func() {
		close(p.done)
	})
	p.wg.Wait()
	log.Printf("all workers have been stopped")
}

View file

@ -17,9 +17,6 @@ import (
)
func readFile(path string) (map[string]string, error) {
if path == "" {
return nil, fmt.Errorf("file path is empty")
}
file, err := os.Open(path)
if err != nil {
return nil, fmt.Errorf("can't open file: %s", err)
@ -40,6 +37,10 @@ func readFile(path string) (map[string]string, error) {
}
func parseFile(a interface{}, path string) {
// Skip parsing process without logs if we don't have path to config file
if path == "" {
return
}
envs, err := readFile(path)
if err != nil {
log.Printf("can't parse config file: %s", err)

View file

@ -14,6 +14,7 @@ type Config struct {
TopicRawWeb string `env:"TOPIC_RAW_WEB,required"`
ProducerTimeout int `env:"PRODUCER_TIMEOUT,default=2000"`
PartitionsNumber int `env:"PARTITIONS_NUMBER,required"`
UseEncryption bool `env:"USE_ENCRYPTION,default=false"`
}
func New() *Config {

View file

@ -17,6 +17,8 @@ type Config struct {
CacheAssets bool `env:"CACHE_ASSETS,required"`
AssetsOrigin string `env:"ASSETS_ORIGIN,required"`
ProducerCloseTimeout int `env:"PRODUCER_CLOSE_TIMEOUT,default=15000"`
CacheThreshold int64 `env:"CACHE_THRESHOLD,default=75"`
CacheExpiration int64 `env:"CACHE_EXPIRATION,default=120"`
}
func New() *Config {

View file

@ -5,7 +5,8 @@ import (
. "openreplay/backend/pkg/messages"
)
func (mi *Saver) InsertMessage(sessionID uint64, msg Message) error {
func (mi *Saver) InsertMessage(msg Message) error {
sessionID := msg.SessionID()
switch m := msg.(type) {
// Common
case *Metadata:
@ -37,23 +38,16 @@ func (mi *Saver) InsertMessage(sessionID uint64, msg Message) error {
case *PageEvent:
mi.sendToFTS(msg, sessionID)
return mi.pg.InsertWebPageEvent(sessionID, m)
case *ErrorEvent:
return mi.pg.InsertWebErrorEvent(sessionID, m)
case *FetchEvent:
mi.sendToFTS(msg, sessionID)
return mi.pg.InsertWebFetchEvent(sessionID, m)
case *GraphQLEvent:
mi.sendToFTS(msg, sessionID)
return mi.pg.InsertWebGraphQLEvent(sessionID, m)
case *JSException:
return mi.pg.InsertWebJSException(m)
case *IntegrationEvent:
return mi.pg.InsertWebErrorEvent(sessionID, &ErrorEvent{
MessageID: m.Meta().Index,
Timestamp: m.Timestamp,
Source: m.Source,
Name: m.Name,
Message: m.Message,
Payload: m.Payload,
})
return mi.pg.InsertWebIntegrationEvent(m)
// IOS
case *IOSSessionStart:

View file

@ -16,12 +16,10 @@ func (si *Saver) InsertStats(session *Session, msg Message) error {
return si.pg.InsertWebStatsPerformance(session.SessionID, m)
case *ResourceEvent:
return si.pg.InsertWebStatsResourceEvent(session.SessionID, m)
case *LongTask:
return si.pg.InsertWebStatsLongtask(session.SessionID, m)
}
return nil
}
func (si *Saver) CommitStats(optimize bool) error {
func (si *Saver) CommitStats() error {
return nil
}

View file

@ -69,12 +69,12 @@ func (e *Router) startSessionHandlerIOS(w http.ResponseWriter, r *http.Request)
}
// TODO: if EXPIRED => send message for two sessions association
expTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond)
tokenData = &token.TokenData{sessionID, expTime.UnixMilli()}
tokenData = &token.TokenData{sessionID, 0, expTime.UnixMilli()}
country := e.services.GeoIP.ExtractISOCodeFromHTTPRequest(r)
// The difference with web is mostly here:
e.services.Producer.Produce(e.cfg.TopicRawIOS, tokenData.ID, Encode(&IOSSessionStart{
sessStart := &IOSSessionStart{
Timestamp: req.Timestamp,
ProjectID: uint64(p.ProjectID),
TrackerVersion: req.TrackerVersion,
@ -85,7 +85,8 @@ func (e *Router) startSessionHandlerIOS(w http.ResponseWriter, r *http.Request)
UserDevice: ios.MapIOSDevice(req.UserDevice),
UserDeviceType: ios.GetIOSDeviceType(req.UserDevice),
UserCountry: country,
}))
}
e.services.Producer.Produce(e.cfg.TopicRawIOS, tokenData.ID, sessStart.Encode())
}
ResponseWithJSON(w, &StartIOSSessionResponse{

View file

@ -3,6 +3,7 @@ package router
import (
"encoding/json"
"errors"
"github.com/Masterminds/semver"
"go.opentelemetry.io/otel/attribute"
"io"
"log"
@ -37,6 +38,22 @@ func (e *Router) readBody(w http.ResponseWriter, r *http.Request, limit int64) (
return bodyBytes, nil
}
// getSessionTimestamp chooses the session start timestamp: the
// server-side time for tracker versions >= 4.1.6, otherwise the
// client-supplied request timestamp (presumably older trackers did
// not tolerate server time — TODO confirm rationale).
func getSessionTimestamp(req *StartSessionRequest, startTimeMili int64) uint64 {
	clientTS := uint64(req.Timestamp)
	constraint, err := semver.NewConstraint(">=4.1.6")
	if err != nil {
		return clientTS
	}
	version, err := semver.NewVersion(req.TrackerVersion)
	if err != nil {
		// Unparseable tracker version: fall back to the client timestamp.
		return clientTS
	}
	if constraint.Check(version) {
		return uint64(startTimeMili)
	}
	return clientTS
}
func (e *Router) startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) {
startTime := time.Now()
@ -91,17 +108,22 @@ func (e *Router) startSessionHandlerWeb(w http.ResponseWriter, r *http.Request)
ResponseWithError(w, http.StatusForbidden, errors.New("browser not recognized"))
return
}
sessionID, err := e.services.Flaker.Compose(uint64(startTime.UnixMilli()))
startTimeMili := startTime.UnixMilli()
sessionID, err := e.services.Flaker.Compose(uint64(startTimeMili))
if err != nil {
ResponseWithError(w, http.StatusInternalServerError, err)
return
}
// TODO: if EXPIRED => send message for two sessions association
expTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond)
tokenData = &token.TokenData{ID: sessionID, ExpTime: expTime.UnixMilli()}
tokenData = &token.TokenData{
ID: sessionID,
Delay: startTimeMili - req.Timestamp,
ExpTime: expTime.UnixMilli(),
}
sessionStart := &SessionStart{
Timestamp: req.Timestamp,
Timestamp: getSessionTimestamp(req, startTimeMili),
ProjectID: uint64(p.ProjectID),
TrackerVersion: req.TrackerVersion,
RevID: req.RevID,
@ -125,7 +147,7 @@ func (e *Router) startSessionHandlerWeb(w http.ResponseWriter, r *http.Request)
}
// Send sessionStart message to kafka
if err := e.services.Producer.Produce(e.cfg.TopicRawWeb, tokenData.ID, Encode(sessionStart)); err != nil {
if err := e.services.Producer.Produce(e.cfg.TopicRawWeb, tokenData.ID, sessionStart.Encode()); err != nil {
log.Printf("can't send session start: %s", err)
}
}
@ -137,6 +159,7 @@ func (e *Router) startSessionHandlerWeb(w http.ResponseWriter, r *http.Request)
ProjectID: strconv.FormatUint(uint64(p.ProjectID), 10),
BeaconSizeLimit: e.cfg.BeaconSizeLimit,
StartTimestamp: int64(flakeid.ExtractTimestamp(tokenData.ID)),
Delay: tokenData.Delay,
})
}

View file

@ -4,7 +4,7 @@ type StartSessionRequest struct {
Token string `json:"token"`
UserUUID *string `json:"userUUID"`
RevID string `json:"revID"`
Timestamp uint64 `json:"timestamp"`
Timestamp int64 `json:"timestamp"`
TrackerVersion string `json:"trackerVersion"`
IsSnippet bool `json:"isSnippet"`
DeviceMemory uint64 `json:"deviceMemory"`

View file

@ -5,6 +5,8 @@ import (
"fmt"
"go.opentelemetry.io/otel/metric/instrument/syncfloat64"
"log"
log2 "openreplay/backend/pkg/log"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/monitoring"
"time"
)
@ -27,9 +29,10 @@ type SessionEnder struct {
timeCtrl *timeController
activeSessions syncfloat64.UpDownCounter
totalSessions syncfloat64.Counter
stats log2.QueueStats
}
func New(metrics *monitoring.Metrics, timeout int64, parts int) (*SessionEnder, error) {
func New(metrics *monitoring.Metrics, timeout int64, parts int, stats log2.QueueStats) (*SessionEnder, error) {
if metrics == nil {
return nil, fmt.Errorf("metrics module is empty")
}
@ -48,24 +51,31 @@ func New(metrics *monitoring.Metrics, timeout int64, parts int) (*SessionEnder,
timeCtrl: NewTimeController(parts),
activeSessions: activeSessions,
totalSessions: totalSessions,
stats: stats,
}, nil
}
// UpdateSession save timestamp for new sessions and update for existing sessions
func (se *SessionEnder) UpdateSession(sessionID uint64, timestamp, msgTimestamp int64) {
localTS := time.Now().UnixMilli()
currTS := timestamp
if currTS == 0 {
func (se *SessionEnder) UpdateSession(msg messages.Message) {
se.stats.Collect(msg)
var (
sessionID = msg.Meta().SessionID()
batchTimestamp = msg.Meta().Batch().Timestamp()
msgTimestamp = msg.Meta().Timestamp
localTimestamp = time.Now().UnixMilli()
)
if batchTimestamp == 0 {
log.Printf("got empty timestamp for sessionID: %d", sessionID)
return
}
se.timeCtrl.UpdateTime(sessionID, currTS)
se.timeCtrl.UpdateTime(sessionID, batchTimestamp)
sess, ok := se.sessions[sessionID]
if !ok {
// Register new session
se.sessions[sessionID] = &session{
lastTimestamp: currTS, // timestamp from message broker
lastUpdate: localTS, // local timestamp
lastUserTime: msgTimestamp, // last timestamp from user's machine
lastTimestamp: batchTimestamp, // timestamp from message broker
lastUpdate: localTimestamp, // local timestamp
lastUserTime: msgTimestamp, // last timestamp from user's machine
isEnded: false,
}
se.activeSessions.Add(context.Background(), 1)
@ -77,9 +87,9 @@ func (se *SessionEnder) UpdateSession(sessionID uint64, timestamp, msgTimestamp
sess.lastUserTime = msgTimestamp
}
// Keep information about the latest message for generating sessionEnd trigger
if currTS > sess.lastTimestamp {
sess.lastTimestamp = currTS
sess.lastUpdate = localTS
if batchTimestamp > sess.lastTimestamp {
sess.lastTimestamp = batchTimestamp
sess.lastUpdate = localTimestamp
sess.isEnded = false
}
}

View file

@ -1,35 +1,80 @@
package assetscache
import (
"context"
"crypto/md5"
"go.opentelemetry.io/otel/metric/instrument/syncfloat64"
"io"
"log"
"net/url"
"openreplay/backend/internal/config/sink"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/monitoring"
"openreplay/backend/pkg/queue/types"
"openreplay/backend/pkg/url/assets"
"time"
)
type AssetsCache struct {
cfg *sink.Config
rewriter *assets.Rewriter
producer types.Producer
// CachedAsset is one memoized rewritten-CSS result held in the
// in-memory cache of AssetsCache.
type CachedAsset struct {
	msg string    // the rewritten CSS payload
	ts  time.Time // insertion time, compared against the CacheExpiration config
}
func New(cfg *sink.Config, rewriter *assets.Rewriter, producer types.Producer) *AssetsCache {
type AssetsCache struct {
cfg *sink.Config
rewriter *assets.Rewriter
producer types.Producer
cache map[string]*CachedAsset
totalAssets syncfloat64.Counter
cachedAssets syncfloat64.Counter
skippedAssets syncfloat64.Counter
assetSize syncfloat64.Histogram
assetDuration syncfloat64.Histogram
}
func New(cfg *sink.Config, rewriter *assets.Rewriter, producer types.Producer, metrics *monitoring.Metrics) *AssetsCache {
// Assets metrics
totalAssets, err := metrics.RegisterCounter("assets_total")
if err != nil {
log.Printf("can't create assets_total metric: %s", err)
}
cachedAssets, err := metrics.RegisterCounter("assets_cached")
if err != nil {
log.Printf("can't create assets_cached metric: %s", err)
}
skippedAssets, err := metrics.RegisterCounter("assets_skipped")
if err != nil {
log.Printf("can't create assets_skipped metric: %s", err)
}
assetSize, err := metrics.RegisterHistogram("asset_size")
if err != nil {
log.Printf("can't create asset_size metric: %s", err)
}
assetDuration, err := metrics.RegisterHistogram("asset_duration")
if err != nil {
log.Printf("can't create asset_duration metric: %s", err)
}
return &AssetsCache{
cfg: cfg,
rewriter: rewriter,
producer: producer,
cfg: cfg,
rewriter: rewriter,
producer: producer,
cache: make(map[string]*CachedAsset, 64),
totalAssets: totalAssets,
cachedAssets: cachedAssets,
skippedAssets: skippedAssets,
assetSize: assetSize,
assetDuration: assetDuration,
}
}
func (e *AssetsCache) ParseAssets(sessID uint64, msg messages.Message) messages.Message {
func (e *AssetsCache) ParseAssets(msg messages.Message) messages.Message {
switch m := msg.(type) {
case *messages.SetNodeAttributeURLBased:
if m.Name == "src" || m.Name == "href" {
newMsg := &messages.SetNodeAttribute{
ID: m.ID,
Name: m.Name,
Value: e.handleURL(sessID, m.BaseURL, m.Value),
Value: e.handleURL(m.SessionID(), m.BaseURL, m.Value),
}
newMsg.SetMeta(msg.Meta())
return newMsg
@ -37,7 +82,7 @@ func (e *AssetsCache) ParseAssets(sessID uint64, msg messages.Message) messages.
newMsg := &messages.SetNodeAttribute{
ID: m.ID,
Name: m.Name,
Value: e.handleCSS(sessID, m.BaseURL, m.Value),
Value: e.handleCSS(m.SessionID(), m.BaseURL, m.Value),
}
newMsg.SetMeta(msg.Meta())
return newMsg
@ -45,7 +90,7 @@ func (e *AssetsCache) ParseAssets(sessID uint64, msg messages.Message) messages.
case *messages.SetCSSDataURLBased:
newMsg := &messages.SetCSSData{
ID: m.ID,
Data: e.handleCSS(sessID, m.BaseURL, m.Data),
Data: e.handleCSS(m.SessionID(), m.BaseURL, m.Data),
}
newMsg.SetMeta(msg.Meta())
return newMsg
@ -53,14 +98,14 @@ func (e *AssetsCache) ParseAssets(sessID uint64, msg messages.Message) messages.
newMsg := &messages.CSSInsertRule{
ID: m.ID,
Index: m.Index,
Rule: e.handleCSS(sessID, m.BaseURL, m.Rule),
Rule: e.handleCSS(m.SessionID(), m.BaseURL, m.Rule),
}
newMsg.SetMeta(msg.Meta())
return newMsg
case *messages.AdoptedSSReplaceURLBased:
newMsg := &messages.AdoptedSSReplace{
SheetID: m.SheetID,
Text: e.handleCSS(sessID, m.BaseURL, m.Text),
Text: e.handleCSS(m.SessionID(), m.BaseURL, m.Text),
}
newMsg.SetMeta(msg.Meta())
return newMsg
@ -68,7 +113,7 @@ func (e *AssetsCache) ParseAssets(sessID uint64, msg messages.Message) messages.
newMsg := &messages.AdoptedSSInsertRule{
SheetID: m.SheetID,
Index: m.Index,
Rule: e.handleCSS(sessID, m.BaseURL, m.Rule),
Rule: e.handleCSS(m.SessionID(), m.BaseURL, m.Rule),
}
newMsg.SetMeta(msg.Meta())
return newMsg
@ -78,10 +123,11 @@ func (e *AssetsCache) ParseAssets(sessID uint64, msg messages.Message) messages.
func (e *AssetsCache) sendAssetForCache(sessionID uint64, baseURL string, relativeURL string) {
if fullURL, cacheable := assets.GetFullCachableURL(baseURL, relativeURL); cacheable {
assetMessage := &messages.AssetCache{URL: fullURL}
if err := e.producer.Produce(
e.cfg.TopicCache,
sessionID,
messages.Encode(&messages.AssetCache{URL: fullURL}),
assetMessage.Encode(),
); err != nil {
log.Printf("can't send asset to cache topic, sessID: %d, err: %s", sessionID, err)
}
@ -94,18 +140,67 @@ func (e *AssetsCache) sendAssetsForCacheFromCSS(sessionID uint64, baseURL string
}
}
func (e *AssetsCache) handleURL(sessionID uint64, baseURL string, url string) string {
func (e *AssetsCache) handleURL(sessionID uint64, baseURL string, urlVal string) string {
if e.cfg.CacheAssets {
e.sendAssetForCache(sessionID, baseURL, url)
return e.rewriter.RewriteURL(sessionID, baseURL, url)
e.sendAssetForCache(sessionID, baseURL, urlVal)
return e.rewriter.RewriteURL(sessionID, baseURL, urlVal)
} else {
return assets.ResolveURL(baseURL, urlVal)
}
return assets.ResolveURL(baseURL, url)
}
func (e *AssetsCache) handleCSS(sessionID uint64, baseURL string, css string) string {
ctx := context.Background()
e.totalAssets.Add(ctx, 1)
// Try to find asset in cache
h := md5.New()
// Cut first part of url (scheme + host)
u, err := url.Parse(baseURL)
if err != nil {
log.Printf("can't parse url: %s, err: %s", baseURL, err)
if e.cfg.CacheAssets {
e.sendAssetsForCacheFromCSS(sessionID, baseURL, css)
}
return e.getRewrittenCSS(sessionID, baseURL, css)
}
justUrl := u.Scheme + "://" + u.Host + "/"
// Calculate hash sum of url + css
io.WriteString(h, justUrl)
io.WriteString(h, css)
hash := string(h.Sum(nil))
// Check the resulting hash in cache
if cachedAsset, ok := e.cache[hash]; ok {
if int64(time.Now().Sub(cachedAsset.ts).Minutes()) < e.cfg.CacheExpiration {
e.skippedAssets.Add(ctx, 1)
return cachedAsset.msg
}
}
// Send asset to download in assets service
if e.cfg.CacheAssets {
e.sendAssetsForCacheFromCSS(sessionID, baseURL, css)
return e.rewriter.RewriteCSS(sessionID, baseURL, css)
}
return assets.ResolveCSS(baseURL, css)
// Rewrite asset
start := time.Now()
res := e.getRewrittenCSS(sessionID, baseURL, css)
duration := time.Now().Sub(start).Milliseconds()
e.assetSize.Record(ctx, float64(len(res)))
e.assetDuration.Record(ctx, float64(duration))
// Save asset to cache if we spent more than threshold
if duration > e.cfg.CacheThreshold {
e.cache[hash] = &CachedAsset{
msg: res,
ts: time.Now(),
}
e.cachedAssets.Add(ctx, 1)
}
// Return rewritten asset
return res
}
// getRewrittenCSS rewrites asset URLs inside a CSS string. With asset
// caching enabled the URLs are rewritten by the rewriter; otherwise
// relative URLs are only resolved against baseURL.
// The parameter is named baseURL (not url) to avoid shadowing the
// net/url package imported by this file.
func (e *AssetsCache) getRewrittenCSS(sessionID uint64, baseURL, css string) string {
	if e.cfg.CacheAssets {
		return e.rewriter.RewriteCSS(sessionID, baseURL, css)
	}
	return assets.ResolveCSS(baseURL, css)
}

View file

@ -3,6 +3,7 @@ package oswriter
import (
"math"
"os"
"path/filepath"
"strconv"
"time"
)
@ -10,26 +11,26 @@ import (
type Writer struct {
ulimit int
dir string
files map[uint64]*os.File
atimes map[uint64]int64
files map[string]*os.File
atimes map[string]int64
}
func NewWriter(ulimit uint16, dir string) *Writer {
return &Writer{
ulimit: int(ulimit),
dir: dir + "/",
files: make(map[uint64]*os.File),
atimes: make(map[uint64]int64),
files: make(map[string]*os.File),
atimes: make(map[string]int64),
}
}
func (w *Writer) open(key uint64) (*os.File, error) {
file, ok := w.files[key]
func (w *Writer) open(fname string) (*os.File, error) {
file, ok := w.files[fname]
if ok {
return file, nil
}
if len(w.atimes) == w.ulimit {
var m_k uint64
var m_k string
var m_t int64 = math.MaxInt64
for k, t := range w.atimes {
if t < m_t {
@ -37,21 +38,28 @@ func (w *Writer) open(key uint64) (*os.File, error) {
m_t = t
}
}
if err := w.Close(m_k); err != nil {
if err := w.close(m_k); err != nil {
return nil, err
}
}
file, err := os.OpenFile(w.dir+strconv.FormatUint(key, 10), os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644)
// mkdir if not exist
pathTo := w.dir + filepath.Dir(fname)
if _, err := os.Stat(pathTo); os.IsNotExist(err) {
os.MkdirAll(pathTo, 0644)
}
file, err := os.OpenFile(w.dir+fname, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644)
if err != nil {
return nil, err
}
w.files[key] = file
w.atimes[key] = time.Now().Unix()
w.files[fname] = file
w.atimes[fname] = time.Now().Unix()
return file, nil
}
func (w *Writer) Close(key uint64) error {
file := w.files[key]
func (w *Writer) close(fname string) error {
file := w.files[fname]
if file == nil {
return nil
}
@ -61,17 +69,24 @@ func (w *Writer) Close(key uint64) error {
if err := file.Close(); err != nil {
return err
}
delete(w.files, key)
delete(w.atimes, key)
delete(w.files, fname)
delete(w.atimes, fname)
return nil
}
func (w *Writer) Write(key uint64, data []byte) error {
file, err := w.open(key)
// WriteDOM appends data to the session's DOM mob file
// (<sessionID>/dom.mob).
func (w *Writer) WriteDOM(sid uint64, data []byte) error {
	fname := strconv.FormatUint(sid, 10) + "/dom.mob"
	return w.write(fname, data)
}

// WriteDEV appends data to the session's devtools mob file
// (<sessionID>/devtools.mob).
func (w *Writer) WriteDEV(sid uint64, data []byte) error {
	fname := strconv.FormatUint(sid, 10) + "/devtools.mob"
	return w.write(fname, data)
}
// write appends data to the file identified by fname, opening it (and
// possibly evicting the least-recently-used descriptor) via open.
func (w *Writer) write(fname string, data []byte) error {
	f, err := w.open(fname)
	if err != nil {
		return err
	}
	// TODO: add check for the number of recorded bytes to file
	_, err = f.Write(data)
	return err
}

View file

@ -0,0 +1,17 @@
package storage
import (
"errors"
)
// ErrNotSupported is returned by the encryption stubs below: this
// build ships no encryption implementation (NOTE(review): presumably
// replaced in builds where USE_ENCRYPTION is available — confirm).
var ErrNotSupported = errors.New("not supported")

// GenerateEncryptionKey returns nil: no key is ever produced in this
// build.
func GenerateEncryptionKey() []byte {
	return nil
}

// EncryptData always fails with ErrNotSupported in this build.
func EncryptData(data, fullKey []byte) ([]byte, error) {
	return nil, ErrNotSupported
}

// DecryptData always fails with ErrNotSupported in this build.
func DecryptData(data, fullKey []byte) ([]byte, error) {
	return nil, ErrNotSupported
}

View file

@ -8,6 +8,7 @@ import (
"log"
config "openreplay/backend/internal/config/storage"
"openreplay/backend/pkg/flakeid"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/monitoring"
"openreplay/backend/pkg/storage"
"os"
@ -16,13 +17,16 @@ import (
)
type Storage struct {
cfg *config.Config
s3 *storage.S3
startBytes []byte
totalSessions syncfloat64.Counter
sessionSize syncfloat64.Histogram
readingTime syncfloat64.Histogram
archivingTime syncfloat64.Histogram
cfg *config.Config
s3 *storage.S3
startBytes []byte
totalSessions syncfloat64.Counter
sessionDOMSize syncfloat64.Histogram
sessionDevtoolsSize syncfloat64.Histogram
readingDOMTime syncfloat64.Histogram
readingTime syncfloat64.Histogram
archivingTime syncfloat64.Histogram
}
func New(cfg *config.Config, s3 *storage.S3, metrics *monitoring.Metrics) (*Storage, error) {
@ -37,10 +41,14 @@ func New(cfg *config.Config, s3 *storage.S3, metrics *monitoring.Metrics) (*Stor
if err != nil {
log.Printf("can't create sessions_total metric: %s", err)
}
sessionSize, err := metrics.RegisterHistogram("sessions_size")
sessionDOMSize, err := metrics.RegisterHistogram("sessions_size")
if err != nil {
log.Printf("can't create session_size metric: %s", err)
}
sessionDevtoolsSize, err := metrics.RegisterHistogram("sessions_dt_size")
if err != nil {
log.Printf("can't create sessions_dt_size metric: %s", err)
}
readingTime, err := metrics.RegisterHistogram("reading_duration")
if err != nil {
log.Printf("can't create reading_duration metric: %s", err)
@ -50,17 +58,30 @@ func New(cfg *config.Config, s3 *storage.S3, metrics *monitoring.Metrics) (*Stor
log.Printf("can't create archiving_duration metric: %s", err)
}
return &Storage{
cfg: cfg,
s3: s3,
startBytes: make([]byte, cfg.FileSplitSize),
totalSessions: totalSessions,
sessionSize: sessionSize,
readingTime: readingTime,
archivingTime: archivingTime,
cfg: cfg,
s3: s3,
startBytes: make([]byte, cfg.FileSplitSize),
totalSessions: totalSessions,
sessionDOMSize: sessionDOMSize,
sessionDevtoolsSize: sessionDevtoolsSize,
readingTime: readingTime,
archivingTime: archivingTime,
}, nil
}
func (s *Storage) UploadKey(key string, retryCount int) error {
// UploadSessionFiles pushes both mob files of a finished session to
// object storage: the DOM file (uploaded with splitting enabled) and
// the devtools file. It stops at the first failing upload.
func (s *Storage) UploadSessionFiles(msg *messages.SessionEnd) error {
	sid := msg.SessionID()
	dir := strconv.FormatUint(sid, 10)
	if err := s.uploadKey(sid, dir+"/dom.mob", true, 5, msg.EncryptionKey); err != nil {
		return err
	}
	return s.uploadKey(sid, dir+"/devtools.mob", false, 4, msg.EncryptionKey)
}
// TODO: make a bit cleaner
func (s *Storage) uploadKey(sessID uint64, key string, shouldSplit bool, retryCount int, encryptionKey string) error {
if retryCount <= 0 {
return nil
}
@ -68,7 +89,6 @@ func (s *Storage) UploadKey(key string, retryCount int) error {
start := time.Now()
file, err := os.Open(s.cfg.FSDir + "/" + key)
if err != nil {
sessID, _ := strconv.ParseUint(key, 10, 64)
return fmt.Errorf("File open error: %v; sessID: %s, part: %d, sessStart: %s\n",
err, key, sessID%16,
time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))),
@ -76,45 +96,123 @@ func (s *Storage) UploadKey(key string, retryCount int) error {
}
defer file.Close()
nRead, err := file.Read(s.startBytes)
if err != nil {
sessID, _ := strconv.ParseUint(key, 10, 64)
log.Printf("File read error: %s; sessID: %s, part: %d, sessStart: %s",
err,
key,
sessID%16,
time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))),
)
time.AfterFunc(s.cfg.RetryTimeout, func() {
s.UploadKey(key, retryCount-1)
})
return nil
}
s.readingTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()))
start = time.Now()
startReader := bytes.NewBuffer(s.startBytes[:nRead])
if err := s.s3.Upload(s.gzipFile(startReader), key, "application/octet-stream", true); err != nil {
log.Fatalf("Storage: start upload failed. %v\n", err)
}
if nRead == s.cfg.FileSplitSize {
if err := s.s3.Upload(s.gzipFile(file), key+"e", "application/octet-stream", true); err != nil {
log.Fatalf("Storage: end upload failed. %v\n", err)
}
}
s.archivingTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()))
// Save metrics
var fileSize float64 = 0
var fileSize int64 = 0
fileInfo, err := file.Stat()
if err != nil {
log.Printf("can't get file info: %s", err)
} else {
fileSize = float64(fileInfo.Size())
fileSize = fileInfo.Size()
}
var encryptedData []byte
if shouldSplit {
nRead, err := file.Read(s.startBytes)
if err != nil {
log.Printf("File read error: %s; sessID: %s, part: %d, sessStart: %s",
err,
key,
sessID%16,
time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))),
)
time.AfterFunc(s.cfg.RetryTimeout, func() {
s.uploadKey(sessID, key, shouldSplit, retryCount-1, encryptionKey)
})
return nil
}
s.readingTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()))
start = time.Now()
// Encrypt session file if we have encryption key
if encryptionKey != "" {
encryptedData, err = EncryptData(s.startBytes[:nRead], []byte(encryptionKey))
if err != nil {
log.Printf("can't encrypt data: %s", err)
encryptedData = s.startBytes[:nRead]
}
} else {
encryptedData = s.startBytes[:nRead]
}
// Compress and save to s3
startReader := bytes.NewBuffer(encryptedData)
if err := s.s3.Upload(s.gzipFile(startReader), key+"s", "application/octet-stream", true); err != nil {
log.Fatalf("Storage: start upload failed. %v\n", err)
}
// TODO: fix possible error (if we read less then FileSplitSize)
if nRead == s.cfg.FileSplitSize {
restPartSize := fileSize - int64(nRead)
fileData := make([]byte, restPartSize)
nRead, err = file.Read(fileData)
if err != nil {
log.Printf("File read error: %s; sessID: %s, part: %d, sessStart: %s",
err,
key,
sessID%16,
time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))),
)
return nil
}
if int64(nRead) != restPartSize {
log.Printf("can't read the rest part of file")
}
// Encrypt session file if we have encryption key
if encryptionKey != "" {
encryptedData, err = EncryptData(fileData, []byte(encryptionKey))
if err != nil {
log.Printf("can't encrypt data: %s", err)
encryptedData = fileData
}
} else {
encryptedData = fileData
}
// Compress and save to s3
endReader := bytes.NewBuffer(encryptedData)
if err := s.s3.Upload(s.gzipFile(endReader), key+"e", "application/octet-stream", true); err != nil {
log.Fatalf("Storage: end upload failed. %v\n", err)
}
}
s.archivingTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()))
} else {
start = time.Now()
fileData := make([]byte, fileSize)
nRead, err := file.Read(fileData)
if err != nil {
log.Printf("File read error: %s; sessID: %s, part: %d, sessStart: %s",
err,
key,
sessID%16,
time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))),
)
return nil
}
if int64(nRead) != fileSize {
log.Printf("can't read the rest part of file")
}
// Encrypt session file if we have encryption key
if encryptionKey != "" {
encryptedData, err = EncryptData(fileData, []byte(encryptionKey))
if err != nil {
log.Printf("can't encrypt data: %s", err)
encryptedData = fileData
}
} else {
encryptedData = fileData
}
endReader := bytes.NewBuffer(encryptedData)
if err := s.s3.Upload(s.gzipFile(endReader), key+"s", "application/octet-stream", true); err != nil {
log.Fatalf("Storage: end upload failed. %v\n", err)
}
s.archivingTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()))
}
// Save metrics
ctx, _ := context.WithTimeout(context.Background(), time.Millisecond*200)
if shouldSplit {
s.totalSessions.Add(ctx, 1)
s.sessionDOMSize.Record(ctx, float64(fileSize))
} else {
s.sessionDevtoolsSize.Record(ctx, float64(fileSize))
}
ctx, _ := context.WithTimeout(context.Background(), time.Millisecond*200)
s.sessionSize.Record(ctx, fileSize)
s.totalSessions.Add(ctx, 1)
return nil
}

View file

@ -11,6 +11,10 @@ func (c *PGCache) InsertSessionEnd(sessionID uint64, timestamp uint64) (uint64,
return c.Conn.InsertSessionEnd(sessionID, timestamp)
}
// InsertSessionEncryptionKey stores the session's file encryption key,
// delegating directly to the underlying PG connection (no cache layer
// involvement).
func (c *PGCache) InsertSessionEncryptionKey(sessionID uint64, key []byte) error {
	return c.Conn.InsertSessionEncryptionKey(sessionID, key)
}
func (c *PGCache) HandleSessionEnd(sessionID uint64) error {
if err := c.Conn.HandleSessionEnd(sessionID); err != nil {
log.Printf("can't handle session end: %s", err)

View file

@ -71,6 +71,12 @@ func (c *PGCache) HandleWebSessionEnd(sessionID uint64, e *SessionEnd) error {
return c.HandleSessionEnd(sessionID)
}
// InsertWebJSException stores a JS exception as a web error event,
// converting it with WrapJSException (which parses its metadata tags).
func (c *PGCache) InsertWebJSException(e *JSException) error {
	return c.InsertWebErrorEvent(e.SessionID(), WrapJSException(e))
}

// InsertWebIntegrationEvent stores an integration event as a web error
// event via the WrapIntegrationEvent adapter.
func (c *PGCache) InsertWebIntegrationEvent(e *IntegrationEvent) error {
	return c.InsertWebErrorEvent(e.SessionID(), WrapIntegrationEvent(e))
}
func (c *PGCache) InsertWebErrorEvent(sessionID uint64, e *ErrorEvent) error {
session, err := c.GetSession(sessionID)
if err != nil {

View file

@ -13,11 +13,6 @@ type ProjectMeta struct {
expirationTime time.Time
}
// !TODO: remove old sessions by timeout to avoid memleaks
/*
* Cache layer around the stateless PG adapter
**/
type PGCache struct {
*postgres.Conn
sessions map[uint64]*Session
@ -26,7 +21,6 @@ type PGCache struct {
projectExpirationTimeout time.Duration
}
// TODO: create conn automatically
func NewPGCache(pgConn *postgres.Conn, projectExpirationTimeoutMs int64) *PGCache {
return &PGCache{
Conn: pgConn,

View file

@ -55,8 +55,7 @@ func NewConn(url string, queueLimit, sizeLimit int, metrics *monitoring.Metrics)
}
c, err := pgxpool.Connect(context.Background(), url)
if err != nil {
log.Println(err)
log.Fatalln("pgxpool.Connect Error")
log.Fatalf("pgxpool.Connect err: %s", err)
}
conn := &Conn{
batches: make(map[uint64]*pgx.Batch),

View file

@ -82,6 +82,10 @@ func (conn *Conn) InsertSessionEnd(sessionID uint64, timestamp uint64) (uint64,
return dur, nil
}
// InsertSessionEncryptionKey writes the session's file encryption key
// into the sessions.file_key column.
func (conn *Conn) InsertSessionEncryptionKey(sessionID uint64, key []byte) error {
	sqlRequest := `UPDATE sessions SET file_key = $2 WHERE session_id = $1`
	return conn.c.Exec(sqlRequest, sessionID, string(key))
}
func (conn *Conn) HandleSessionEnd(sessionID uint64) error {
sqlRequest := `
UPDATE sessions
@ -99,14 +103,14 @@ func (conn *Conn) HandleSessionEnd(sessionID uint64) error {
}
func (conn *Conn) InsertRequest(sessionID uint64, timestamp uint64, index uint64, url string, duration uint64, success bool) error {
if err := conn.requests.Append(sessionID, timestamp, getSqIdx(index), url, duration, success); err != nil {
if err := conn.requests.Append(sessionID, timestamp, index, url, duration, success); err != nil {
return fmt.Errorf("insert request in bulk err: %s", err)
}
return nil
}
func (conn *Conn) InsertCustomEvent(sessionID uint64, timestamp uint64, index uint64, name string, payload string) error {
if err := conn.customEvents.Append(sessionID, timestamp, getSqIdx(index), name, payload); err != nil {
if err := conn.customEvents.Append(sessionID, timestamp, index, name, payload); err != nil {
return fmt.Errorf("insert custom event in bulk err: %s", err)
}
return nil
@ -160,20 +164,16 @@ func (conn *Conn) InsertIssueEvent(sessionID uint64, projectID uint32, e *messag
if *payload == "" || *payload == "{}" {
payload = nil
}
context := &e.Context
if *context == "" || *context == "{}" {
context = nil
}
if err = tx.exec(`
INSERT INTO issues (
project_id, issue_id, type, context_string, context
project_id, issue_id, type, context_string
) (SELECT
project_id, $2, $3, $4, CAST($5 AS jsonb)
project_id, $2, $3, $4
FROM sessions
WHERE session_id = $1
)ON CONFLICT DO NOTHING`,
sessionID, issueID, e.Type, e.ContextString, context,
sessionID, issueID, e.Type, e.ContextString,
); err != nil {
return err
}
@ -184,7 +184,7 @@ func (conn *Conn) InsertIssueEvent(sessionID uint64, projectID uint32, e *messag
$1, $2, $3, $4, CAST($5 AS jsonb)
)`,
sessionID, issueID, e.Timestamp,
getSqIdx(e.MessageID),
e.MessageID,
payload,
); err != nil {
return err
@ -204,7 +204,7 @@ func (conn *Conn) InsertIssueEvent(sessionID uint64, projectID uint32, e *messag
VALUES
($1, $2, $3, left($4, 2700), $5, 'error')
`,
sessionID, getSqIdx(e.MessageID), e.Timestamp, e.ContextString, e.Payload,
sessionID, e.MessageID, e.Timestamp, e.ContextString, e.Payload,
); err != nil {
return err
}

View file

@ -5,11 +5,6 @@ import (
"openreplay/backend/pkg/url"
)
func (conn *Conn) InsertWebStatsLongtask(sessionID uint64, l *LongTask) error {
return nil // Do we even use them?
// conn.exec(``);
}
func (conn *Conn) InsertWebStatsPerformance(sessionID uint64, p *PerformanceTrackAggr) error {
timestamp := (p.TimestampEnd + p.TimestampStart) / 2

View file

@ -2,18 +2,12 @@ package postgres
import (
"log"
"math"
"openreplay/backend/pkg/hashid"
"openreplay/backend/pkg/db/types"
. "openreplay/backend/pkg/messages"
"openreplay/backend/pkg/url"
)
// TODO: change messages and replace everywhere to e.Index
// getSqIdx folds a 64-bit message ID into the int32 range expected by
// the message-index DB columns.
// TODO: change messages and replace everywhere to e.Index
func getSqIdx(messageID uint64) uint {
	return uint(messageID % uint64(math.MaxInt32))
}
func (conn *Conn) InsertWebCustomEvent(sessionID uint64, projectID uint32, e *CustomEvent) error {
err := conn.InsertCustomEvent(sessionID, e.Timestamp,
e.MessageID,
@ -93,7 +87,7 @@ func (conn *Conn) InsertWebInputEvent(sessionID uint64, projectID uint32, e *Inp
return nil
}
func (conn *Conn) InsertWebErrorEvent(sessionID uint64, projectID uint32, e *ErrorEvent) (err error) {
func (conn *Conn) InsertWebErrorEvent(sessionID uint64, projectID uint32, e *types.ErrorEvent) (err error) {
tx, err := conn.c.Begin()
if err != nil {
return err
@ -105,7 +99,7 @@ func (conn *Conn) InsertWebErrorEvent(sessionID uint64, projectID uint32, e *Err
}
}
}()
errorID := hashid.WebErrorID(projectID, e)
errorID := e.ID(projectID)
if err = tx.exec(`
INSERT INTO errors
@ -135,6 +129,18 @@ func (conn *Conn) InsertWebErrorEvent(sessionID uint64, projectID uint32, e *Err
return err
}
err = tx.commit()
// Insert tags
sqlRequest := `
INSERT INTO public.errors_tags (
session_id, message_id, error_id, key, value
) VALUES (
$1, $2, $3, $4, $5
) ON CONFLICT DO NOTHING`
for key, value := range e.Tags {
conn.batchQueue(sessionID, sqlRequest, sessionID, e.MessageID, errorID, key, value)
}
return
}
@ -163,7 +169,7 @@ func (conn *Conn) InsertWebFetchEvent(sessionID uint64, projectID uint32, savePa
$12, $13
) ON CONFLICT DO NOTHING`
conn.batchQueue(sessionID, sqlRequest,
sessionID, e.Timestamp, getSqIdx(e.MessageID),
sessionID, e.Timestamp, e.MessageID,
e.URL, host, path, query,
request, response, e.Status, url.EnsureMethod(e.Method),
e.Duration, e.Status < 400,

View file

@ -0,0 +1,89 @@
package types
import (
"encoding/hex"
"encoding/json"
"hash/fnv"
"log"
"strconv"
. "openreplay/backend/pkg/messages"
)
// ErrorEvent is the database layer's unified representation of an error,
// built either from a tracker JSException (see WrapJSException) or from a
// backend IntegrationEvent (see WrapIntegrationEvent).
type ErrorEvent struct {
	MessageID uint64             // index of the message within the session (0 for backend-originated events)
	Timestamp uint64             // event timestamp, ms
	Source    string             // origin of the error, e.g. "js_exception" or the integration's source
	Name      string             // error name/class
	Message   string             // human-readable error message
	Payload   string             // raw payload (stack trace etc.) as received
	Tags      map[string]*string // optional tags parsed from exception metadata; value is nil for bare keys
}
// unquote strips one pair of surrounding double quotes from s. Unlike
// strconv.Unquote it does not interpret escape sequences; it only trims the
// outer quotes, which is how raw JSON string tokens are handled here.
func unquote(s string) string {
	// Guard short inputs: the original indexed s[0] unconditionally, which
	// panicked on "" and produced an invalid slice for a lone `"`.
	if len(s) >= 2 && s[0] == '"' && s[len(s)-1] == '"' {
		return s[1 : len(s)-1]
	}
	return s
}

// parseTags decodes the tracker's tag metadata JSON. Two shapes are accepted:
//   - a JSON array  ["a","b"]  -> keys mapped to nil values
//   - a JSON object {"a":"b"}  -> keys mapped to pointers to their values
// Any other shape, including an empty string, yields (nil, nil).
func parseTags(tagsJSON string) (tags map[string]*string, err error) {
	// Guard empty input: indexing tagsJSON[0] would panic otherwise.
	if tagsJSON == "" {
		return nil, nil
	}
	switch tagsJSON[0] {
	case '[':
		var tagsArr []json.RawMessage
		if err = json.Unmarshal([]byte(tagsJSON), &tagsArr); err != nil {
			return
		}
		tags = make(map[string]*string, len(tagsArr))
		for _, keyBts := range tagsArr {
			tags[unquote(string(keyBts))] = nil
		}
	case '{':
		var tagsObj map[string]json.RawMessage
		if err = json.Unmarshal([]byte(tagsJSON), &tagsObj); err != nil {
			return
		}
		tags = make(map[string]*string, len(tagsObj))
		for key, valBts := range tagsObj {
			val := unquote(string(valBts))
			tags[key] = &val
		}
	}
	return
}
// WrapJSException converts a tracker JSException message into the generic
// ErrorEvent representation used by the database layer. A metadata parsing
// failure is logged but does not prevent the event from being produced.
func WrapJSException(m *JSException) *ErrorEvent {
	tags, tagsErr := parseTags(m.Metadata)
	if tagsErr != nil {
		log.Printf("Error on parsing Exception metadata: %v", tagsErr)
	}
	event := &ErrorEvent{
		MessageID: m.Meta().Index,
		Timestamp: uint64(m.Meta().Timestamp),
		Source:    "js_exception",
		Name:      m.Name,
		Message:   m.Message,
		Payload:   m.Payload,
		Tags:      tags,
	}
	return event
}
// WrapIntegrationEvent converts a backend IntegrationEvent message into the
// generic ErrorEvent representation used by the database layer.
func WrapIntegrationEvent(m *IntegrationEvent) *ErrorEvent {
	event := ErrorEvent{
		// This will be always 0 here since it's coming from backend
		// TODO: find another way to index
		MessageID: m.Meta().Index,
		Timestamp: m.Timestamp,
		Source:    m.Source,
		Name:      m.Name,
		Message:   m.Message,
		Payload:   m.Payload,
	}
	return &event
}
// ID derives a stable identifier for the error within a project: the
// project id in hex followed by a 128-bit FNV-1a digest of the error's
// source, name, message and payload.
func (e *ErrorEvent) ID(projectID uint32) string {
	digest := fnv.New128a()
	for _, part := range []string{e.Source, e.Name, e.Message, e.Payload} {
		digest.Write([]byte(part))
	}
	return strconv.FormatUint(uint64(projectID), 16) + hex.EncodeToString(digest.Sum(nil))
}

View file

@ -56,15 +56,6 @@ func (b *EventMapper) Handle(message Message, messageID uint64, timestamp uint64
Selector: msg.Selector,
}
}
case *JSException:
return &ErrorEvent{
MessageID: messageID,
Timestamp: timestamp,
Source: "js_exception",
Name: msg.Name,
Message: msg.Message,
Payload: msg.Payload,
}
case *ResourceTiming:
return &ResourceEvent{
MessageID: messageID,

View file

@ -23,12 +23,3 @@ func IOSCrashID(projectID uint32, crash *messages.IOSCrash) string {
hash.Write([]byte(crash.Stacktrace))
return strconv.FormatUint(uint64(projectID), 16) + hex.EncodeToString(hash.Sum(nil))
}
func WebErrorID(projectID uint32, errorEvent *messages.ErrorEvent) string {
hash := fnv.New128a()
hash.Write([]byte(errorEvent.Source))
hash.Write([]byte(errorEvent.Name))
hash.Write([]byte(errorEvent.Message))
hash.Write([]byte(errorEvent.Payload))
return strconv.FormatUint(uint64(projectID), 16) + hex.EncodeToString(hash.Sum(nil))
}

View file

@ -5,8 +5,7 @@ import (
"log"
"time"
"openreplay/backend/pkg/queue/types"
//"openreplay/backend/pkg/env"
"openreplay/backend/pkg/messages"
)
type partitionStats struct {
@ -18,15 +17,15 @@ type partitionStats struct {
}
// Update partition statistic
func (prt *partitionStats) update(m *types.Meta) {
if prt.maxts < m.Timestamp {
prt.maxts = m.Timestamp
func (prt *partitionStats) update(m *messages.BatchInfo) {
if prt.maxts < m.Timestamp() {
prt.maxts = m.Timestamp()
}
if prt.mints > m.Timestamp || prt.mints == 0 {
prt.mints = m.Timestamp
if prt.mints > m.Timestamp() || prt.mints == 0 {
prt.mints = m.Timestamp()
}
prt.lastts = m.Timestamp
prt.lastID = m.ID
prt.lastts = m.Timestamp()
prt.lastID = m.ID()
prt.count += 1
}
@ -35,6 +34,10 @@ type queueStats struct {
tick <-chan time.Time
}
type QueueStats interface {
Collect(msg messages.Message)
}
func NewQueueStats(sec int) *queueStats {
return &queueStats{
prts: make(map[int32]*partitionStats),
@ -43,14 +46,14 @@ func NewQueueStats(sec int) *queueStats {
}
// Collect writes new data to partition statistic
func (qs *queueStats) Collect(sessionID uint64, m *types.Meta) {
prti := int32(sessionID % 16) // TODO use GetKeyPartition from kafka/key.go
func (qs *queueStats) Collect(msg messages.Message) {
prti := int32(msg.SessionID() % 16) // TODO use GetKeyPartition from kafka/key.go
prt, ok := qs.prts[prti]
if !ok {
qs.prts[prti] = &partitionStats{}
prt = qs.prts[prti]
}
prt.update(m)
prt.update(msg.Meta().Batch())
select {
case <-qs.tick:

View file

@ -1,197 +0,0 @@
package messages
import (
"bytes"
"io"
"log"
"strings"
)
type Iterator interface {
Next() bool // Return true if we have next message
Type() int // Return type of the next message
Message() Message // Return raw or decoded message
Close()
}
type iteratorImpl struct {
data *bytes.Reader
index uint64
timestamp int64
version uint64
msgType uint64
msgSize uint64
canSkip bool
msg Message
url string
}
func NewIterator(data []byte) Iterator {
return &iteratorImpl{
data: bytes.NewReader(data),
}
}
func (i *iteratorImpl) Next() bool {
if i.canSkip {
if _, err := i.data.Seek(int64(i.msgSize), io.SeekCurrent); err != nil {
log.Printf("seek err: %s", err)
return false
}
}
i.canSkip = false
var err error
i.msgType, err = ReadUint(i.data)
if err != nil {
if err == io.EOF {
return false
}
log.Printf("can't read message type: %s", err)
return false
}
if i.version > 0 && messageHasSize(i.msgType) {
// Read message size if it is a new protocol version
i.msgSize, err = ReadSize(i.data)
if err != nil {
log.Printf("can't read message size: %s", err)
return false
}
i.msg = &RawMessage{
tp: i.msgType,
size: i.msgSize,
meta: &message{},
reader: i.data,
skipped: &i.canSkip,
}
i.canSkip = true
} else {
i.msg, err = ReadMessage(i.msgType, i.data)
if err == io.EOF {
return false
} else if err != nil {
if strings.HasPrefix(err.Error(), "Unknown message code:") {
code := strings.TrimPrefix(err.Error(), "Unknown message code: ")
i.msg, err = DecodeExtraMessage(code, i.data)
if err != nil {
log.Printf("can't decode msg: %s", err)
return false
}
} else {
log.Printf("Batch Message decoding error on message with index %v, err: %s", i.index, err)
return false
}
}
i.msg = transformDeprecated(i.msg)
}
// Process meta information
isBatchMeta := false
switch i.msgType {
case MsgBatchMetadata:
if i.index != 0 { // Might be several 0-0 BatchMeta in a row without an error though
log.Printf("Batch Metadata found at the end of the batch")
return false
}
msg := i.msg.Decode()
if msg == nil {
return false
}
m := msg.(*BatchMetadata)
i.index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha)
i.timestamp = m.Timestamp
i.version = m.Version
i.url = m.Url
isBatchMeta = true
if i.version > 1 {
log.Printf("incorrect batch version, skip current batch")
return false
}
case MsgBatchMeta: // Is not required to be present in batch since IOS doesn't have it (though we might change it)
if i.index != 0 { // Might be several 0-0 BatchMeta in a row without an error though
log.Printf("Batch Meta found at the end of the batch")
return false
}
msg := i.msg.Decode()
if msg == nil {
return false
}
m := msg.(*BatchMeta)
i.index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha)
i.timestamp = m.Timestamp
isBatchMeta = true
// continue readLoop
case MsgIOSBatchMeta:
if i.index != 0 { // Might be several 0-0 BatchMeta in a row without an error though
log.Printf("Batch Meta found at the end of the batch")
return false
}
msg := i.msg.Decode()
if msg == nil {
return false
}
m := msg.(*IOSBatchMeta)
i.index = m.FirstIndex
i.timestamp = int64(m.Timestamp)
isBatchMeta = true
// continue readLoop
case MsgTimestamp:
msg := i.msg.Decode()
if msg == nil {
return false
}
m := msg.(*Timestamp)
i.timestamp = int64(m.Timestamp)
// No skipping here for making it easy to encode back the same sequence of message
// continue readLoop
case MsgSessionStart:
msg := i.msg.Decode()
if msg == nil {
return false
}
m := msg.(*SessionStart)
i.timestamp = int64(m.Timestamp)
case MsgSessionEnd:
msg := i.msg.Decode()
if msg == nil {
return false
}
m := msg.(*SessionEnd)
i.timestamp = int64(m.Timestamp)
case MsgSetPageLocation:
msg := i.msg.Decode()
if msg == nil {
return false
}
m := msg.(*SetPageLocation)
i.url = m.URL
}
i.msg.Meta().Index = i.index
i.msg.Meta().Timestamp = i.timestamp
i.msg.Meta().Url = i.url
if !isBatchMeta { // Without that indexes will be unique anyway, though shifted by 1 because BatchMeta is not counted in tracker
i.index++
}
return true
}
func (i *iteratorImpl) Type() int {
return int(i.msgType)
}
func (i *iteratorImpl) Message() Message {
return i.msg
}
func (i *iteratorImpl) Close() {
_, err := i.data.Seek(0, io.SeekEnd)
if err != nil {
log.Printf("can't set seek pointer at the end: %s", err)
}
}
func messageHasSize(msgType uint64) bool {
return !(msgType == 80 || msgType == 81 || msgType == 82)
}

View file

@ -1,56 +0,0 @@
package messages
import (
"encoding/binary"
"fmt"
"io"
)
type SessionSearch struct {
message
Timestamp uint64
Partition uint64
}
func (msg *SessionSearch) Encode() []byte {
buf := make([]byte, 11)
buf[0] = 127
p := 1
p = WriteUint(msg.Timestamp, buf, p)
p = WriteUint(msg.Partition, buf, p)
return buf[:p]
}
func (msg *SessionSearch) EncodeWithIndex() []byte {
encoded := msg.Encode()
if IsIOSType(msg.TypeID()) {
return encoded
}
data := make([]byte, len(encoded)+8)
copy(data[8:], encoded[:])
binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index)
return data
}
func (msg *SessionSearch) Decode() Message {
return msg
}
func (msg *SessionSearch) TypeID() int {
return 127
}
func DecodeExtraMessage(code string, reader io.Reader) (Message, error) {
var err error
if code != "127" {
return nil, fmt.Errorf("unknown message code: %s", code)
}
msg := &SessionSearch{}
if msg.Timestamp, err = ReadUint(reader); err != nil {
return nil, fmt.Errorf("can't read message timestamp: %s", err)
}
if msg.Partition, err = ReadUint(reader); err != nil {
return nil, fmt.Errorf("can't read last partition: %s", err)
}
return msg, nil
}

View file

@ -1,5 +0,0 @@
package messages
func Encode(msg Message) []byte {
return msg.Encode()
}

View file

@ -2,9 +2,13 @@
package messages
func IsReplayerType(id int) bool {
return 0 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 22 == id || 37 == id || 38 == id || 39 == id || 40 == id || 41 == id || 44 == id || 45 == id || 46 == id || 47 == id || 48 == id || 49 == id || 54 == id || 55 == id || 59 == id || 60 == id || 61 == id || 67 == id || 69 == id || 70 == id || 71 == id || 72 == id || 73 == id || 74 == id || 75 == id || 76 == id || 77 == id || 79 == id || 90 == id || 93 == id || 96 == id || 100 == id || 102 == id || 103 == id || 105 == id
return 80 != id && 81 != id && 82 != id && 1 != id && 3 != id && 17 != id && 23 != id && 24 != id && 25 != id && 26 != id && 27 != id && 28 != id && 29 != id && 30 != id && 31 != id && 32 != id && 33 != id && 35 != id && 36 != id && 42 != id && 43 != id && 50 != id && 51 != id && 52 != id && 53 != id && 56 != id && 62 != id && 63 != id && 64 != id && 66 != id && 78 != id && 126 != id && 127 != id && 107 != id && 91 != id && 92 != id && 94 != id && 95 != id && 97 != id && 98 != id && 99 != id && 101 != id && 104 != id && 110 != id && 111 != id
}
// IsIOSType reports whether the given message type id belongs to the
// iOS tracker protocol.
func IsIOSType(id int) bool {
	switch id {
	case 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
		100, 101, 102, 103, 104, 105, 107, 110, 111:
		return true
	}
	return false
}
// IsDOMType reports whether the given message type id carries DOM-related
// replay data.
func IsDOMType(id int) bool {
	switch id {
	case 0, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 20,
		37, 38, 49, 54, 55, 59, 60, 61, 67, 69, 70, 71, 72, 73, 74, 75, 76, 77,
		90, 93, 96, 100, 102, 103, 105:
		return true
	}
	return false
}

View file

@ -0,0 +1,215 @@
package messages
import (
"bytes"
"fmt"
"io"
"log"
)
// MessageHandler processes one message using service logic
type MessageHandler func(Message)
// MessageIterator iterates by all messages in batch
type MessageIterator interface {
Iterate(batchData []byte, batchInfo *BatchInfo)
}
type messageIteratorImpl struct {
filter map[int]struct{}
preFilter map[int]struct{}
handler MessageHandler
autoDecode bool
version uint64
size uint64
canSkip bool
broken bool
messageInfo *message
batchInfo *BatchInfo
}
// NewMessageIterator builds a MessageIterator that feeds each message of a
// batch to messageHandler. When messageFilter is non-empty, only the listed
// message types are delivered; autoDecode controls whether raw messages are
// decoded before being handed to the handler.
func NewMessageIterator(messageHandler MessageHandler, messageFilter []int, autoDecode bool) MessageIterator {
	it := &messageIteratorImpl{
		handler:    messageHandler,
		autoDecode: autoDecode,
		// "System" messages are always inspected so the iterator can
		// maintain batch state (index, timestamp, url), regardless of
		// the user-supplied filter.
		preFilter: map[int]struct{}{
			MsgBatchMetadata: {}, MsgBatchMeta: {}, MsgTimestamp: {},
			MsgSessionStart: {}, MsgSessionEnd: {}, MsgSetPageLocation: {},
			MsgSessionEndDeprecated: {},
		},
	}
	if len(messageFilter) > 0 {
		it.filter = make(map[int]struct{}, len(messageFilter))
		for _, tp := range messageFilter {
			it.filter[tp] = struct{}{}
		}
	}
	return it
}
// prepareVars resets the iterator's per-batch state before a new batch is
// processed, so leftovers from a previous batch cannot leak into this one.
func (i *messageIteratorImpl) prepareVars(batchInfo *BatchInfo) {
	i.batchInfo = batchInfo
	i.messageInfo = &message{batch: batchInfo}
	i.version = 0 // batch protocol version; set later by a BatchMetadata message
	i.canSkip = false
	i.broken = false
	i.size = 0
}
// Iterate walks over all messages in batchData, maintaining per-message meta
// information (index, timestamp, url) as "system" messages are encountered,
// and passes each message that survives the configured filter to the handler.
// Iteration stops at the end of the batch or on the first parsing error.
func (i *messageIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) {
	// Prepare iterator before processing messages in batch
	i.prepareVars(batchInfo)
	// Initialize batch reader
	reader := bytes.NewReader(batchData)
	// Process until end of batch or parsing error
	for {
		// Increase message index (can be overwritten by batch info message)
		i.messageInfo.Index++
		// A RawMessage marks the batch broken when its deferred read fails.
		if i.broken {
			log.Printf("skipping broken batch, info: %s", i.batchInfo.Info())
			return
		}
		// Skip over the previous message's body if nobody consumed it.
		if i.canSkip {
			if _, err := reader.Seek(int64(i.size), io.SeekCurrent); err != nil {
				log.Printf("can't skip message: %s, info: %s", err, i.batchInfo.Info())
				return
			}
		}
		i.canSkip = false
		// Read message type
		msgType, err := ReadUint(reader)
		if err != nil {
			if err != io.EOF {
				log.Printf("can't read message type: %s, info: %s", err, i.batchInfo.Info())
			}
			return
		}
		var msg Message
		// Read message body (and decode if protocol version less than 1)
		if i.version > 0 && messageHasSize(msgType) {
			// Read message size if it is a new protocol version
			i.size, err = ReadSize(reader)
			if err != nil {
				log.Printf("can't read message size: %s, info: %s", err, i.batchInfo.Info())
				return
			}
			// Defer the body read: RawMessage pulls bytes from the shared
			// reader only when Encode/Decode is actually called.
			msg = &RawMessage{
				tp:      msgType,
				size:    i.size,
				reader:  reader,
				skipped: &i.canSkip,
				broken:  &i.broken,
				meta:    i.messageInfo,
			}
			i.canSkip = true
		} else {
			msg, err = ReadMessage(msgType, reader)
			if err != nil {
				if err != io.EOF {
					log.Printf("can't read message body: %s, info: %s", err, i.batchInfo.Info())
				}
				return
			}
			msg = transformDeprecated(msg)
		}
		// Preprocess "system" messages
		if _, ok := i.preFilter[msg.TypeID()]; ok {
			msg = msg.Decode()
			if msg == nil {
				log.Printf("decode error, type: %d, info: %s", msgType, i.batchInfo.Info())
				return
			}
			if err := i.preprocessing(msg); err != nil {
				log.Printf("message preprocessing err: %s", err)
				return
			}
		}
		// Skip messages we don't have in filter
		if i.filter != nil {
			if _, ok := i.filter[msg.TypeID()]; !ok {
				continue
			}
		}
		if i.autoDecode {
			msg = msg.Decode()
			if msg == nil {
				log.Printf("decode error, type: %d, info: %s", msgType, i.batchInfo.Info())
				return
			}
		}
		// Set meta information for message
		msg.Meta().SetMeta(i.messageInfo)
		// Process message
		i.handler(msg)
	}
}
// zeroTsLog reports a zero timestamp found in a message of the given type,
// including batch info to help trace the misbehaving tracker/session.
func (i *messageIteratorImpl) zeroTsLog(msgType string) {
	log.Printf("zero timestamp in %s, info: %s", msgType, i.batchInfo.Info())
}
// preprocessing updates the iterator's per-batch state (index, timestamp,
// url, protocol version) from "system" messages before they reach the
// handler. It returns an error when batch meta information appears in an
// invalid position or carries an unsupported version.
func (i *messageIteratorImpl) preprocessing(msg Message) error {
	switch m := msg.(type) {
	case *BatchMetadata:
		if i.messageInfo.Index > 1 { // Might be several 0-0 BatchMeta in a row without an error though
			return fmt.Errorf("batchMetadata found at the end of the batch, info: %s", i.batchInfo.Info())
		}
		if m.Version > 1 {
			// Bug fix: report the version from the message itself; i.version
			// has not been assigned yet at this point and was always 0 here.
			return fmt.Errorf("incorrect batch version: %d, skip current batch, info: %s", m.Version, i.batchInfo.Info())
		}
		i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha)
		i.messageInfo.Timestamp = m.Timestamp
		if m.Timestamp == 0 {
			i.zeroTsLog("BatchMetadata")
		}
		i.messageInfo.Url = m.Url
		i.version = m.Version
		i.batchInfo.version = m.Version
	case *BatchMeta: // Is not required to be present in batch since IOS doesn't have it (though we might change it)
		if i.messageInfo.Index > 1 { // Might be several 0-0 BatchMeta in a row without an error though
			return fmt.Errorf("batchMeta found at the end of the batch, info: %s", i.batchInfo.Info())
		}
		i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha)
		i.messageInfo.Timestamp = m.Timestamp
		if m.Timestamp == 0 {
			i.zeroTsLog("BatchMeta")
		}
	case *Timestamp:
		i.messageInfo.Timestamp = int64(m.Timestamp)
		if m.Timestamp == 0 {
			i.zeroTsLog("Timestamp")
		}
	case *SessionStart:
		i.messageInfo.Timestamp = int64(m.Timestamp)
		if m.Timestamp == 0 {
			i.zeroTsLog("SessionStart")
			log.Printf("zero session start, project: %d, UA: %s, tracker: %s, info: %s",
				m.ProjectID, m.UserAgent, m.TrackerVersion, i.batchInfo.Info())
		}
	case *SessionEnd:
		i.messageInfo.Timestamp = int64(m.Timestamp)
		if m.Timestamp == 0 {
			i.zeroTsLog("SessionEnd")
		}
	case *SetPageLocation:
		i.messageInfo.Url = m.URL
	}
	return nil
}
// messageHasSize reports whether messages of the given type are
// length-prefixed in the batch encoding; types 80-82 are the exceptions
// that carry no size field.
func messageHasSize(msgType uint64) bool {
	switch msgType {
	case 80, 81, 82:
		return false
	default:
		return true
	}
}

View file

@ -2,13 +2,18 @@ package messages
func transformDeprecated(msg Message) Message {
switch m := msg.(type) {
case *MouseClickDepricated:
return &MouseClick{
ID: m.ID,
HesitationTime: m.HesitationTime,
Label: m.Label,
case *JSExceptionDeprecated:
return &JSException{
Name: m.Name,
Message: m.Message,
Payload: m.Payload,
Metadata: "{}",
}
case *SessionEndDeprecated:
return &SessionEnd{
Timestamp: m.Timestamp,
EncryptionKey: "",
}
default:
return msg
}
return msg
}

View file

@ -1,20 +1,6 @@
package messages
type message struct {
Timestamp int64
Index uint64
Url string
}
func (m *message) Meta() *message {
return m
}
func (m *message) SetMeta(origin *message) {
m.Timestamp = origin.Timestamp
m.Index = origin.Index
m.Url = origin.Url
}
import "fmt"
type Message interface {
Encode() []byte
@ -22,4 +8,74 @@ type Message interface {
Decode() Message
TypeID() int
Meta() *message
SessionID() uint64
}
// BatchInfo represents common information for all messages inside data batch
type BatchInfo struct {
	sessionID uint64 // session the batch belongs to
	id        uint64 // batch id; printed as "offset" in Info()
	topic     string // queue topic/stream the batch was read from
	partition uint64 // queue partition number
	timestamp int64  // batch-level timestamp, ms
	version   uint64 // batch protocol version, assigned from BatchMetadata
}
// NewBatchInfo creates the meta information for a batch read from a queue.
// The protocol version is left at its zero value and is filled in later
// from the batch's own metadata.
func NewBatchInfo(sessID uint64, topic string, id, partition uint64, ts int64) *BatchInfo {
	info := BatchInfo{
		sessionID: sessID,
		topic:     topic,
		id:        id,
		partition: partition,
		timestamp: ts,
	}
	return &info
}
// SessionID returns the id of the session the batch belongs to.
func (b *BatchInfo) SessionID() uint64 {
	return b.sessionID
}

// ID returns the batch identifier (rendered as "offset" in Info()).
func (b *BatchInfo) ID() uint64 {
	return b.id
}

// Timestamp returns the batch-level timestamp in milliseconds.
func (b *BatchInfo) Timestamp() int64 {
	return b.timestamp
}

// Info renders a human-readable summary of the batch for log messages.
func (b *BatchInfo) Info() string {
	return fmt.Sprintf("session: %d, partition: %d, offset: %d, ver: %d", b.sessionID, b.partition, b.id, b.version)
}
// message carries the per-message meta information embedded into every
// decoded message type.
type message struct {
	Timestamp int64      // message timestamp, ms
	Index     uint64     // message index (page number packed into the high 32 bits)
	Url       string     // last known page url at the time of this message
	batch     *BatchInfo // batch the message arrived in; may be nil until set
}
// Batch returns the meta information of the batch the message arrived in.
func (m *message) Batch() *BatchInfo {
	return m.batch
}

// Meta returns the message's meta information (the embedded struct itself).
func (m *message) Meta() *message {
	return m
}

// SetMeta copies meta information (batch, timestamp, index, url) from origin.
func (m *message) SetMeta(origin *message) {
	m.batch = origin.batch
	m.Timestamp = origin.Timestamp
	m.Index = origin.Index
	m.Url = origin.Url
}
// SessionID returns the id of the session the message belongs to, or 0 when
// no batch info has been attached yet. The nil guard prevents a panic on
// messages created without batch context and matches the behavior of
// RawMessage.SessionID.
func (m *message) SessionID() uint64 {
	if m.batch == nil {
		return 0
	}
	return m.batch.sessionID
}
// SetSessionID attaches the session id to the message, creating empty batch
// info first when none is present yet.
func (m *message) SetSessionID(sessID uint64) {
	if m.batch == nil {
		m.batch = &BatchInfo{}
	}
	m.batch.sessionID = sessID
}

File diff suppressed because it is too large Load diff

View file

@ -16,6 +16,7 @@ type RawMessage struct {
meta *message
encoded bool
skipped *bool
broken *bool
}
func (m *RawMessage) Encode() []byte {
@ -28,7 +29,7 @@ func (m *RawMessage) Encode() []byte {
*m.skipped = false
_, err := io.ReadFull(m.reader, m.data[1:])
if err != nil {
log.Printf("message encode err: %s", err)
log.Printf("message encode err: %s, type: %d, sess: %d", err, m.tp, m.SessionID())
return nil
}
return m.data
@ -36,7 +37,10 @@ func (m *RawMessage) Encode() []byte {
func (m *RawMessage) EncodeWithIndex() []byte {
if !m.encoded {
m.Encode()
if m.Encode() == nil {
*m.broken = true
return nil
}
}
if IsIOSType(int(m.tp)) {
return m.data
@ -49,13 +53,18 @@ func (m *RawMessage) EncodeWithIndex() []byte {
func (m *RawMessage) Decode() Message {
if !m.encoded {
m.Encode()
if m.Encode() == nil {
*m.broken = true
return nil
}
}
msg, err := ReadMessage(m.tp, bytes.NewReader(m.data[1:]))
if err != nil {
log.Printf("decode err: %s", err)
*m.broken = true
return nil
}
msg = transformDeprecated(msg)
msg.Meta().SetMeta(m.meta)
return msg
}
@ -67,3 +76,10 @@ func (m *RawMessage) TypeID() int {
func (m *RawMessage) Meta() *message {
return m.meta
}
// SessionID returns the id of the session the message belongs to, or 0 when
// no meta information is attached.
func (m *RawMessage) SessionID() uint64 {
	if m.meta == nil {
		return 0
	}
	return m.meta.SessionID()
}

File diff suppressed because it is too large Load diff

View file

@ -1,12 +1,13 @@
package queue
import (
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue/types"
"openreplay/backend/pkg/redisstream"
)
func NewConsumer(group string, topics []string, handler types.MessageHandler, _ bool, _ int) types.Consumer {
return redisstream.NewConsumer(group, topics, handler)
func NewConsumer(group string, topics []string, iterator messages.MessageIterator, _ bool, _ int) types.Consumer {
return redisstream.NewConsumer(group, topics, iterator)
}
func NewProducer(_ int, _ bool) types.Producer {

View file

@ -1,12 +0,0 @@
package queue
import (
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue/types"
)
func NewMessageConsumer(group string, topics []string, handler types.RawMessageHandler, autoCommit bool, messageSizeLimit int) types.Consumer {
return NewConsumer(group, topics, func(sessionID uint64, value []byte, meta *types.Meta) {
handler(sessionID, messages.NewIterator(value), meta)
}, autoCommit, messageSizeLimit)
}

View file

@ -1,30 +1,17 @@
package types
import (
"openreplay/backend/pkg/messages"
)
// Consumer reads batches of session data from queue (redis or kafka)
type Consumer interface {
ConsumeNext() error
Commit() error
CommitBack(gap int64) error
Commit() error
Close()
HasFirstPartition() bool
}
// Producer sends batches of session data to queue (redis or kafka)
type Producer interface {
Produce(topic string, key uint64, value []byte) error
ProduceToPartition(topic string, partition, key uint64, value []byte) error
Close(timeout int)
Flush(timeout int)
Close(timeout int)
}
type Meta struct {
ID uint64
Topic string
Timestamp int64
}
type MessageHandler func(uint64, []byte, *Meta)
type DecodedMessageHandler func(uint64, messages.Message, *Meta)
type RawMessageHandler func(uint64, messages.Iterator, *Meta)

View file

@ -3,6 +3,7 @@ package redisstream
import (
"log"
"net"
"openreplay/backend/pkg/messages"
"sort"
"strconv"
"strings"
@ -10,8 +11,6 @@ import (
_redis "github.com/go-redis/redis"
"github.com/pkg/errors"
"openreplay/backend/pkg/queue/types"
)
type idsInfo struct {
@ -21,16 +20,16 @@ type idsInfo struct {
type streamPendingIDsMap map[string]*idsInfo
type Consumer struct {
redis *_redis.Client
streams []string
group string
messageHandler types.MessageHandler
idsPending streamPendingIDsMap
lastTs int64
autoCommit bool
redis *_redis.Client
streams []string
group string
messageIterator messages.MessageIterator
idsPending streamPendingIDsMap
lastTs int64
autoCommit bool
}
func NewConsumer(group string, streams []string, messageHandler types.MessageHandler) *Consumer {
func NewConsumer(group string, streams []string, messageIterator messages.MessageIterator) *Consumer {
redis := getRedisClient()
for _, stream := range streams {
err := redis.XGroupCreateMkStream(stream, group, "0").Err()
@ -52,12 +51,12 @@ func NewConsumer(group string, streams []string, messageHandler types.MessageHan
}
return &Consumer{
redis: redis,
messageHandler: messageHandler,
streams: streams,
group: group,
autoCommit: true,
idsPending: idsPending,
redis: redis,
messageIterator: messageIterator,
streams: streams,
group: group,
autoCommit: true,
idsPending: idsPending,
}
}
@ -102,11 +101,8 @@ func (c *Consumer) ConsumeNext() error {
if idx > 0x1FFF {
return errors.New("Too many messages per ms in redis")
}
c.messageHandler(sessionID, []byte(valueString), &types.Meta{
Topic: r.Stream,
Timestamp: int64(ts),
ID: ts<<13 | (idx & 0x1FFF), // Max: 4096 messages/ms for 69 years
})
bID := ts<<13 | (idx & 0x1FFF) // Max: 4096 messages/ms for 69 years
c.messageIterator.Iterate([]byte(valueString), messages.NewBatchInfo(sessionID, r.Stream, bID, 0, int64(ts)))
if c.autoCommit {
if err = c.redis.XAck(r.Stream, c.group, m.ID).Err(); err != nil {
return errors.Wrapf(err, "Acknoledgment error for messageID %v", m.ID)
@ -161,7 +157,3 @@ func (c *Consumer) CommitBack(gap int64) error {
func (c *Consumer) Close() {
// noop
}
func (c *Consumer) HasFirstPartition() bool {
return false
}

View file

@ -66,6 +66,7 @@ func (b *builder) handleMessage(message Message, messageID uint64) {
b.lastSystemTime = time.Now()
for _, p := range b.processors {
if rm := p.Handle(message, messageID, b.timestamp); rm != nil {
rm.Meta().SetMeta(message.Meta())
b.readyMsgs = append(b.readyMsgs, rm)
}
}

View file

@ -30,7 +30,9 @@ func (m *builderMap) GetBuilder(sessionID uint64) *builder {
return b
}
func (m *builderMap) HandleMessage(sessionID uint64, msg Message, messageID uint64) {
func (m *builderMap) HandleMessage(msg Message) {
sessionID := msg.SessionID()
messageID := msg.Meta().Index
b := m.GetBuilder(sessionID)
b.handleMessage(msg, messageID)
}
@ -39,6 +41,7 @@ func (m *builderMap) iterateSessionReadyMessages(sessionID uint64, b *builder, i
if b.ended || b.lastSystemTime.Add(FORCE_DELETE_TIMEOUT).Before(time.Now()) {
for _, p := range b.processors {
if rm := p.Build(); rm != nil {
rm.Meta().SetSessionID(sessionID)
b.readyMsgs = append(b.readyMsgs, rm)
}
}

View file

@ -23,6 +23,7 @@ func NewTokenizer(secret string) *Tokenizer {
type TokenData struct {
ID uint64
Delay int64
ExpTime int64
}
@ -34,6 +35,7 @@ func (tokenizer *Tokenizer) sign(body string) []byte {
func (tokenizer *Tokenizer) Compose(d TokenData) string {
body := strconv.FormatUint(d.ID, 36) +
"." + strconv.FormatInt(d.Delay, 36) +
"." + strconv.FormatInt(d.ExpTime, 36)
sign := base58.Encode(tokenizer.sign(body))
return body + "." + sign
@ -41,12 +43,12 @@ func (tokenizer *Tokenizer) Compose(d TokenData) string {
func (tokenizer *Tokenizer) Parse(token string) (*TokenData, error) {
data := strings.Split(token, ".")
if len(data) != 3 {
if len(data) != 4 {
return nil, errors.New("wrong token format")
}
if !hmac.Equal(
base58.Decode(data[len(data)-1]),
tokenizer.sign(data[0]+"."+data[1]),
tokenizer.sign(strings.Join(data[:len(data)-1], ".")),
) {
return nil, errors.New("wrong token sign")
}
@ -54,12 +56,16 @@ func (tokenizer *Tokenizer) Parse(token string) (*TokenData, error) {
if err != nil {
return nil, err
}
expTime, err := strconv.ParseInt(data[1], 36, 64)
delay, err := strconv.ParseInt(data[1], 36, 64)
if err != nil {
return nil, err
}
expTime, err := strconv.ParseInt(data[2], 36, 64)
if err != nil {
return nil, err
}
if expTime <= time.Now().UnixMilli() {
return &TokenData{id, expTime}, EXPIRED
return &TokenData{id, delay, expTime}, EXPIRED
}
return &TokenData{id, expTime}, nil
return &TokenData{id, delay, expTime}, nil
}

View file

@ -3,31 +3,40 @@ package datasaver
import (
"fmt"
"log"
"openreplay/backend/pkg/messages"
. "openreplay/backend/pkg/messages"
)
func (mi *Saver) InsertMessage(sessionID uint64, msg messages.Message) error {
func (mi *Saver) InsertMessage(msg Message) error {
sessionID := msg.SessionID()
switch m := msg.(type) {
// Common
case *messages.Metadata:
case *Metadata:
if err := mi.pg.InsertMetadata(sessionID, m); err != nil {
return fmt.Errorf("insert metadata err: %s", err)
}
return nil
case *messages.IssueEvent:
case *IssueEvent:
session, err := mi.pg.GetSession(sessionID)
if err != nil {
log.Printf("can't get session info for CH: %s", err)
} else {
if err := mi.ch.InsertIssue(session, m); err != nil {
log.Printf("can't insert issue event into clickhouse: %s", err)
}
}
return mi.pg.InsertIssueEvent(sessionID, m)
//TODO: message adapter (transformer) (at the level of pkg/message) for types: *IOSMetadata, *IOSIssueEvent and others
// Web
case *messages.SessionStart:
case *SessionStart:
return mi.pg.HandleWebSessionStart(sessionID, m)
case *messages.SessionEnd:
case *SessionEnd:
return mi.pg.HandleWebSessionEnd(sessionID, m)
case *messages.UserID:
case *UserID:
return mi.pg.InsertWebUserID(sessionID, m)
case *messages.UserAnonymousID:
case *UserAnonymousID:
return mi.pg.InsertWebUserAnonymousID(sessionID, m)
case *messages.CustomEvent:
case *CustomEvent:
session, err := mi.pg.GetSession(sessionID)
if err != nil {
log.Printf("can't get session info for CH: %s", err)
@ -37,17 +46,19 @@ func (mi *Saver) InsertMessage(sessionID uint64, msg messages.Message) error {
}
}
return mi.pg.InsertWebCustomEvent(sessionID, m)
case *messages.ClickEvent:
case *ClickEvent:
return mi.pg.InsertWebClickEvent(sessionID, m)
case *messages.InputEvent:
case *InputEvent:
return mi.pg.InsertWebInputEvent(sessionID, m)
// Unique Web messages
case *messages.PageEvent:
case *PageEvent:
return mi.pg.InsertWebPageEvent(sessionID, m)
case *messages.ErrorEvent:
return mi.pg.InsertWebErrorEvent(sessionID, m)
case *messages.FetchEvent:
case *JSException:
return mi.pg.InsertWebJSException(m)
case *IntegrationEvent:
return mi.pg.InsertWebIntegrationEvent(m)
case *FetchEvent:
session, err := mi.pg.GetSession(sessionID)
if err != nil {
log.Printf("can't get session info for CH: %s", err)
@ -62,7 +73,7 @@ func (mi *Saver) InsertMessage(sessionID uint64, msg messages.Message) error {
}
}
return mi.pg.InsertWebFetchEvent(sessionID, m)
case *messages.GraphQLEvent:
case *GraphQLEvent:
session, err := mi.pg.GetSession(sessionID)
if err != nil {
log.Printf("can't get session info for CH: %s", err)
@ -72,39 +83,30 @@ func (mi *Saver) InsertMessage(sessionID uint64, msg messages.Message) error {
}
}
return mi.pg.InsertWebGraphQLEvent(sessionID, m)
case *messages.IntegrationEvent:
return mi.pg.InsertWebErrorEvent(sessionID, &messages.ErrorEvent{
MessageID: m.Meta().Index,
Timestamp: m.Timestamp,
Source: m.Source,
Name: m.Name,
Message: m.Message,
Payload: m.Payload,
})
case *messages.SetPageLocation:
case *SetPageLocation:
return mi.pg.InsertSessionReferrer(sessionID, m.Referrer)
// IOS
case *messages.IOSSessionStart:
case *IOSSessionStart:
return mi.pg.InsertIOSSessionStart(sessionID, m)
case *messages.IOSSessionEnd:
case *IOSSessionEnd:
return mi.pg.InsertIOSSessionEnd(sessionID, m)
case *messages.IOSUserID:
case *IOSUserID:
return mi.pg.InsertIOSUserID(sessionID, m)
case *messages.IOSUserAnonymousID:
case *IOSUserAnonymousID:
return mi.pg.InsertIOSUserAnonymousID(sessionID, m)
case *messages.IOSCustomEvent:
case *IOSCustomEvent:
return mi.pg.InsertIOSCustomEvent(sessionID, m)
case *messages.IOSClickEvent:
case *IOSClickEvent:
return mi.pg.InsertIOSClickEvent(sessionID, m)
case *messages.IOSInputEvent:
case *IOSInputEvent:
return mi.pg.InsertIOSInputEvent(sessionID, m)
// Unique IOS messages
case *messages.IOSNetworkCall:
case *IOSNetworkCall:
return mi.pg.InsertIOSNetworkCall(sessionID, m)
case *messages.IOSScreenEnter:
case *IOSScreenEnter:
return mi.pg.InsertIOSScreenEnter(sessionID, m)
case *messages.IOSCrash:
case *IOSCrash:
return mi.pg.InsertIOSCrash(sessionID, m)
}

View file

@ -32,12 +32,14 @@ func (si *Saver) InsertStats(session *types.Session, msg messages.Message) error
return si.ch.InsertWebPageEvent(session, m)
case *messages.ResourceEvent:
return si.ch.InsertWebResourceEvent(session, m)
case *messages.ErrorEvent:
return si.ch.InsertWebErrorEvent(session, m)
case *messages.JSException:
return si.ch.InsertWebErrorEvent(session, types.WrapJSException(m))
case *messages.IntegrationEvent:
return si.ch.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m))
}
return nil
}
func (si *Saver) CommitStats(optimize bool) error {
func (si *Saver) CommitStats() error {
return si.ch.Commit()
}

View file

@ -0,0 +1,65 @@
package storage
import (
	"bytes"
	"crypto/aes"
	"crypto/cipher"
	cryptorand "crypto/rand"
	"errors"
	"fmt"
	"math/rand"
)
// letterSet is the alphabet for generated keys; keys stay printable so
// they can travel inside text-based messages and logs.
const letterSet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"

// GenerateEncryptionKey returns a 32-byte printable key. The first 16
// bytes are used as the AES-128 key and the last 16 as the CBC IV
// (see EncryptData/DecryptData).
func GenerateEncryptionKey() []byte {
	return append(generateRandomBytes(16), generateRandomBytes(16)...)
}

// generateRandomBytes returns size bytes drawn from letterSet.
// It uses crypto/rand as the entropy source (the previous math/rand
// source was never seeded and is not suitable for key material); if
// the system CSPRNG is unavailable it falls back to math/rand so key
// generation never fails outright.
// NOTE: indexing a 62-char alphabet with a byte mod 62 has a slight
// modulo bias; acceptable for a 32-character key.
func generateRandomBytes(size int) []byte {
	b := make([]byte, size)
	if _, err := cryptorand.Read(b); err != nil {
		// Fallback: pseudo-random source (not cryptographically secure).
		for i := range b {
			b[i] = letterSet[rand.Int63()%int64(len(letterSet))]
		}
		return b
	}
	for i := range b {
		b[i] = letterSet[int(b[i])%len(letterSet)]
	}
	return b
}
func fillLastBlock(rawText []byte, blockSize int) []byte {
padding := blockSize - len(rawText)%blockSize
padText := bytes.Repeat([]byte{byte(padding)}, padding)
return append(rawText, padText...)
}
func EncryptData(data, fullKey []byte) ([]byte, error) {
if len(fullKey) != 32 {
return nil, errors.New("wrong format of encryption key")
}
key, iv := fullKey[:16], fullKey[16:]
// Fill the last block of data by zeros
paddedData := fillLastBlock(data, aes.BlockSize)
// Create new AES cipher with CBC encryptor
block, err := aes.NewCipher(key)
if err != nil {
return nil, fmt.Errorf("cbc encryptor failed: %s", err)
}
mode := cipher.NewCBCEncrypter(block, iv)
// Encrypting data
ciphertext := make([]byte, len(paddedData))
mode.CryptBlocks(ciphertext, paddedData)
// Return encrypted data
return ciphertext, nil
}
func DecryptData(data, fullKey []byte) ([]byte, error) {
if len(fullKey) != 32 {
return nil, errors.New("wrong format of encryption key")
}
key, iv := fullKey[:16], fullKey[16:]
block, err := aes.NewCipher(key)
if err != nil {
return nil, fmt.Errorf("cbc encryptor failed: %s", err)
}
cbc := cipher.NewCBCDecrypter(block, iv)
res := make([]byte, len(data))
cbc.CryptBlocks(res, data)
return res, nil
}

View file

@ -0,0 +1,55 @@
package clickhouse
import (
"context"
"errors"
"fmt"
"log"
"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
)
// Bulk accumulates argument sets (rows) for a single INSERT statement
// and flushes them to ClickHouse in one prepared batch.
type Bulk interface {
	// Append buffers one row (the query's argument set) in memory.
	Append(args ...interface{}) error
	// Send flushes all buffered rows as a single prepared batch.
	Send() error
}

// bulkImpl is the driver-backed Bulk implementation.
type bulkImpl struct {
	conn   driver.Conn     // ClickHouse connection used to prepare batches
	query  string          // INSERT statement this bulk is bound to
	values [][]interface{} // buffered rows, cleared on every Send
}
// NewBulk validates its arguments and returns an empty Bulk bound to
// the given connection and INSERT query.
func NewBulk(conn driver.Conn, query string) (Bulk, error) {
	if conn == nil {
		return nil, errors.New("clickhouse connection is empty")
	}
	if query == "" {
		return nil, errors.New("query is empty")
	}
	bulk := &bulkImpl{
		conn:   conn,
		query:  query,
		values: make([][]interface{}, 0),
	}
	return bulk, nil
}
// Append buffers one row of query arguments in memory until Send is
// called. It always returns nil; the error return exists only to
// satisfy the Bulk interface contract.
func (b *bulkImpl) Append(args ...interface{}) error {
	b.values = append(b.values, args)
	return nil
}
// Send flushes all buffered rows to ClickHouse as one prepared batch.
// Rows that fail to append to the batch are logged and skipped; the
// remaining rows are still sent. The in-memory buffer is reset before
// sending, so rows are never re-sent on a later call even if this
// Send returns an error.
func (b *bulkImpl) Send() error {
	batch, err := b.conn.PrepareBatch(context.Background(), b.query)
	if err != nil {
		return fmt.Errorf("can't create new batch: %s", err)
	}
	for _, row := range b.values {
		appendErr := batch.Append(row...)
		if appendErr == nil {
			continue
		}
		log.Printf("can't append value set to batch, err: %s", appendErr)
		log.Printf("failed query: %s", b.query)
	}
	b.values = make([][]interface{}, 0)
	return batch.Send()
}

View file

@ -1,13 +1,11 @@
package clickhouse
import (
"context"
"errors"
"fmt"
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
"log"
"math"
"openreplay/backend/pkg/db/types"
"openreplay/backend/pkg/hashid"
"openreplay/backend/pkg/messages"
@ -18,54 +16,6 @@ import (
"openreplay/backend/pkg/license"
)
type Bulk interface {
Append(args ...interface{}) error
Send() error
}
type bulkImpl struct {
conn driver.Conn
query string
values [][]interface{}
}
func NewBulk(conn driver.Conn, query string) (Bulk, error) {
switch {
case conn == nil:
return nil, errors.New("clickhouse connection is empty")
case query == "":
return nil, errors.New("query is empty")
}
return &bulkImpl{
conn: conn,
query: query,
values: make([][]interface{}, 0),
}, nil
}
func (b *bulkImpl) Append(args ...interface{}) error {
b.values = append(b.values, args)
return nil
}
func (b *bulkImpl) Send() error {
batch, err := b.conn.PrepareBatch(context.Background(), b.query)
if err != nil {
return fmt.Errorf("can't create new batch: %s", err)
}
for _, set := range b.values {
if err := batch.Append(set...); err != nil {
log.Printf("can't append value set to batch, err: %s", err)
log.Printf("failed query: %s", b.query)
}
}
b.values = make([][]interface{}, 0)
return batch.Send()
}
var CONTEXT_MAP = map[uint64]string{0: "unknown", 1: "self", 2: "same-origin-ancestor", 3: "same-origin-descendant", 4: "same-origin", 5: "cross-origin-ancestor", 6: "cross-origin-descendant", 7: "cross-origin-unreachable", 8: "multiple-contexts"}
var CONTAINER_TYPE_MAP = map[uint64]string{0: "window", 1: "iframe", 2: "embed", 3: "object"}
type Connector interface {
Prepare() error
Commit() error
@ -74,12 +24,13 @@ type Connector interface {
InsertWebPageEvent(session *types.Session, msg *messages.PageEvent) error
InsertWebClickEvent(session *types.Session, msg *messages.ClickEvent) error
InsertWebInputEvent(session *types.Session, msg *messages.InputEvent) error
InsertWebErrorEvent(session *types.Session, msg *messages.ErrorEvent) error
InsertWebErrorEvent(session *types.Session, msg *types.ErrorEvent) error
InsertWebPerformanceTrackAggr(session *types.Session, msg *messages.PerformanceTrackAggr) error
InsertAutocomplete(session *types.Session, msgType, msgValue string) error
InsertRequest(session *types.Session, msg *messages.FetchEvent, savePayload bool) error
InsertCustom(session *types.Session, msg *messages.CustomEvent) error
InsertGraphQL(session *types.Session, msg *messages.GraphQLEvent) error
InsertIssue(session *types.Session, msg *messages.IssueEvent) error
}
type connectorImpl struct {
@ -131,11 +82,13 @@ var batches = map[string]string{
"pages": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, request_start, response_start, response_end, dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, load_event_end, first_paint, first_contentful_paint_time, speed_index, visually_complete, time_to_interactive, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"clicks": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, hesitation_time, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
"inputs": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, event_type) VALUES (?, ?, ?, ?, ?, ?)",
"errors": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, source, name, message, error_id, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
"errors": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, source, name, message, error_id, event_type, error_tags_keys, error_tags_values) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"performance": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"requests": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, request_body, response_body, status, method, duration, success, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"custom": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, name, payload, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
"graphql": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, name, request_body, response_body, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
"issuesEvents": "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, issue_id, issue_type, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
"issues": "INSERT INTO experimental.issues (project_id, issue_id, type, context_string) VALUES (?, ?, ?, ?)",
}
func (c *connectorImpl) Prepare() error {
@ -162,6 +115,32 @@ func (c *connectorImpl) checkError(name string, err error) {
}
}
func (c *connectorImpl) InsertIssue(session *types.Session, msg *messages.IssueEvent) error {
issueID := hashid.IssueID(session.ProjectID, msg)
if err := c.batches["issuesEvents"].Append(
session.SessionID,
uint16(session.ProjectID),
msg.MessageID,
datetime(msg.Timestamp),
issueID,
msg.Type,
"ISSUE",
); err != nil {
c.checkError("issuesEvents", err)
return fmt.Errorf("can't append to issuesEvents batch: %s", err)
}
if err := c.batches["issues"].Append(
uint16(session.ProjectID),
issueID,
msg.Type,
msg.ContextString,
); err != nil {
c.checkError("issues", err)
return fmt.Errorf("can't append to issues batch: %s", err)
}
return nil
}
func (c *connectorImpl) InsertWebSession(session *types.Session) error {
if session.Duration == nil {
return errors.New("trying to insert session with nil duration")
@ -297,7 +276,13 @@ func (c *connectorImpl) InsertWebInputEvent(session *types.Session, msg *message
return nil
}
func (c *connectorImpl) InsertWebErrorEvent(session *types.Session, msg *messages.ErrorEvent) error {
func (c *connectorImpl) InsertWebErrorEvent(session *types.Session, msg *types.ErrorEvent) error {
keys, values := make([]string, 0, len(msg.Tags)), make([]*string, 0, len(msg.Tags))
for k, v := range msg.Tags {
keys = append(keys, k)
values = append(values, v)
}
if err := c.batches["errors"].Append(
session.SessionID,
uint16(session.ProjectID),
@ -306,8 +291,10 @@ func (c *connectorImpl) InsertWebErrorEvent(session *types.Session, msg *message
msg.Source,
nullableString(msg.Name),
msg.Message,
hashid.WebErrorID(session.ProjectID, msg),
msg.ID(session.ProjectID),
"ERROR",
keys,
values,
); err != nil {
c.checkError("errors", err)
return fmt.Errorf("can't append to errors batch: %s", err)
@ -420,40 +407,3 @@ func (c *connectorImpl) InsertGraphQL(session *types.Session, msg *messages.Grap
}
return nil
}
func nullableUint16(v uint16) *uint16 {
var p *uint16 = nil
if v != 0 {
p = &v
}
return p
}
func nullableUint32(v uint32) *uint32 {
var p *uint32 = nil
if v != 0 {
p = &v
}
return p
}
func nullableString(v string) *string {
var p *string = nil
if v != "" {
p = &v
}
return p
}
func datetime(timestamp uint64) time.Time {
t := time.Unix(int64(timestamp/1e3), 0)
// Temporal solution for not correct timestamps in performance messages
if t.Year() < 2022 || t.Year() > 2025 {
return time.Now()
}
return t
}
func getSqIdx(messageID uint64) uint {
return uint(messageID % math.MaxInt32)
}

View file

@ -0,0 +1,38 @@
package clickhouse
import (
"time"
)
// nullableUint16 maps the zero value to nil (a SQL NULL) and any other
// value to a pointer to it.
func nullableUint16(v uint16) *uint16 {
	if v == 0 {
		return nil
	}
	return &v
}
// nullableUint32 maps the zero value to nil (a SQL NULL) and any other
// value to a pointer to it.
func nullableUint32(v uint32) *uint32 {
	if v == 0 {
		return nil
	}
	return &v
}
// nullableString maps the empty string to nil (a SQL NULL) and any
// other value to a pointer to it.
func nullableString(v string) *string {
	if v == "" {
		return nil
	}
	return &v
}
// datetime converts a millisecond Unix timestamp to time.Time with
// second precision. Timestamps whose year falls outside 2022..2025 are
// treated as corrupt and replaced with the current time — a temporary
// workaround for malformed performance-message timestamps that must be
// revisited before 2026.
func datetime(timestamp uint64) time.Time {
	ts := time.Unix(int64(timestamp/1e3), 0)
	if year := ts.Year(); year >= 2022 && year <= 2025 {
		return ts
	}
	return time.Now()
}

View file

@ -8,7 +8,6 @@ import (
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue"
"openreplay/backend/pkg/queue/types"
"strconv"
)
const numberOfPartitions = 16
@ -55,19 +54,16 @@ func NewSessionFinder(cfg *config.Config, stg *storage.Storage) (SessionFinder,
done: make(chan struct{}, 1),
}
finder.producer = queue.NewProducer(cfg.MessageSizeLimit, false)
finder.consumer = queue.NewMessageConsumer(
finder.consumer = queue.NewConsumer(
cfg.GroupFailover,
[]string{
cfg.TopicFailover,
},
func(sessionID uint64, iter messages.Iterator, meta *types.Meta) {
for iter.Next() {
if iter.Type() == 127 {
m := iter.Message().Decode().(*messages.SessionSearch)
finder.findSession(sessionID, m.Timestamp, m.Partition)
}
}
},
messages.NewMessageIterator(
func(msg messages.Message) {
m := msg.(*messages.SessionSearch)
finder.findSession(m.SessionID(), m.Timestamp, m.Partition)
}, []int{messages.MsgSessionSearch}, true),
true,
cfg.MessageSizeLimit,
)
@ -93,7 +89,9 @@ func (s *sessionFinderImpl) worker() {
}
func (s *sessionFinderImpl) findSession(sessionID, timestamp, partition uint64) {
err := s.storage.UploadKey(strconv.FormatUint(sessionID, 10), 5)
sessEnd := &messages.SessionEnd{Timestamp: timestamp}
sessEnd.SetSessionID(sessionID)
err := s.storage.UploadSessionFiles(sessEnd)
if err == nil {
log.Printf("found session: %d in partition: %d, original: %d",
sessionID, partition, sessionID%numberOfPartitions)
@ -128,7 +126,7 @@ func (s *sessionFinderImpl) nextPartition(partition uint64) uint64 {
// Create sessionSearch message and send it to queue
func (s *sessionFinderImpl) sendSearchMessage(sessionID, timestamp, partition uint64) {
msg := &messages.SessionSearch{Timestamp: timestamp, Partition: partition}
if err := s.producer.ProduceToPartition(s.topicName, partition, sessionID, messages.Encode(msg)); err != nil {
if err := s.producer.ProduceToPartition(s.topicName, partition, sessionID, msg.Encode()); err != nil {
log.Printf("can't send SessionSearch to failover topic: %s; sessID: %d", err, sessionID)
}
}

View file

@ -2,6 +2,7 @@ package kafka
import (
"log"
"openreplay/backend/pkg/messages"
"os"
"time"
@ -9,16 +10,15 @@ import (
"gopkg.in/confluentinc/confluent-kafka-go.v1/kafka"
"openreplay/backend/pkg/env"
"openreplay/backend/pkg/queue/types"
)
type Message = kafka.Message
type Consumer struct {
c *kafka.Consumer
messageHandler types.MessageHandler
commitTicker *time.Ticker
pollTimeout uint
c *kafka.Consumer
messageIterator messages.MessageIterator
commitTicker *time.Ticker
pollTimeout uint
lastReceivedPrtTs map[int32]int64
}
@ -26,7 +26,7 @@ type Consumer struct {
func NewConsumer(
group string,
topics []string,
messageHandler types.MessageHandler,
messageIterator messages.MessageIterator,
autoCommit bool,
messageSizeLimit int,
) *Consumer {
@ -70,7 +70,7 @@ func NewConsumer(
return &Consumer{
c: c,
messageHandler: messageHandler,
messageIterator: messageIterator,
commitTicker: commitTicker,
pollTimeout: 200,
lastReceivedPrtTs: make(map[int32]int64),
@ -171,11 +171,14 @@ func (consumer *Consumer) ConsumeNext() error {
return errors.Wrap(e.TopicPartition.Error, "Consumer Partition Error")
}
ts := e.Timestamp.UnixMilli()
consumer.messageHandler(decodeKey(e.Key), e.Value, &types.Meta{
Topic: *(e.TopicPartition.Topic),
ID: uint64(e.TopicPartition.Offset),
Timestamp: ts,
})
consumer.messageIterator.Iterate(
e.Value,
messages.NewBatchInfo(
decodeKey(e.Key),
*(e.TopicPartition.Topic),
uint64(e.TopicPartition.Offset),
uint64(e.TopicPartition.Partition),
ts))
consumer.lastReceivedPrtTs[e.TopicPartition.Partition] = ts
case kafka.Error:
if e.Code() == kafka.ErrAllBrokersDown || e.Code() == kafka.ErrMaxPollExceeded {
@ -194,16 +197,3 @@ func (consumer *Consumer) Close() {
log.Printf("Kafka consumer close error: %v", err)
}
}
func (consumer *Consumer) HasFirstPartition() bool {
assigned, err := consumer.c.Assignment()
if err != nil {
return false
}
for _, p := range assigned {
if p.Partition == 1 {
return true
}
}
return false
}

View file

@ -3,12 +3,13 @@ package queue
import (
"openreplay/backend/pkg/kafka"
"openreplay/backend/pkg/license"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue/types"
)
func NewConsumer(group string, topics []string, handler types.MessageHandler, autoCommit bool, messageSizeLimit int) types.Consumer {
func NewConsumer(group string, topics []string, iterator messages.MessageIterator, autoCommit bool, messageSizeLimit int) types.Consumer {
license.CheckLicense()
return kafka.NewConsumer(group, topics, handler, autoCommit, messageSizeLimit)
return kafka.NewConsumer(group, topics, iterator, autoCommit, messageSizeLimit)
}
func NewProducer(messageSizeLimit int, useBatch bool) types.Producer {

View file

@ -63,7 +63,7 @@ class SessionStart(Message):
self.user_id = user_id
class SessionEnd(Message):
class SessionEndDeprecated(Message):
__id__ = 3
def __init__(self, timestamp):
@ -213,15 +213,6 @@ class MouseMove(Message):
self.y = y
class MouseClickDepricated(Message):
__id__ = 21
def __init__(self, id, hesitation_time, label):
self.id = id
self.hesitation_time = hesitation_time
self.label = label
class ConsoleLog(Message):
__id__ = 22
@ -254,7 +245,7 @@ class PageRenderTiming(Message):
self.time_to_interactive = time_to_interactive
class JSException(Message):
class JSExceptionDeprecated(Message):
__id__ = 25
def __init__(self, name, message, payload):
@ -349,18 +340,6 @@ class ClickEvent(Message):
self.selector = selector
class ErrorEvent(Message):
__id__ = 34
def __init__(self, message_id, timestamp, source, name, message, payload):
self.message_id = message_id
self.timestamp = timestamp
self.source = source
self.name = name
self.message = message
self.payload = payload
class ResourceEvent(Message):
__id__ = 35
@ -752,6 +731,32 @@ class Zustand(Message):
self.state = state
class JSException(Message):
    """Tracker message 78: a JavaScript exception captured on the client,
    with free-form metadata (supersedes JSExceptionDeprecated, id 25)."""
    __id__ = 78

    def __init__(self, name, message, payload, metadata):
        # Exception class name (e.g. "TypeError").
        self.name = name
        # Human-readable exception message.
        self.message = message
        # Exception payload string; presumably a serialized stack trace — confirm with tracker.
        self.payload = payload
        # Serialized metadata string attached to the exception on the client.
        self.metadata = metadata
class SessionEnd(Message):
    """Tracker message 126: marks the end of a recorded session and
    carries the key used to encrypt the session's stored files
    (supersedes SessionEndDeprecated, id 3, which had no key)."""
    __id__ = 126

    def __init__(self, timestamp, encryption_key):
        # Session end time; presumably epoch milliseconds — confirm with tracker.
        self.timestamp = timestamp
        # Encryption key for the session's files (see codec, message id 126).
        self.encryption_key = encryption_key
class SessionSearch(Message):
    """Tracker message 127: internal failover message asking a worker to
    look for a session's data in a given Kafka partition."""
    __id__ = 127

    def __init__(self, timestamp, partition):
        # Timestamp associated with the searched session; units presumably
        # epoch milliseconds — confirm with the producer side.
        self.timestamp = timestamp
        # Partition number to search in.
        self.partition = partition
class IOSBatchMeta(Message):
__id__ = 107

View file

@ -124,7 +124,7 @@ class MessageCodec(Codec):
)
if message_id == 3:
return SessionEnd(
return SessionEndDeprecated(
timestamp=self.read_uint(reader)
)
@ -237,13 +237,6 @@ class MessageCodec(Codec):
y=self.read_uint(reader)
)
if message_id == 21:
return MouseClickDepricated(
id=self.read_uint(reader),
hesitation_time=self.read_uint(reader),
label=self.read_string(reader)
)
if message_id == 22:
return ConsoleLog(
level=self.read_string(reader),
@ -271,7 +264,7 @@ class MessageCodec(Codec):
)
if message_id == 25:
return JSException(
return JSExceptionDeprecated(
name=self.read_string(reader),
message=self.read_string(reader),
payload=self.read_string(reader)
@ -347,16 +340,6 @@ class MessageCodec(Codec):
selector=self.read_string(reader)
)
if message_id == 34:
return ErrorEvent(
message_id=self.read_uint(reader),
timestamp=self.read_uint(reader),
source=self.read_string(reader),
name=self.read_string(reader),
message=self.read_string(reader),
payload=self.read_string(reader)
)
if message_id == 35:
return ResourceEvent(
message_id=self.read_uint(reader),
@ -668,6 +651,26 @@ class MessageCodec(Codec):
state=self.read_string(reader)
)
if message_id == 78:
return JSException(
name=self.read_string(reader),
message=self.read_string(reader),
payload=self.read_string(reader),
metadata=self.read_string(reader)
)
if message_id == 126:
return SessionEnd(
timestamp=self.read_uint(reader),
encryption_key=self.read_string(reader)
)
if message_id == 127:
return SessionSearch(
timestamp=self.read_uint(reader),
partition=self.read_uint(reader)
)
if message_id == 107:
return IOSBatchMeta(
timestamp=self.read_uint(reader),

View file

@ -60,4 +60,6 @@ BEGIN
END;
$$ LANGUAGE plpgsql;
ALTER TABLE sessions ADD file_key BYTEA NULL;
COMMIT;

View file

View file

@ -22,5 +22,5 @@ MINIO_ACCESS_KEY = ''
MINIO_SECRET_KEY = ''
# APP and TRACKER VERSIONS
VERSION = '1.8.1'
TRACKER_VERSION = '4.1.0'
VERSION = '1.8.2'
TRACKER_VERSION = '4.1.5'

View file

@ -1,7 +1,7 @@
# openreplay-ui
OpenReplay prototype UI
On new icon addition:
On new icon addition:
`yarn gen:icons`
## Documentation
@ -14,3 +14,7 @@ On new icon addition:
Labels in comments:
TEMP = temporary code
TODO = things to implement
## Contributing notes
Please use the `dev` branch as both the base and the target branch for pull requests.

View file

@ -26,6 +26,7 @@ const siteIdRequiredPaths = [
'/dashboards',
'/metrics',
'/unprocessed',
'/notes',
// '/custom_metrics/sessions',
];
@ -37,7 +38,7 @@ const noStoringFetchPathStarts = [
// null?
export const clean = (obj, forbidenValues = [ undefined, '' ]) => {
const keys = Array.isArray(obj)
const keys = Array.isArray(obj)
? new Array(obj.length).fill().map((_, i) => i)
: Object.keys(obj);
const retObj = Array.isArray(obj) ? [] : {};
@ -49,7 +50,7 @@ export const clean = (obj, forbidenValues = [ undefined, '' ]) => {
retObj[key] = value;
}
});
return retObj;
}
@ -70,7 +71,7 @@ export default class APIClient {
this.siteId = siteId;
}
fetch(path, params, options = { clean: true }) {
fetch(path, params, options = { clean: true }) {
if (params !== undefined) {
const cleanedParams = options.clean ? clean(params) : params;
this.init.body = JSON.stringify(cleanedParams);

View file

@ -2,27 +2,27 @@ import logger from 'App/logger';
import APIClient from './api_client';
import { UPDATE, DELETE } from './duck/jwt';
export default store => next => (action) => {
export default (store) => (next) => (action) => {
const { types, call, ...rest } = action;
if (!call) {
return next(action);
}
const [ REQUEST, SUCCESS, FAILURE ] = types;
const [REQUEST, SUCCESS, FAILURE] = types;
next({ ...rest, type: REQUEST });
const client = new APIClient();
return call(client)
.then(async response => {
.then(async (response) => {
if (response.status === 403) {
next({ type: DELETE });
}
if (!response.ok) {
const text = await response.text()
const text = await response.text();
return Promise.reject(text);
}
return response.json()
return response.json();
})
.then(json => json || {}) // TEMP TODO on server: no empty responces
.then((json) => json || {}) // TEMP TODO on server: no empty responces
.then(({ jwt, errors, data }) => {
if (errors) {
next({ type: FAILURE, errors, data });
@ -34,14 +34,22 @@ export default store => next => (action) => {
}
})
.catch((e) => {
logger.error("Error during API request. ", e)
return next({ type: FAILURE, errors: JSON.parse(e).errors || [] });
logger.error('Error during API request. ', e);
return next({ type: FAILURE, errors: parseError(e) });
});
};
/**
 * Normalize an API error payload. JSON bodies yield their `errors`
 * field (or [] when absent); anything that fails to parse — or parses
 * to a value without properties, like `null` — is returned unchanged.
 */
function parseError(e) {
  try {
    const { errors } = JSON.parse(e);
    return errors || [];
  } catch {
    return e;
  }
}
function jwtExpired(token) {
try {
const base64Url = token.split('.')[ 1 ];
const base64Url = token.split('.')[1];
const base64 = base64Url.replace('-', '+').replace('_', '/');
const tokenObj = JSON.parse(window.atob(base64));
return tokenObj.exp * 1000 < Date.now(); // exp in Unix time (sec)

View file

@ -22,24 +22,30 @@ function Notifications(props: Props) {
useEffect(() => {
const interval = setInterval(() => {
notificationStore.fetchNotificationsCount()
notificationStore.fetchNotificationsCount();
}, AUTOREFRESH_INTERVAL);
return () => clearInterval(interval);
}, []);
return useObserver(() => (
<Popup content={ `Alerts` } >
<div className={ stl.button } onClick={ () => showModal(<AlertTriggersModal />, { right: true }) }>
<div className={ stl.counter } data-hidden={ count === 0 }>
{ count }
</div>
<Icon name="bell" size="18" />
<Popup content={`Alerts`}>
<div
className={stl.button}
onClick={() => showModal(<AlertTriggersModal />, { right: true })}
>
<div className={stl.counter} data-hidden={count === 0}>
{count}
</div>
</Popup>
<Icon name="bell" size="18" color="gray-dark" />
</div>
</Popup>
));
}
export default connect((state: any) => ({
notifications: state.getIn(['notifications', 'list']),
}), { fetchList, setLastRead, setViewed, clearAll })(Notifications);
export default connect(
(state: any) => ({
notifications: state.getIn(['notifications', 'list']),
}),
{ fetchList, setLastRead, setViewed, clearAll }
)(Notifications);

View file

@ -9,16 +9,16 @@
display: flex;
align-items: center;
padding: 0 15px;
height: 50px;
height: 49px;
transition: all 0.3s;
&:hover {
background-color: $gray-lightest;
background-color: $active-blue;
transition: all 0.2s;
}
&[data-active=true] {
background-color: $gray-lightest;
background-color: $active-blue;
}
}

View file

@ -19,11 +19,11 @@ function ChatControls({ stream, endCall, videoEnabled, setVideoEnabled, isPresta
if (!stream) { return; }
setAudioEnabled(stream.toggleAudio());
}
const toggleVideo = () => {
if (!stream) { return; }
stream.toggleVideo()
.then(setVideoEnabled)
.then((v) => setVideoEnabled(v))
}
/** Mute the user if they were auto-connected to the call. */

View file

@ -1,27 +1,35 @@
import React, { useState, useEffect } from 'react'
import VideoContainer from '../components/VideoContainer'
import cn from 'classnames'
import Counter from 'App/components/shared/SessionItem/Counter'
import stl from './chatWindow.module.css'
import ChatControls from '../ChatControls/ChatControls'
import React, { useState, useEffect } from 'react';
import VideoContainer from '../components/VideoContainer';
import cn from 'classnames';
import Counter from 'App/components/shared/SessionItem/Counter';
import stl from './chatWindow.module.css';
import ChatControls from '../ChatControls/ChatControls';
import Draggable from 'react-draggable';
import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream';
import { toggleVideoLocalStream } from 'Player'
export interface Props {
incomeStream: MediaStream[] | null,
localStream: LocalStream | null,
userId: string,
incomeStream: MediaStream[] | null;
localStream: LocalStream | null;
userId: string;
isPrestart?: boolean;
endCall: () => void
endCall: () => void;
}
function ChatWindow({ userId, incomeStream, localStream, endCall, isPrestart }: Props) {
const [localVideoEnabled, setLocalVideoEnabled] = useState(false)
const [localVideoEnabled, setLocalVideoEnabled] = useState(false);
const [anyRemoteEnabled, setRemoteEnabled] = useState(false);
const onlyLocalEnabled = localVideoEnabled && !anyRemoteEnabled;
useEffect(() => {
toggleVideoLocalStream(localVideoEnabled)
}, [localVideoEnabled])
return (
<Draggable handle=".handle" bounds="body">
<Draggable handle=".handle" bounds="body" defaultPosition={{ x: 50, y: 200 }}>
<div
className={cn(stl.wrapper, "fixed radius bg-white shadow-xl mt-16")}
className={cn(stl.wrapper, 'fixed radius bg-white shadow-xl mt-16')}
style={{ width: '280px' }}
>
<div className="handle flex items-center p-2 cursor-move select-none border-b">
@ -30,21 +38,39 @@ function ChatWindow({ userId, incomeStream, localStream, endCall, isPrestart }:
<br />
{incomeStream && incomeStream.length > 2 ? ' (+ other agents in the call)' : ''}
</div>
<Counter startTime={new Date().getTime() } className="text-sm ml-auto" />
<Counter startTime={new Date().getTime()} className="text-sm ml-auto" />
</div>
<div className={cn(stl.videoWrapper, 'relative')} style={{ minHeight: localVideoEnabled ? 52 : undefined}}>
{incomeStream
? incomeStream.map(stream => <React.Fragment key={stream.id}><VideoContainer stream={ stream } /></React.Fragment>) : (
<div
className={cn(stl.videoWrapper, 'relative')}
style={{ minHeight: onlyLocalEnabled ? 210 : 'unset' }}
>
{incomeStream ? (
incomeStream.map((stream) => (
<React.Fragment key={stream.id}>
<VideoContainer stream={stream} setRemoteEnabled={setRemoteEnabled} />
</React.Fragment>
))
) : (
<div className={stl.noVideo}>Error obtaining incoming streams</div>
)}
<div className={cn("absolute bottom-0 right-0 z-50", localVideoEnabled ? "" : "!hidden")}>
<VideoContainer stream={ localStream ? localStream.stream : null } muted height={50} />
<div className={cn('absolute bottom-0 right-0 z-50', localVideoEnabled ? '' : '!hidden')}>
<VideoContainer
stream={localStream ? localStream.stream : null}
muted
height={anyRemoteEnabled ? 50 : 'unset'}
/>
</div>
</div>
<ChatControls videoEnabled={localVideoEnabled} setVideoEnabled={setLocalVideoEnabled} stream={localStream} endCall={endCall} isPrestart={isPrestart} />
<ChatControls
videoEnabled={localVideoEnabled}
setVideoEnabled={setLocalVideoEnabled}
stream={localStream}
endCall={endCall}
isPrestart={isPrestart}
/>
</div>
</Draggable>
)
);
}
export default ChatWindow
export default ChatWindow;

View file

@ -1,12 +1,13 @@
import React, { useEffect, useRef } from 'react'
import React, { useEffect, useRef } from 'react';
interface Props {
stream: MediaStream | null
muted?: boolean,
height?: number
stream: MediaStream | null;
muted?: boolean;
height?: number | string;
setRemoteEnabled?: (isEnabled: boolean) => void;
}
function VideoContainer({ stream, muted = false, height = 280 }: Props) {
function VideoContainer({ stream, muted = false, height = 280, setRemoteEnabled }: Props) {
const ref = useRef<HTMLVideoElement>(null);
const [isEnabled, setEnabled] = React.useState(false);
@ -14,24 +15,43 @@ function VideoContainer({ stream, muted = false, height = 280 }: Props) {
if (ref.current) {
ref.current.srcObject = stream;
}
}, [ ref.current, stream, stream.getVideoTracks()[0]?.getSettings().width ])
}, [ref.current, stream, stream.getVideoTracks()[0]?.getSettings().width]);
useEffect(() => {
if (!stream) { return }
if (!stream) {
return;
}
const iid = setInterval(() => {
const settings = stream.getVideoTracks()[0]?.getSettings()
const isDummyVideoTrack = settings ? (settings.width === 2 || settings.frameRate === 0 || !settings.frameRate && !settings.width) : true
const shouldBeEnabled = !isDummyVideoTrack
isEnabled !== shouldBeEnabled ? setEnabled(shouldBeEnabled) : null;
}, 500)
return () => clearInterval(iid)
}, [ stream, isEnabled ])
const track = stream.getVideoTracks()[0]
const settings = track?.getSettings();
const isDummyVideoTrack = settings
? settings.width === 2 ||
settings.frameRate === 0 ||
(!settings.frameRate && !settings.width)
: true;
const shouldBeEnabled = track.enabled && !isDummyVideoTrack;
if (isEnabled !== shouldBeEnabled) {
setEnabled(shouldBeEnabled);
setRemoteEnabled?.(shouldBeEnabled);
}
}, 500);
return () => clearInterval(iid);
}, [stream, isEnabled]);
return (
<div className={"flex-1"} style={{ display: isEnabled ? undefined : 'none', width: isEnabled ? undefined : "0px!important" , height: isEnabled ? undefined : "0px!important" , border: "1px solid grey" }}>
<video autoPlay ref={ ref } muted={ muted } style={{ height: height }} />
<div
className={'flex-1'}
style={{
display: isEnabled ? undefined : 'none',
width: isEnabled ? undefined : '0px!important',
height: isEnabled ? undefined : '0px!important',
border: '1px solid grey',
}}
>
<video autoPlay ref={ref} muted={muted} style={{ height: height }} />
</div>
)
);
}
export default VideoContainer
export default VideoContainer;

View file

@ -52,7 +52,7 @@ export default class Client extends React.PureComponent {
<div className={ styles.tabMenu }>
<PreferencesMenu activeTab={activeTab} />
</div>
<div className="bg-white w-full rounded-lg mx-4 my-6 border">
<div className="bg-white w-full rounded-lg mx-4 my-8 border">
{ activeTab && this.renderActiveTab() }
</div>
</div>

View file

@ -87,7 +87,7 @@ piniaStorePlugin(examplePiniaStore)
<DocLink
className="mt-4"
label="Integrate Vuex"
label="Integrate Pinia"
url="https://docs.openreplay.com/plugins/pinia"
/>
</div>

View file

@ -81,7 +81,7 @@ const useBearStore = create(
}
/>
<DocLink className="mt-4" label="Integrate Vuex" url="https://docs.openreplay.com/plugins/zustand" />
<DocLink className="mt-4" label="Integrate Zustand" url="https://docs.openreplay.com/plugins/zustand" />
</div>
</div>
);

View file

@ -146,7 +146,7 @@ const NewAlert = (props: IProps) => {
// @ts-ignore
.toJS();
const writeQueryOption = (
e: React.ChangeEvent,
@ -196,7 +196,7 @@ const NewAlert = (props: IProps) => {
>
</div>
</div>
<div className="px-6 pb-3 flex flex-col">
<Section
index="1"
@ -244,7 +244,7 @@ const NewAlert = (props: IProps) => {
title="Notify Through"
description="You'll be noticed in app notifications. Additionally opt in to receive alerts on:"
content={
<NotifyHooks
<NotifyHooks
instance={instance}
onChangeCheck={onChangeCheck}
slackChannels={slackChannels}
@ -257,14 +257,14 @@ const NewAlert = (props: IProps) => {
</div>
<div className="flex items-center justify-between p-6 border-t">
<BottomButtons
<BottomButtons
loading={loading}
instance={instance}
deleting={deleting}
onDelete={onDelete}
/>
</div>
</Form>
<div className="bg-white mt-4 border rounded mb-10">

View file

@ -9,7 +9,7 @@ import DashboardListItem from './DashboardListItem';
function DashboardList() {
const { dashboardStore } = useStore();
const [shownDashboards, setDashboards] = React.useState([]);
const dashboards = dashboardStore.dashboards;
const dashboards = dashboardStore.sortedDashboards;
const dashboardsSearch = dashboardStore.dashboardsSearch;
React.useEffect(() => {

View file

@ -74,11 +74,13 @@ function DashboardRouter(props: Props) {
<Alerts siteId={siteId} />
</Route>
<Route exact strict path={withSiteId(alertCreate(), siteId)}>
<CreateAlert siteId={siteId} />
<Route exact path={withSiteId(alertCreate(), siteId)}>
{/* @ts-ignore */}
<CreateAlert siteId={siteId as string} />
</Route>
<Route exact strict path={withSiteId(alertEdit(), siteId)}>
<Route exact path={withSiteId(alertEdit(), siteId)}>
{/* @ts-ignore */}
<CreateAlert siteId={siteId} {...props} />
</Route>
</Switch>

View file

@ -23,7 +23,7 @@ function DashboardSideMenu(props: Props) {
return (
<div>
<SideMenuHeader className="mb-4 flex items-center" text="Preferences" />
{/* <SideMenuHeader className="mb-4 flex items-center" text="Dashboard" /> */}
<div className="w-full">
<SideMenuitem
active={isDashboards}
@ -33,7 +33,7 @@ function DashboardSideMenu(props: Props) {
onClick={() => redirect(withSiteId(dashboard(), siteId))}
/>
</div>
<div className="border-t w-full my-2" />
<div className="w-full my-2" />
<div className="w-full">
<SideMenuitem
active={isMetric}
@ -43,7 +43,7 @@ function DashboardSideMenu(props: Props) {
onClick={() => redirect(withSiteId(metrics(), siteId))}
/>
</div>
<div className="border-t w-full my-2" />
<div className="w-full my-2" />
<div className="w-full">
<SideMenuitem
active={isAlerts}

View file

@ -21,172 +21,192 @@ import AddMetricContainer from '../DashboardWidgetGrid/AddMetricContainer';
import OutsideClickDetectingDiv from 'Shared/OutsideClickDetectingDiv';
interface IProps {
siteId: string;
dashboardId: any;
renderReport?: any;
siteId: string;
dashboardId: any;
renderReport?: any;
}
type Props = IProps & RouteComponentProps;
function DashboardView(props: Props) {
const { siteId, dashboardId } = props;
const { dashboardStore } = useStore();
const { showModal } = useModal();
const { siteId, dashboardId } = props;
const { dashboardStore } = useStore();
const { showModal } = useModal();
const [showTooltip, setShowTooltip] = React.useState(false);
const [focusTitle, setFocusedInput] = React.useState(true);
const [showEditModal, setShowEditModal] = React.useState(false);
const [showTooltip, setShowTooltip] = React.useState(false);
const [focusTitle, setFocusedInput] = React.useState(true);
const [showEditModal, setShowEditModal] = React.useState(false);
const showAlertModal = dashboardStore.showAlertModal;
const loading = dashboardStore.fetchingDashboard;
const dashboard: any = dashboardStore.selectedDashboard;
const period = dashboardStore.period;
const showAlertModal = dashboardStore.showAlertModal;
const loading = dashboardStore.fetchingDashboard;
const dashboard: any = dashboardStore.selectedDashboard;
const period = dashboardStore.period;
const queryParams = new URLSearchParams(props.location.search);
const queryParams = new URLSearchParams(props.location.search);
const trimQuery = () => {
if (!queryParams.has('modal')) return;
queryParams.delete('modal');
props.history.replace({
search: queryParams.toString(),
});
};
const pushQuery = () => {
if (!queryParams.has('modal')) props.history.push('?modal=addMetric');
};
const trimQuery = () => {
if (!queryParams.has('modal')) return;
queryParams.delete('modal');
props.history.replace({
search: queryParams.toString(),
});
};
const pushQuery = () => {
if (!queryParams.has('modal')) props.history.push('?modal=addMetric');
};
useEffect(() => {
if (queryParams.has('modal')) {
onAddWidgets();
trimQuery();
}
}, []);
useEffect(() => {
if (queryParams.has('modal')) {
onAddWidgets();
trimQuery();
}
}, []);
useEffect(() => {
const isExists = dashboardStore.getDashboardById(dashboardId);
if (!isExists) {
props.history.push(withSiteId(`/dashboard`, siteId));
}
}, [dashboardId]);
useEffect(() => {
const isExists = dashboardStore.getDashboardById(dashboardId);
if (!isExists) {
props.history.push(withSiteId(`/dashboard`, siteId));
}
}, [dashboardId]);
useEffect(() => {
if (!dashboard || !dashboard.dashboardId) return;
dashboardStore.fetch(dashboard.dashboardId);
}, [dashboard]);
useEffect(() => {
if (!dashboard || !dashboard.dashboardId) return;
dashboardStore.fetch(dashboard.dashboardId);
}, [dashboard]);
const onAddWidgets = () => {
dashboardStore.initDashboard(dashboard);
showModal(<DashboardModal siteId={siteId} onMetricAdd={pushQuery} dashboardId={dashboardId} />, { right: true });
};
const onEdit = (isTitle: boolean) => {
dashboardStore.initDashboard(dashboard);
setFocusedInput(isTitle);
setShowEditModal(true);
};
const onDelete = async () => {
if (
await confirm({
header: 'Confirm',
confirmButton: 'Yes, delete',
confirmation: `Are you sure you want to permanently delete this Dashboard?`,
})
) {
dashboardStore.deleteDashboard(dashboard).then(() => {
props.history.push(withSiteId(`/dashboard`, siteId));
});
}
};
if (!dashboard) return null;
return (
<Loader loading={loading}>
<div style={{ maxWidth: '1300px', margin: 'auto' }}>
<DashboardEditModal show={showEditModal} closeHandler={() => setShowEditModal(false)} focusTitle={focusTitle} />
<Breadcrumb
items={[
{
label: 'Dashboards',
to: withSiteId('/dashboard', siteId),
},
{ label: (dashboard && dashboard.name) || '' },
]}
/>
<div className="flex items-center mb-2 justify-between">
<div className="flex items-center" style={{ flex: 3 }}>
<PageTitle
title={
// @ts-ignore
<Tooltip delay={100} arrow title="Double click to rename">
{dashboard?.name}
</Tooltip>
}
onDoubleClick={() => onEdit(true)}
className="mr-3 select-none border-b border-b-borderColor-transparent hover:border-dotted hover:border-gray-medium cursor-pointer"
actionButton={
/* @ts-ignore */
<Tooltip
open={showTooltip}
interactive
useContext
// @ts-ignore
theme="nopadding"
hideDelay={0}
duration={0}
distance={20}
html={
<div style={{ padding: 0 }}>
<OutsideClickDetectingDiv onClickOutside={() => setShowTooltip(false)}>
<AddMetricContainer onAction={() => setShowTooltip(false)} isPopup siteId={siteId} />
</OutsideClickDetectingDiv>
</div>
}
>
<Button variant="primary" onClick={() => setShowTooltip(true)}>
Add Metric
</Button>
</Tooltip>
}
/>
</div>
<div className="flex items-center" style={{ flex: 1, justifyContent: 'end' }}>
<div className="flex items-center flex-shrink-0 justify-end" style={{ width: '300px' }}>
<SelectDateRange
style={{ width: '300px' }}
period={period}
onChange={(period: any) => dashboardStore.setPeriod(period)}
right={true}
/>
</div>
<div className="mx-4" />
<div className="flex items-center flex-shrink-0">
<DashboardOptions
editHandler={onEdit}
deleteHandler={onDelete}
renderReport={props.renderReport}
isTitlePresent={!!dashboard?.description}
/>
</div>
</div>
</div>
<div className="pb-4">
{/* @ts-ignore */}
<Tooltip delay={100} arrow title="Double click to rename" className='w-fit !block'>
<h2
className="my-2 font-normal w-fit text-disabled-text border-b border-b-borderColor-transparent hover:border-dotted hover:border-gray-medium cursor-pointer"
onDoubleClick={() => onEdit(false)}
>
{dashboard?.description || 'Describe the purpose of this dashboard'}
</h2>
</Tooltip>
</div>
<DashboardWidgetGrid siteId={siteId} dashboardId={dashboardId} onEditHandler={onAddWidgets} id="report" />
<AlertFormModal showModal={showAlertModal} onClose={() => dashboardStore.updateKey('showAlertModal', false)} />
</div>
</Loader>
const onAddWidgets = () => {
dashboardStore.initDashboard(dashboard);
showModal(
<DashboardModal siteId={siteId} onMetricAdd={pushQuery} dashboardId={dashboardId} />,
{ right: true }
);
};
const onEdit = (isTitle: boolean) => {
dashboardStore.initDashboard(dashboard);
setFocusedInput(isTitle);
setShowEditModal(true);
};
const onDelete = async () => {
if (
await confirm({
header: 'Confirm',
confirmButton: 'Yes, delete',
confirmation: `Are you sure you want to permanently delete this Dashboard?`,
})
) {
dashboardStore.deleteDashboard(dashboard).then(() => {
props.history.push(withSiteId(`/dashboard`, siteId));
});
}
};
if (!dashboard) return null;
return (
<Loader loading={loading}>
<div style={{ maxWidth: '1300px', margin: 'auto' }}>
<DashboardEditModal
show={showEditModal}
closeHandler={() => setShowEditModal(false)}
focusTitle={focusTitle}
/>
<Breadcrumb
items={[
{
label: 'Dashboards',
to: withSiteId('/dashboard', siteId),
},
{ label: (dashboard && dashboard.name) || '' },
]}
/>
<div className="flex items-center mb-2 justify-between">
<div className="flex items-center" style={{ flex: 3 }}>
<PageTitle
title={
// @ts-ignore
<Tooltip delay={100} arrow title="Double click to rename">
{dashboard?.name}
</Tooltip>
}
onDoubleClick={() => onEdit(true)}
className="mr-3 select-none border-b border-b-borderColor-transparent hover:border-dotted hover:border-gray-medium cursor-pointer"
actionButton={
<OutsideClickDetectingDiv onClickOutside={() => setShowTooltip(false)}>
<Tooltip
open={showTooltip}
interactive
useContext
// @ts-ignore
theme="nopadding"
hideDelay={0}
duration={0}
distance={20}
html={
<div style={{ padding: 0 }}>
<AddMetricContainer
onAction={() => setShowTooltip(false)}
isPopup
siteId={siteId}
/>
</div>
}
>
<Button variant="primary" onClick={() => setShowTooltip(true)}>
Add Metric
</Button>
</Tooltip>
</OutsideClickDetectingDiv>
}
/>
</div>
<div className="flex items-center" style={{ flex: 1, justifyContent: 'end' }}>
<div className="flex items-center flex-shrink-0 justify-end" style={{ width: '300px' }}>
<SelectDateRange
style={{ width: '300px' }}
period={period}
onChange={(period: any) => dashboardStore.setPeriod(period)}
right={true}
/>
</div>
<div className="mx-4" />
<div className="flex items-center flex-shrink-0">
<DashboardOptions
editHandler={onEdit}
deleteHandler={onDelete}
renderReport={props.renderReport}
isTitlePresent={!!dashboard?.description}
/>
</div>
</div>
</div>
<div className="pb-4">
{/* @ts-ignore */}
<Tooltip delay={100} arrow title="Double click to rename" className="w-fit !block">
<h2
className="my-2 font-normal w-fit text-disabled-text border-b border-b-borderColor-transparent hover:border-dotted hover:border-gray-medium cursor-pointer"
onDoubleClick={() => onEdit(false)}
>
{dashboard?.description || 'Describe the purpose of this dashboard'}
</h2>
</Tooltip>
</div>
<DashboardWidgetGrid
siteId={siteId}
dashboardId={dashboardId}
onEditHandler={onAddWidgets}
id="report"
/>
<AlertFormModal
showModal={showAlertModal}
onClose={() => dashboardStore.updateKey('showAlertModal', false)}
/>
</div>
</Loader>
);
}
// @ts-ignore
export default withPageTitle('Dashboards - OpenReplay')(withReport(withRouter(withModal(observer(DashboardView)))));
export default withPageTitle('Dashboards - OpenReplay')(
withReport(withRouter(withModal(observer(DashboardView))))
);

View file

@ -1,18 +1,18 @@
import { useObserver } from 'mobx-react-lite';
import { observer } from 'mobx-react-lite';
import React, { useEffect } from 'react';
import { NoContent, Pagination, Icon } from 'UI';
import { useStore } from 'App/mstore';
import { filterList } from 'App/utils';
import MetricListItem from '../MetricListItem';
import { sliceListPerPage } from 'App/utils';
import { IWidget } from 'App/mstore/types/widget';
import Widget from 'App/mstore/types/widget';
function MetricsList({ siteId }: { siteId: string }) {
const { metricStore } = useStore();
const metrics = useObserver(() => metricStore.metrics);
const metricsSearch = useObserver(() => metricStore.metricsSearch);
const metrics = metricStore.sortedWidgets;
const metricsSearch = metricStore.metricsSearch;
const filterByDashboard = (item: IWidget, searchRE: RegExp) => {
const filterByDashboard = (item: Widget, searchRE: RegExp) => {
const dashboardsStr = item.dashboards.map((d: any) => d.name).join(' ');
return searchRE.test(dashboardsStr);
};
@ -26,7 +26,7 @@ function MetricsList({ siteId }: { siteId: string }) {
metricStore.updateKey('sessionsPage', 1);
}, []);
return useObserver(() => (
return (
<NoContent
show={lenth === 0}
title={
@ -68,7 +68,7 @@ function MetricsList({ siteId }: { siteId: string }) {
/>
</div>
</NoContent>
));
);
}
export default MetricsList;
export default observer(MetricsList);

View file

@ -19,155 +19,138 @@ import SessionBar from './SessionBar';
@withSiteIdRouter
@connect(
(state) => ({
error: state.getIn(['errors', 'instance']),
trace: state.getIn(['errors', 'instanceTrace']),
sourcemapUploaded: state.getIn(['errors', 'sourcemapUploaded']),
resolveToggleLoading: state.getIn(['errors', 'resolve', 'loading']) || state.getIn(['errors', 'unresolve', 'loading']),
ignoreLoading: state.getIn(['errors', 'ignore', 'loading']),
toggleFavoriteLoading: state.getIn(['errors', 'toggleFavorite', 'loading']),
traceLoading: state.getIn(['errors', 'fetchTrace', 'loading']),
}),
{
resolve,
unresolve,
ignore,
toggleFavorite,
addFilterByKeyAndValue,
}
(state) => ({
error: state.getIn(['errors', 'instance']),
trace: state.getIn(['errors', 'instanceTrace']),
sourcemapUploaded: state.getIn(['errors', 'sourcemapUploaded']),
resolveToggleLoading:
state.getIn(['errors', 'resolve', 'loading']) ||
state.getIn(['errors', 'unresolve', 'loading']),
ignoreLoading: state.getIn(['errors', 'ignore', 'loading']),
toggleFavoriteLoading: state.getIn(['errors', 'toggleFavorite', 'loading']),
traceLoading: state.getIn(['errors', 'fetchTrace', 'loading']),
}),
{
resolve,
unresolve,
ignore,
toggleFavorite,
addFilterByKeyAndValue,
}
)
export default class MainSection extends React.PureComponent {
resolve = () => {
const { error } = this.props;
this.props.resolve(error.errorId);
};
resolve = () => {
const { error } = this.props;
this.props.resolve(error.errorId);
};
unresolve = () => {
const { error } = this.props;
this.props.unresolve(error.errorId);
};
unresolve = () => {
const { error } = this.props;
this.props.unresolve(error.errorId);
};
ignore = () => {
const { error } = this.props;
this.props.ignore(error.errorId);
};
bookmark = () => {
const { error } = this.props;
this.props.toggleFavorite(error.errorId);
};
ignore = () => {
const { error } = this.props;
this.props.ignore(error.errorId);
};
bookmark = () => {
const { error } = this.props;
this.props.toggleFavorite(error.errorId);
};
findSessions = () => {
this.props.addFilterByKeyAndValue(FilterKey.ERROR, this.props.error.message);
this.props.history.push(sessionsRoute());
};
findSessions = () => {
this.props.addFilterByKeyAndValue(FilterKey.ERROR, this.props.error.message);
this.props.history.push(sessionsRoute());
};
render() {
const { error, trace, sourcemapUploaded, ignoreLoading, resolveToggleLoading, toggleFavoriteLoading, className, traceLoading } = this.props;
const isPlayer = window.location.pathname.includes('/session/')
render() {
const {
error,
trace,
sourcemapUploaded,
ignoreLoading,
resolveToggleLoading,
toggleFavoriteLoading,
className,
traceLoading,
} = this.props;
const isPlayer = window.location.pathname.includes('/session/');
return (
<div className={cn(className, 'bg-white border-radius-3 thin-gray-border mb-6')}>
<div className="m-4">
<ErrorName
className="text-lg leading-relaxed"
name={error.name}
message={error.stack0InfoString}
lineThrough={error.status === RESOLVED}
/>
<div className="flex justify-between items-center">
<div className="flex items-center color-gray-dark" style={{ wordBreak: 'break-all' }}>
{error.message}
</div>
<div className="text-center">
<div className="flex">
<Label topValue={error.sessions} topValueSize="text-lg" bottomValue="Sessions" />
<Label topValue={error.users} topValueSize="text-lg" bottomValue="Users" />
</div>
<div className="text-xs color-gray-medium">Over the past 30 days</div>
</div>
</div>
</div>
{/* <Divider />
<div className="flex m-4">
{ error.status === UNRESOLVED
? <IconButton
outline
className="mr-3"
label="Resolve"
size="small"
icon="check"
loading={ resolveToggleLoading }
onClick={ this.resolve }
/>
: <IconButton
outline
className="mr-3"
label="Unresolve"
size="small"
icon="exclamation-circle"
loading={ resolveToggleLoading }
onClick={ this.unresolve }
/>
}
{ error.status !== IGNORED &&
<IconButton
outline
className="mr-3"
label="Ignore"
size="small"
icon="ban"
loading={ ignoreLoading }
onClick={ this.ignore }
/>
}
<IconButton
primaryText
label="Bookmark"
size="small"
compact
icon={ error.favorite ? "star-solid" : "star" }
loading={ toggleFavoriteLoading }
onClick={ this.bookmark }
/>
<SharePopup
entity="errors"
id={ error.errorId }
trigger={
<IconButton
primaryText
label="Share"
size="small"
icon="share-alt"
/>
}
/>
</div> */}
<Divider />
{!isPlayer && (
<div className="m-4">
<h3 className="text-xl inline-block mr-2">Last session with this error</h3>
<span className="font-thin text-sm">{resentOrDate(error.lastOccurrence)}</span>
<SessionBar className="my-4" session={error.lastHydratedSession} />
<Button variant="text-primary" onClick={this.findSessions}>
Find all sessions with this error
<Icon className="ml-1" name="next1" color="teal" />
</Button>
</div>
)}
<Divider />
<div className="m-4">
<Loader loading={traceLoading}>
<ErrorDetails
name={error.name}
message={error.message}
errorStack={trace}
error={error}
sourcemapUploaded={sourcemapUploaded}
/>
</Loader>
</div>
return (
<div className={cn(className, 'bg-white border-radius-3 thin-gray-border mb-6')}>
<div className="m-4">
<ErrorName
className="text-lg leading-relaxed"
name={error.name}
message={error.stack0InfoString}
lineThrough={error.status === RESOLVED}
/>
<div className="flex flex-col">
<div
className="flex items-center color-gray-dark font-semibold"
style={{ wordBreak: 'break-all' }}
>
{error.message}
</div>
);
}
<div className="flex items-center mt-2">
<div className="flex">
<Label
topValue={error.sessions}
horizontal
topValueSize="text-lg"
bottomValue="Sessions"
/>
<Label
topValue={error.users}
horizontal
topValueSize="text-lg"
bottomValue="Users"
/>
</div>
<div className="text-xs color-gray-medium">Over the past 30 days</div>
</div>
</div>
</div>
<Divider />
<div className="m-4">
<div className="flex items-center">
<h3 className="text-xl inline-block mr-2">Last session with this error</h3>
<span className="font-thin text-sm">{resentOrDate(error.lastOccurrence)}</span>
<Button className="ml-auto" variant="text-primary" onClick={this.findSessions}>
Find all sessions with this error
<Icon className="ml-1" name="next1" color="teal" />
</Button>
</div>
<SessionBar className="my-4" session={error.lastHydratedSession} />
{error.customTags.length > 0 ? (
<div className="flex items-start flex-col">
<div>
<span className="font-semibold">More Info</span> <span className="text-disabled-text">(most recent call)</span>
</div>
<div className="mt-4 flex items-center gap-3 w-full flex-wrap">
{error.customTags.map((tag) => (
<div className="flex items-center rounded overflow-hidden bg-gray-lightest">
<div className="bg-gray-light-shade py-1 px-2 text-disabled-text">{Object.entries(tag)[0][0]}</div> <div className="py-1 px-2 text-gray-dark">{Object.entries(tag)[0][1]}</div>
</div>
))}
</div>
</div>
) : null}
</div>
<Divider />
<div className="m-4">
<Loader loading={traceLoading}>
<ErrorDetails
name={error.name}
message={error.message}
errorStack={trace}
error={error}
sourcemapUploaded={sourcemapUploaded}
/>
</Loader>
</div>
</div>
);
}
}

View file

@ -31,12 +31,12 @@ function partitionsWrapper(partitions = [], mapCountry = false) {
.sort((a, b) => b.count - a.count)
.slice(0, showLength)
.map(p => ({
label: mapCountry
? (countries[p.name] || "Unknown")
label: mapCountry
? (countries[p.name] || "Unknown")
: p.name,
prc: p.count/sum * 100,
}))
if (otherPrcsSum > 0) {
show.push({
label: "Other",
@ -47,9 +47,9 @@ function partitionsWrapper(partitions = [], mapCountry = false) {
return show;
}
function tagsWrapper(tags = []) {
return tags.map(({ name, partitions }) => ({
name,
partitions: partitionsWrapper(partitions, name === "country")
return tags.map(({ name, partitions }) => ({
name,
partitions: partitionsWrapper(partitions, name === "country")
}))
}
@ -59,7 +59,7 @@ function dataWrapper(data = {}) {
chart30: data.chart30 || [],
tags: tagsWrapper(data.tags),
};
}
}
@connect(state => ({
error: state.getIn([ "errors", "instance" ])
@ -75,7 +75,7 @@ export default class SideSection extends React.PureComponent {
}
render() {
const {
const {
className,
error,
data,
@ -96,20 +96,20 @@ export default class SideSection extends React.PureComponent {
timeFormat={'l'}
/>
<div className="mb-6" />
<DateAgo
<DateAgo
className="my-4"
title="First Seen"
timestamp={ error.firstOccurrence }
/>
<DateAgo
<DateAgo
className="my-4"
title="Last Seen"
timestamp={ error.lastOccurrence }
/>
{ data.tags.length > 0 && <h4 className="text-xl mt-6 mb-3">Tags</h4> }
{ data.tags.length > 0 && <h4 className="text-xl mt-6 mb-3">Summary</h4> }
<Loader loading={loading}>
{ data.tags.map(({ name, partitions }) =>
<DistributionBar
<DistributionBar
key={ name }
title={name}
partitions={partitions}

View file

@ -1,10 +1,10 @@
import React from 'react';
import cn from "classnames";
function Label({ className, topValue, topValueSize = 'text-base', bottomValue, topMuted = false, bottomMuted = false }) {
function Label({ className, topValue, topValueSize = 'text-base', bottomValue, topMuted = false, bottomMuted = false, horizontal = false }) {
return (
<div className={ cn(className, "flex flex-col items-center px-4") } >
<div className={ cn(topValueSize, { "color-gray-medium": topMuted }) } >{ topValue }</div>
<div className={ cn(className, "flex items-center px-4", horizontal ? '!pl-0' : 'flex-col') } >
<div className={ cn(topValueSize, { "color-gray-medium": topMuted }, horizontal ? 'mr-2' : '') } >{ topValue }</div>
<div className={ cn("font-light text-sm", { "color-gray-medium": bottomMuted }) }>{ bottomValue }</div>
</div>
);
@ -12,4 +12,4 @@ function Label({ className, topValue, topValueSize = 'text-base', bottomValue, t
Label.displayName = "Label";
export default Label;
export default Label;

View file

@ -1,83 +1,99 @@
import React from 'react';
import FunnelStepText from './FunnelStepText';
import { Icon, Popup } from 'UI';
import { Tooltip } from 'react-tippy';
interface Props {
filter: any;
filter: any;
}
function FunnelBar(props: Props) {
const { filter } = props;
// const completedPercentage = calculatePercentage(filter.sessionsCount, filter.dropDueToIssues);
const { filter } = props;
// const completedPercentage = calculatePercentage(filter.sessionsCount, filter.dropDueToIssues);
return (
<div className="w-full mb-4">
<FunnelStepText filter={filter} />
<div style={{
height: '25px',
// marginBottom: '10px',
width: '100%',
backgroundColor: '#f5f5f5',
position: 'relative',
borderRadius: '3px',
overflow: 'hidden',
}}>
<div className="flex items-center" style={{
width: `${filter.completedPercentage}%`,
position: 'absolute',
top: 0,
left: 0,
bottom: 0,
// height: '10px',
backgroundColor: '#00b5ad',
}}>
<div className="color-white absolute right-0 flex items-center font-medium mr-2 leading-3">{filter.completedPercentage}%</div>
</div>
{filter.dropDueToIssues > 0 && (
<div className="flex items-center justify-end" style={{
width: `${filter.dropDueToIssuesPercentage}%`,
position: 'absolute',
top: 0,
right: 0,
bottom: 0,
left: `${filter.completedPercentage}%`,
// height: '10px',
backgroundColor: '#ff5a5f',
opacity: 0.5,
}}>
<Popup
content="Drop due to issues"
// offset={100}
sticky={true}
>
<div className="color-white w-full flex items-center font-medium mr-2 leading-3">{filter.dropDueToIssuesPercentage}%</div>
</Popup>
</div>
)}
</div>
<div className="flex justify-between py-2">
<div className="flex items-center">
<Icon name="arrow-right-short" size="20" color="green" />
<span className="mx-1 font-medium">{filter.sessionsCount}</span>
<span>Completed</span>
</div>
<div className="flex items-center">
<Icon name="caret-down-fill" color="red" size={16} />
<span className="font-medium mx-1 color-red">{filter.droppedCount}</span>
<span>Dropped</span>
</div>
</div>
return (
<div className="w-full mb-4">
<FunnelStepText filter={filter} />
<div
style={{
height: '25px',
// marginBottom: '10px',
width: '100%',
backgroundColor: '#f5f5f5',
position: 'relative',
borderRadius: '3px',
overflow: 'hidden',
}}
>
<div
className="flex items-center"
style={{
width: `${filter.completedPercentage}%`,
position: 'absolute',
top: 0,
left: 0,
bottom: 0,
// height: '10px',
backgroundColor: '#00b5ad',
}}
>
<div className="color-white absolute right-0 flex items-center font-medium mr-2 leading-3">
{/* {filter.completedPercentage}% */}
</div>
</div>
);
{filter.dropDueToIssues > 0 && (
<div
className="flex items-center"
style={{
position: 'absolute',
top: 0,
right: 0,
bottom: 0,
left: `${filter.completedPercentage}%`,
opacity: 0.5,
padding: '4px',
}}
>
<div
className="stripes relative"
style={{
width: `${filter.dropDueToIssuesPercentage}%`,
// backgroundColor: 'red',
height: '16px',
}}
>
{/* @ts-ignore */}
<Tooltip title={`${filter.dropDueToIssues} (${filter.dropDueToIssuesPercentage}%) Dropped due to issues`} position="top-start">
<div className="w-full h-8 absolute"/>
</Tooltip>
</div>
</div>
)}
</div>
<div className="flex justify-between py-2">
<div className="flex items-center">
<Icon name="arrow-right-short" size="20" color="green" />
<span className="mx-1 font-medium">{filter.sessionsCount}</span>
<span className="color-gray-medium text-sm">
({filter.completedPercentage}%) Completed
</span>
{/* <span>Completed</span> */}
</div>
<div className="flex items-center">
<Icon name="caret-down-fill" color="red" size={16} />
<span className="font-medium mx-1 color-red">{filter.droppedCount}</span>
<span className="text-sm color-red">({filter.droppedPercentage}%) Dropped</span>
</div>
</div>
</div>
);
}
export default FunnelBar;
const calculatePercentage = (completed: number, dropped: number) => {
const total = completed + dropped;
if (dropped === 0) return 100;
if (total === 0) return 0;
const total = completed + dropped;
if (dropped === 0) return 100;
if (total === 0) return 0;
return Math.round((completed / dropped) * 100);
}
return Math.round((completed / dropped) * 100);
};

View file

@ -0,0 +1,70 @@
import React from 'react';
import { NavLink, withRouter } from 'react-router-dom';
import {
sessions,
metrics,
assist,
client,
dashboard,
withSiteId,
CLIENT_DEFAULT_TAB,
} from 'App/routes';
import SiteDropdown from '../SiteDropdown';
import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG';
import styles from '../header.module.css';
// Route paths resolved once at module load; the route factories take no
// arguments here, so these are static strings reused by every render.
const DASHBOARD_PATH = dashboard();
const METRICS_PATH = metrics();
const SESSIONS_PATH = sessions();
const ASSIST_PATH = assist();
interface Props {
  // Active project/site id used to build site-scoped links via withSiteId.
  // NOTE(review): typed `any` in the original — presumably a string or number id; confirm against SiteDropdown/header callers.
  siteId: any;
}
/**
 * Default (non-preferences) header menu: logo with a version badge that
 * links back to the sessions list, the project dropdown, and the
 * Sessions / Assist / Dashboards navigation links.
 */
function DefaultMenuView(props: Props) {
  const { siteId } = props;

  // "Dashboards" stays highlighted on both the dashboard and metrics routes.
  const isDashboardActive = (_: any, location: { pathname: string }) =>
    location.pathname.includes(DASHBOARD_PATH) || location.pathname.includes(METRICS_PATH);

  return (
    <div className="flex items-center">
      <NavLink to={withSiteId(SESSIONS_PATH, siteId)}>
        <div className="relative select-none">
          <div className="px-4 py-2">
            <AnimatedSVG name={ICONS.LOGO_SMALL} size="30" />
          </div>
          {/* App version badge pinned to the logo's bottom-right corner */}
          <div className="absolute bottom-0" style={{ fontSize: '7px', right: '5px' }}>
            v{window.env.VERSION}
          </div>
        </div>
      </NavLink>
      <SiteDropdown />
      <NavLink
        to={withSiteId(SESSIONS_PATH, siteId)}
        className={styles.nav}
        activeClassName={styles.active}
      >
        {'Sessions'}
      </NavLink>
      <NavLink
        to={withSiteId(ASSIST_PATH, siteId)}
        className={styles.nav}
        activeClassName={styles.active}
      >
        {'Assist'}
      </NavLink>
      <NavLink
        to={withSiteId(DASHBOARD_PATH, siteId)}
        className={styles.nav}
        activeClassName={styles.active}
        isActive={isDashboardActive}
      >
        <span>{'Dashboards'}</span>
      </NavLink>
    </div>
  );
}
export default DefaultMenuView;

View file

@ -0,0 +1 @@
export { default } from './DefaultMenuView'

View file

@ -2,57 +2,40 @@ import React, { useEffect, useState } from 'react';
import { connect } from 'react-redux';
import { NavLink, withRouter } from 'react-router-dom';
import cn from 'classnames';
import {
sessions,
metrics,
assist,
client,
dashboard,
withSiteId,
CLIENT_DEFAULT_TAB,
} from 'App/routes';
import { client, CLIENT_DEFAULT_TAB } from 'App/routes';
import { logout } from 'Duck/user';
import { Icon, Popup } from 'UI';
import SiteDropdown from './SiteDropdown';
import styles from './header.module.css';
import OnboardingExplore from './OnboardingExplore/OnboardingExplore'
import Announcements from '../Announcements';
import OnboardingExplore from './OnboardingExplore/OnboardingExplore';
import Notifications from '../Alerts/Notifications';
import { init as initSite } from 'Duck/site';
import { getInitials } from 'App/utils';
import ErrorGenPanel from 'App/dev/components';
import Alerts from '../Alerts/Alerts';
import AnimatedSVG, { ICONS } from '../shared/AnimatedSVG/AnimatedSVG';
import { fetchListActive as fetchMetadata } from 'Duck/customField';
import { useStore } from 'App/mstore';
import { useObserver } from 'mobx-react-lite';
import UserMenu from './UserMenu';
import SettingsMenu from './SettingsMenu';
import DefaultMenuView from './DefaultMenuView';
import PreferencesView from './PreferencesView';
const DASHBOARD_PATH = dashboard();
const METRICS_PATH = metrics();
const SESSIONS_PATH = sessions();
const ASSIST_PATH = assist();
const CLIENT_PATH = client(CLIENT_DEFAULT_TAB);
const Header = (props) => {
const {
sites, location, account,
onLogoutClick, siteId,
boardingCompletion = 100, showAlerts = false,
} = props;
const name = account.get('name').split(" ")[0];
const [hideDiscover, setHideDiscover] = useState(false)
const { sites, account, siteId, boardingCompletion = 100, showAlerts = false } = props;
const name = account.get('name');
const [hideDiscover, setHideDiscover] = useState(false);
const { userStore, notificationStore } = useStore();
const initialDataFetched = useObserver(() => userStore.initialDataFetched);
let activeSite = null;
const onAccountClick = () => {
props.history.push(CLIENT_PATH);
}
const isPreferences = window.location.pathname.includes('/client/');
useEffect(() => {
if (!account.id || initialDataFetched) return;
setTimeout(() => {
Promise.all([
userStore.fetchLimits(),
@ -65,90 +48,64 @@ const Header = (props) => {
}, [account]);
useEffect(() => {
activeSite = sites.find(s => s.id == siteId);
activeSite = sites.find((s) => s.id == siteId);
props.initSite(activeSite);
}, [siteId])
}, [siteId]);
return (
<div className={ cn(styles.header) } style={{ height: '50px'}}>
<NavLink to={ withSiteId(SESSIONS_PATH, siteId) }>
<div className="relative select-none">
<div className="px-4 py-2">
<AnimatedSVG name={ICONS.LOGO_SMALL} size="30" />
</div>
<div className="absolute bottom-0" style={{ fontSize: '7px', right: '5px' }}>v{window.env.VERSION}</div>
</div>
</NavLink>
<SiteDropdown />
<div className={ styles.divider } />
<NavLink
to={ withSiteId(SESSIONS_PATH, siteId) }
className={ styles.nav }
activeClassName={ styles.active }
>
{ 'Sessions' }
</NavLink>
<NavLink
to={ withSiteId(ASSIST_PATH, siteId) }
className={ styles.nav }
activeClassName={ styles.active }
>
{ 'Assist' }
</NavLink>
<NavLink
to={ withSiteId(DASHBOARD_PATH, siteId) }
className={ styles.nav }
activeClassName={ styles.active }
isActive={ (_, location) => {
return location.pathname.includes(DASHBOARD_PATH) || location.pathname.includes(METRICS_PATH);
}}
>
<span>{ 'Dashboards' }</span>
</NavLink>
<div className={ styles.right }>
<Announcements />
<div className={ styles.divider } />
{ (boardingCompletion < 100 && !hideDiscover) && (
<React.Fragment>
<div
className={cn(styles.header, 'fixed w-full bg-white flex justify-between')}
style={{ height: '50px' }}
>
{!isPreferences && <DefaultMenuView siteId={siteId} />}
{isPreferences && <PreferencesView />}
<div className={styles.right}>
{boardingCompletion < 100 && !hideDiscover && (
<React.Fragment>
<OnboardingExplore onComplete={() => setHideDiscover(true)} />
<div className={ styles.divider } />
</React.Fragment>
)}
<Notifications />
<div className={ styles.divider } />
<Popup content={ `Preferences` } >
<NavLink to={ CLIENT_PATH } className={ styles.headerIcon }><Icon name="cog" size="20" /></NavLink>
</Popup>
<div className={ styles.divider } />
<div className={ styles.userDetails }>
<div className={cn(styles.userDetails, 'group cursor-pointer')}>
<Popup content={`Preferences`} disabled>
<div className="flex items-center">
<NavLink to={CLIENT_PATH}>
<Icon name="gear" size="20" color="gray-dark" />
</NavLink>
<SettingsMenu className="invisible group-hover:visible" account={account} />
</div>
</Popup>
</div>
<div className={cn(styles.userDetails, 'group cursor-pointer')}>
<div className="flex items-center">
<div className="mr-5">{ name }</div>
<Icon color="gray-medium" name="ellipsis-v" size="24" />
<div className="w-10 h-10 bg-tealx rounded-full flex items-center justify-center color-white">
{getInitials(name)}
</div>
</div>
<ul>
<li><button onClick={ onAccountClick }>{ 'Account' }</button></li>
<li><button onClick={ onLogoutClick }>{ 'Logout' }</button></li>
</ul>
<UserMenu className="invisible group-hover:visible" />
</div>
{<ErrorGenPanel />}
</div>
{ <ErrorGenPanel/> }
{showAlerts && <Alerts />}
</div>
);
};
export default withRouter(connect(
state => ({
account: state.getIn([ 'user', 'account' ]),
siteId: state.getIn([ 'site', 'siteId' ]),
sites: state.getIn([ 'site', 'list' ]),
showAlerts: state.getIn([ 'dashboard', 'showAlerts' ]),
boardingCompletion: state.getIn([ 'dashboard', 'boardingCompletion' ])
}),
{ onLogoutClick: logout, initSite, fetchMetadata },
)(Header));
export default withRouter(
connect(
(state) => ({
account: state.getIn(['user', 'account']),
siteId: state.getIn(['site', 'siteId']),
sites: state.getIn(['site', 'list']),
showAlerts: state.getIn(['dashboard', 'showAlerts']),
boardingCompletion: state.getIn(['dashboard', 'boardingCompletion']),
}),
{ onLogoutClick: logout, initSite, fetchMetadata }
)(Header)
);

View file

@ -24,13 +24,17 @@ function NewProjectButton(props: Props) {
};
return (
<div
className={cn('flex items-center justify-center py-3 cursor-pointer hover:bg-active-blue ', { disabled: !canAddProject })}
onClick={onClick}
>
<Icon name="plus" size={12} className="mr-2" color="teal" />
<span className="color-teal">Add New Project</span>
</div>
<li onClick={onClick}>
<Icon name="folder-plus" size="16" color="teal" />
<span className="ml-3 color-teal">Add Project</span>
</li>
// <div
// className={cn('flex items-center justify-center py-3 cursor-pointer hover:bg-active-blue ', { disabled: !canAddProject })}
// onClick={onClick}
// >
// <Icon name="plus" size={12} className="mr-2" color="teal" />
// <span className="color-teal">Add New Project</span>
// </div>
);
}

Some files were not shown because too many files have changed in this diff Show more