Merge branch 'dev' into live-se-red

This commit is contained in:
nick-delirium 2024-12-11 15:12:46 +01:00
commit 1385ba40a1
No known key found for this signature in database
GPG key ID: 93ABD695DF5FDBA0
79 changed files with 3329 additions and 2805 deletions

View file

@ -5,7 +5,7 @@ from apscheduler.schedulers.asyncio import AsyncIOScheduler
from decouple import config
from fastapi import FastAPI
from chalicelib.core import alerts_processor
from chalicelib.core.alerts import alerts_processor
from chalicelib.utils import pg_client

View file

@ -0,0 +1,10 @@
import logging

from decouple import config

logger = logging.getLogger(__name__)

# Feature-flag selector: when EXP_ALERTS is set, expose the experimental
# ClickHouse-backed alerts processor under the canonical name
# `alerts_processor`; otherwise fall back to the default implementation.
if config("EXP_ALERTS", cast=bool, default=False):
    # Fix: use the module-level logger (was `logging.info`, which logs via the
    # root logger and is dropped at the default WARNING root level).
    logger.info(">>> Using experimental alerts")
    from . import alerts_processor_ch as alerts_processor
else:
    from . import alerts_processor as alerts_processor

View file

@ -4,13 +4,14 @@ import logging
from pydantic_core._pydantic_core import ValidationError
import schemas
from chalicelib.core import alerts
from chalicelib.core import alerts_listener
from chalicelib.core import sessions
from chalicelib.core.alerts import alerts
from chalicelib.core.alerts import alerts_listener
from chalicelib.core.alerts import sessions
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
logger = logging.getLogger(__name__)
LeftToDb = {
schemas.AlertColumn.PERFORMANCE__DOM_CONTENT_LOADED__AVERAGE: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",

View file

@ -3,9 +3,9 @@ import logging
from pydantic_core._pydantic_core import ValidationError
import schemas
from chalicelib.core import alerts
from chalicelib.core import alerts_listener, alerts_processor
from chalicelib.core import sessions_exp as sessions
from chalicelib.core.alerts import alerts
from chalicelib.core.alerts import alerts_listener, alerts_processor
from chalicelib.core.alerts import sessions
from chalicelib.utils import pg_client, ch_client, exp_ch_helper
from chalicelib.utils.TimeUTC import TimeUTC

View file

@ -0,0 +1,6 @@
# Selector module: re-export a sessions implementation for the alerts package.
# The EXP_ALERTS flag chooses between the ClickHouse-backed module and the
# standard one; consumers import names from this package transparently.
from decouple import config

if config("EXP_ALERTS", cast=bool, default=False):
    # Experimental path: session queries served from ClickHouse.
    from chalicelib.core.sessions_ch import *
else:
    # Default path: the standard sessions module.
    from chalicelib.core.sessions import *

View file

@ -3,11 +3,12 @@ import logging
from typing import List, Union
import schemas
from chalicelib.core import events, metadata, projects, performance_event, metrics, sessions_favorite, sessions_legacy
from chalicelib.core import events, metadata, projects, performance_event, metrics, sessions_favorite, sessions
from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
from chalicelib.utils import sql_helper as sh
logger = logging.getLogger(__name__)
SESSION_PROJECTION_COLS_CH = """\
s.project_id,
s.session_id AS session_id,
@ -1690,24 +1691,4 @@ def check_recording_status(project_id: int) -> dict:
# TODO: rewrite this function to use ClickHouse
def search_sessions_by_ids(project_id: int, session_ids: list, sort_by: str = 'session_id',
ascending: bool = False) -> dict:
if session_ids is None or len(session_ids) == 0:
return {"total": 0, "sessions": []}
with pg_client.PostgresClient() as cur:
meta_keys = metadata.get(project_id=project_id)
params = {"project_id": project_id, "session_ids": tuple(session_ids)}
order_direction = 'ASC' if ascending else 'DESC'
main_query = cur.mogrify(f"""SELECT {sessions_legacy.SESSION_PROJECTION_BASE_COLS}
{"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])}
FROM public.sessions AS s
WHERE project_id=%(project_id)s
AND session_id IN %(session_ids)s
ORDER BY {sort_by} {order_direction};""", params)
cur.execute(main_query)
rows = cur.fetchall()
if len(meta_keys) > 0:
for s in rows:
s["metadata"] = {}
for m in meta_keys:
s["metadata"][m["key"]] = s.pop(f'metadata_{m["index"]}')
return {"total": len(rows), "sessions": helper.list_to_camel_case(rows)}
return sessions.search_sessions_by_ids(project_id, session_ids, sort_by, ascending)

View file

@ -765,30 +765,6 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
return n_critical_issues, issues_dict, total_drop_due_to_issues
def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id,
metric_format: schemas.MetricExtendedFormatType):
output = []
stages = filter_d.events
if len(stages) == 0:
logger.debug("no stages found")
return output, 0
# The result of the multi-stage query
rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
# Obtain the first part of the output
stages_list = get_stages(stages, rows, metric_format=metric_format)
if len(rows) == 0:
return stages_list, 0
# Obtain the second part of the output
total_drop_due_to_issues = get_issues(stages, rows,
first_stage=1,
last_stage=len(filter_d.events),
drop_only=True)
return stages_list, total_drop_due_to_issues
def get_issues_list(filter_d: schemas.CardSeriesFilterSchema, project_id, first_stage=None, last_stage=None):
output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []})
stages = filter_d.events

View file

@ -17,17 +17,6 @@ def get_main_sessions_table(timestamp=0):
return "experimental.sessions"
def get_user_favorite_sessions_table(timestamp=0):
return "experimental.user_favorite_sessions"
def get_user_viewed_sessions_table(timestamp=0):
return "experimental.user_viewed_sessions"
def get_user_viewed_errors_table(timestamp=0):
return "experimental.user_viewed_errors"
def get_main_js_errors_sessions_table(timestamp=0):
return get_main_events_table(timestamp=timestamp)

View file

@ -71,4 +71,5 @@ sourcemaps_reader=http://sourcemapreader-openreplay.app.svc.cluster.local:9000/s
STAGE=default-foss
TZ=UTC
EXP_CH_DRIVER=true
EXP_AUTOCOMPLETE=true
EXP_AUTOCOMPLETE=true
EXP_ALERTS=true

View file

@ -11,59 +11,6 @@ from .transformers_validators import transform_email, remove_whitespace, remove_
force_is_event, NAME_PATTERN, int_to_string, check_alphanumeric
def transform_old_filter_type(cls, values):
if values.get("type") is None:
return values
values["type"] = {
# filters
"USEROS": FilterType.USER_OS.value,
"USERBROWSER": FilterType.USER_BROWSER.value,
"USERDEVICE": FilterType.USER_DEVICE.value,
"USERCOUNTRY": FilterType.USER_COUNTRY.value,
"USERID": FilterType.USER_ID.value,
"USERANONYMOUSID": FilterType.USER_ANONYMOUS_ID.value,
"REFERRER": FilterType.REFERRER.value,
"REVID": FilterType.REV_ID.value,
"USEROS_IOS": FilterType.USER_OS_MOBILE.value,
"USERDEVICE_IOS": FilterType.USER_DEVICE_MOBILE.value,
"USERCOUNTRY_IOS": FilterType.USER_COUNTRY_MOBILE.value,
"USERID_IOS": FilterType.USER_ID_MOBILE.value,
"USERANONYMOUSID_IOS": FilterType.USER_ANONYMOUS_ID_MOBILE.value,
"REVID_IOS": FilterType.REV_ID_MOBILE.value,
"DURATION": FilterType.DURATION.value,
"PLATFORM": FilterType.PLATFORM.value,
"METADATA": FilterType.METADATA.value,
"ISSUE": FilterType.ISSUE.value,
"EVENTS_COUNT": FilterType.EVENTS_COUNT.value,
"UTM_SOURCE": FilterType.UTM_SOURCE.value,
"UTM_MEDIUM": FilterType.UTM_MEDIUM.value,
"UTM_CAMPAIGN": FilterType.UTM_CAMPAIGN.value,
# events:
"CLICK": EventType.CLICK.value,
"INPUT": EventType.INPUT.value,
"LOCATION": EventType.LOCATION.value,
"CUSTOM": EventType.CUSTOM.value,
"REQUEST": EventType.REQUEST.value,
"FETCH": EventType.REQUEST_DETAILS.value,
"GRAPHQL": EventType.GRAPHQL.value,
"STATEACTION": EventType.STATE_ACTION.value,
"ERROR": EventType.ERROR.value,
"CLICK_IOS": EventType.CLICK_MOBILE.value,
"INPUT_IOS": EventType.INPUT_MOBILE.value,
"VIEW_IOS": EventType.VIEW_MOBILE.value,
"CUSTOM_IOS": EventType.CUSTOM_MOBILE.value,
"REQUEST_IOS": EventType.REQUEST_MOBILE.value,
"ERROR_IOS": EventType.ERROR_MOBILE.value,
"DOM_COMPLETE": PerformanceEventType.LOCATION_DOM_COMPLETE.value,
"LARGEST_CONTENTFUL_PAINT_TIME": PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME.value,
"TTFB": PerformanceEventType.LOCATION_TTFB.value,
"AVG_CPU_LOAD": PerformanceEventType.LOCATION_AVG_CPU_LOAD.value,
"AVG_MEMORY_USAGE": PerformanceEventType.LOCATION_AVG_MEMORY_USAGE.value,
"FETCH_FAILED": PerformanceEventType.FETCH_FAILED.value,
}.get(values["type"], values["type"])
return values
class _GRecaptcha(BaseModel):
g_recaptcha_response: Optional[str] = Field(default=None, alias='g-recaptcha-response')
@ -602,7 +549,6 @@ class SessionSearchEventSchema2(BaseModel):
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
_single_to_list_values = field_validator('value', mode='before')(single_to_list)
_transform = model_validator(mode='before')(transform_old_filter_type)
@model_validator(mode="after")
def event_validator(self):
@ -639,7 +585,6 @@ class SessionSearchFilterSchema(BaseModel):
source: Optional[Union[ErrorSource, str]] = Field(default=None)
_remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values)
_transform = model_validator(mode='before')(transform_old_filter_type)
_single_to_list_values = field_validator('value', mode='before')(single_to_list)
@model_validator(mode="before")
@ -898,6 +843,11 @@ class CardSeriesSchema(BaseModel):
class MetricTimeseriesViewType(str, Enum):
LINE_CHART = "lineChart"
AREA_CHART = "areaChart"
BAR_CHART = "barChart"
PIE_CHART = "pieChart"
PROGRESS_CHART = "progressChart"
TABLE_CHART = "table"
METRIC_CHART = "metric"
class MetricTableViewType(str, Enum):
@ -1356,8 +1306,6 @@ class LiveSessionSearchFilterSchema(BaseModel):
operator: Literal[SearchEventOperator.IS, SearchEventOperator.CONTAINS] \
= Field(default=SearchEventOperator.CONTAINS)
_transform = model_validator(mode='before')(transform_old_filter_type)
@model_validator(mode="after")
def __validator(self):
if self.type is not None and self.type == LiveFilterType.METADATA:

View file

@ -0,0 +1,43 @@
package main
import (
"context"
analyticsConfig "openreplay/backend/internal/config/analytics"
"openreplay/backend/pkg/analytics"
"openreplay/backend/pkg/db/postgres/pool"
"openreplay/backend/pkg/logger"
"openreplay/backend/pkg/metrics"
analyticsMetrics "openreplay/backend/pkg/metrics/analytics"
databaseMetrics "openreplay/backend/pkg/metrics/database"
"openreplay/backend/pkg/metrics/web"
"openreplay/backend/pkg/server"
"openreplay/backend/pkg/server/api"
)
func main() {
ctx := context.Background()
log := logger.New()
cfg := analyticsConfig.New(log)
webMetrics := web.New("analytics")
metrics.New(log, append(webMetrics.List(), append(analyticsMetrics.List(), databaseMetrics.List()...)...))
pgConn, err := pool.New(cfg.Postgres.String())
if err != nil {
log.Fatal(ctx, "can't init postgres connection: %s", err)
}
defer pgConn.Close()
builder, err := analytics.NewServiceBuilder(log, cfg, webMetrics, pgConn)
if err != nil {
log.Fatal(ctx, "can't init services: %s", err)
}
router, err := api.NewRouter(&cfg.HTTP, log)
if err != nil {
log.Fatal(ctx, "failed while creating router: %s", err)
}
router.AddHandlers(api.NoPrefix, builder.AnalyticsAPI)
router.AddMiddlewares(builder.Auth.Middleware, builder.RateLimiter.Middleware, builder.AuditTrail.Middleware)
server.Run(ctx, log, &cfg.HTTP, router)
}

View file

@ -6,6 +6,7 @@ import (
config "openreplay/backend/internal/config/db"
"openreplay/backend/internal/db"
"openreplay/backend/internal/db/datasaver"
"openreplay/backend/pkg/db/clickhouse"
"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/db/postgres/pool"
"openreplay/backend/pkg/db/redis"
@ -33,9 +34,15 @@ func main() {
}
defer pgConn.Close()
// Init events module
pg := postgres.NewConn(log, pgConn)
defer pg.Close()
chConn := clickhouse.NewConnector(cfg.Clickhouse)
if err := chConn.Prepare(); err != nil {
log.Fatal(ctx, "can't prepare clickhouse: %s", err)
}
defer chConn.Stop()
// Init db proxy module (postgres + clickhouse + batches)
dbProxy := postgres.NewConn(log, pgConn, chConn)
defer dbProxy.Close()
// Init redis connection
redisClient, err := redis.New(&cfg.Redis)
@ -49,7 +56,7 @@ func main() {
tagsManager := tags.New(log, pgConn)
// Init data saver
saver := datasaver.New(log, cfg, pg, sessManager, tagsManager)
saver := datasaver.New(log, cfg, dbProxy, chConn, sessManager, tagsManager)
// Message filter
msgFilter := []int{

View file

@ -0,0 +1,29 @@
package analytics

import (
	"time"
	"openreplay/backend/internal/config/common"
	"openreplay/backend/internal/config/configurator"
	"openreplay/backend/internal/config/objectstorage"
	"openreplay/backend/internal/config/redis"
	"openreplay/backend/pkg/env"
	"openreplay/backend/pkg/logger"
)

// Config holds all settings for the analytics service. The shared groups
// (common.Config, Postgres, Redis, object storage, HTTP) are embedded so
// their env-tagged fields are populated together with the service-specific
// fields below.
type Config struct {
	common.Config
	common.Postgres
	redis.Redis
	objectstorage.ObjectsConfig
	common.HTTP
	// Local filesystem working directory; startup fails if FS_DIR is unset.
	FSDir string `env:"FS_DIR,required"`
	// NOTE(review): presumably a project-cache TTL (default 10m) — confirm
	// against the component that consumes it.
	ProjectExpiration time.Duration `env:"PROJECT_EXPIRATION,default=10m"`
	// Derived from the environment in New(), not via an env tag.
	WorkerID uint16
}

// New builds a Config, assigns the worker ID from the environment, and fills
// the remaining fields from environment variables via configurator.Process.
func New(log logger.Logger) *Config {
	cfg := &Config{WorkerID: env.WorkerID()}
	configurator.Process(log, cfg)
	return cfg
}

View file

@ -57,10 +57,18 @@ type Redshift struct {
// Clickhouse config
type Clickhouse struct {
URL string `env:"CLICKHOUSE_STRING"`
Database string `env:"CLICKHOUSE_DATABASE,default=default"`
UserName string `env:"CLICKHOUSE_USERNAME,default=default"`
Password string `env:"CLICKHOUSE_PASSWORD,default="`
URL string `env:"CLICKHOUSE_STRING"`
Database string `env:"CLICKHOUSE_DATABASE,default=default"`
UserName string `env:"CLICKHOUSE_USERNAME,default=default"`
Password string `env:"CLICKHOUSE_PASSWORD,default="`
LegacyUserName string `env:"CH_USERNAME,default=default"`
LegacyPassword string `env:"CH_PASSWORD,default="`
}
// GetTrimmedURL returns the ClickHouse connection string without the
// "tcp://" scheme prefix and without a trailing "/default" database suffix,
// when either is present.
func (cfg *Clickhouse) GetTrimmedURL() string {
	withoutScheme := strings.TrimPrefix(cfg.URL, "tcp://")
	return strings.TrimSuffix(withoutScheme, "/default")
}
// ElasticSearch config

View file

@ -11,6 +11,7 @@ import (
type Config struct {
common.Config
common.Postgres
common.Clickhouse
redis.Redis
ProjectExpiration time.Duration `env:"PROJECT_EXPIRATION,default=10m"`
LoggerTimeout int `env:"LOG_QUEUE_STATS_INTERVAL_SEC,required"`

View file

@ -0,0 +1,9 @@
package datasaver

import (
	"openreplay/backend/pkg/messages"
)

// init performs no work in this build.
// NOTE(review): appears to be a hook with a non-empty implementation in
// another build variant — confirm.
func (s *saverImpl) init() {}

// sendToFTS is a no-op here: messages are not forwarded to any full-text
// search pipeline in this build. Kept so Handle can call it unconditionally.
func (s *saverImpl) sendToFTS(msg messages.Message, projID uint32) {}

View file

@ -1,17 +0,0 @@
package datasaver
import (
. "openreplay/backend/pkg/messages"
)
func (s *saverImpl) init() {
// noop
}
func (s *saverImpl) handleExtraMessage(msg Message) error {
switch m := msg.(type) {
case *PerformanceTrackAggr:
return s.pg.InsertWebStatsPerformance(m)
}
return nil
}

View file

@ -0,0 +1,72 @@
package datasaver

import (
	"context"
	"openreplay/backend/pkg/messages"
	"openreplay/backend/pkg/sessions"
)

// handleMobileMessage persists one mobile-SDK message into Postgres and/or
// ClickHouse and keeps the cached session counters in sync.
//
// Conventions visible in the cases below:
//   - Postgres writes run first; the ClickHouse insert is only attempted
//     after all preceding writes succeeded.
//   - Unknown message types fall through and return nil (silently ignored).
//
// NOTE(review): sessCtx is currently unused in this function — presumably
// kept for signature parity with handleWebMessage; confirm before removing.
func (s *saverImpl) handleMobileMessage(sessCtx context.Context, session *sessions.Session, msg messages.Message) error {
	switch m := msg.(type) {
	case *messages.MobileSessionEnd:
		// Final session row goes to ClickHouse only.
		return s.ch.InsertMobileSession(session)
	case *messages.MobileUserID:
		if err := s.sessions.UpdateUserID(session.SessionID, m.ID); err != nil {
			return err
		}
		// Autocomplete insert; its result is not checked here.
		s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERIDMOBILE", m.ID)
		return nil
	case *messages.MobileUserAnonymousID:
		if err := s.sessions.UpdateAnonymousID(session.SessionID, m.ID); err != nil {
			return err
		}
		s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERANONYMOUSIDMOBILE", m.ID)
		return nil
	case *messages.MobileMetadata:
		return s.sessions.UpdateMetadata(m.SessionID(), m.Key, m.Value)
	case *messages.MobileEvent:
		if err := s.pg.InsertMobileEvent(session, m); err != nil {
			return err
		}
		return s.ch.InsertMobileCustom(session, m)
	case *messages.MobileClickEvent:
		if err := s.pg.InsertMobileClickEvent(session, m); err != nil {
			return err
		}
		// One more event, zero pages.
		if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 0); err != nil {
			return err
		}
		return s.ch.InsertMobileClick(session, m)
	case *messages.MobileSwipeEvent:
		if err := s.pg.InsertMobileSwipeEvent(session, m); err != nil {
			return err
		}
		if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 0); err != nil {
			return err
		}
		return s.ch.InsertMobileSwipe(session, m)
	case *messages.MobileInputEvent:
		if err := s.pg.InsertMobileInputEvent(session, m); err != nil {
			return err
		}
		if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 0); err != nil {
			return err
		}
		return s.ch.InsertMobileInput(session, m)
	case *messages.MobileNetworkCall:
		if err := s.pg.InsertMobileNetworkCall(session, m); err != nil {
			return err
		}
		// Request payload is stored only when the session opted in.
		return s.ch.InsertMobileRequest(session, m, session.SaveRequestPayload)
	case *messages.MobileCrash:
		if err := s.pg.InsertMobileCrash(session.SessionID, session.ProjectID, m); err != nil {
			return err
		}
		// A crash counts as one issue with a fixed score of 1000.
		if err := s.sessions.UpdateIssuesStats(session.SessionID, 1, 1000); err != nil {
			return err
		}
		return s.ch.InsertMobileCrash(session, m)
	}
	return nil
}

View file

@ -30,11 +30,18 @@ type saverImpl struct {
tags tags.Tags
}
func New(log logger.Logger, cfg *db.Config, pg *postgres.Conn, session sessions.Sessions, tags tags.Tags) Saver {
func New(log logger.Logger, cfg *db.Config, pg *postgres.Conn, ch clickhouse.Connector, session sessions.Sessions, tags tags.Tags) Saver {
switch {
case pg == nil:
log.Fatal(context.Background(), "pg pool is empty")
case ch == nil:
log.Fatal(context.Background(), "ch pool is empty")
}
s := &saverImpl{
log: log,
cfg: cfg,
pg: pg,
ch: ch,
sessions: session,
tags: tags,
}
@ -43,21 +50,34 @@ func New(log logger.Logger, cfg *db.Config, pg *postgres.Conn, session sessions.
}
func (s *saverImpl) Handle(msg Message) {
sessCtx := context.WithValue(context.Background(), "sessionID", msg.SessionID())
if msg.TypeID() == MsgCustomEvent {
defer s.Handle(types.WrapCustomEvent(msg.(*CustomEvent)))
}
var (
sessCtx = context.WithValue(context.Background(), "sessionID", msg.SessionID())
session *sessions.Session
err error
)
if msg.TypeID() == MsgSessionEnd || msg.TypeID() == MsgMobileSessionEnd {
session, err = s.sessions.GetUpdated(msg.SessionID(), true)
} else {
session, err = s.sessions.Get(msg.SessionID())
}
if err != nil || session == nil {
s.log.Error(sessCtx, "error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg)
return
}
if IsMobileType(msg.TypeID()) {
// Handle Mobile messages
if err := s.handleMobileMessage(msg); err != nil {
if err := s.handleMobileMessage(sessCtx, session, msg); err != nil {
if !postgres.IsPkeyViolation(err) {
s.log.Error(sessCtx, "mobile message insertion error, msg: %+v, err: %s", msg, err)
}
return
}
} else {
// Handle Web messages
if err := s.handleMessage(msg); err != nil {
if err := s.handleWebMessage(sessCtx, session, msg); err != nil {
if !postgres.IsPkeyViolation(err) {
s.log.Error(sessCtx, "web message insertion error, msg: %+v, err: %s", msg, err)
}
@ -65,180 +85,22 @@ func (s *saverImpl) Handle(msg Message) {
}
}
if err := s.handleExtraMessage(msg); err != nil {
s.log.Error(sessCtx, "extra message insertion error, msg: %+v, err: %s", msg, err)
}
s.sendToFTS(msg, session.ProjectID)
return
}
func (s *saverImpl) handleMobileMessage(msg Message) error {
session, err := s.sessions.Get(msg.SessionID())
if err != nil {
return err
}
switch m := msg.(type) {
case *MobileUserID:
if err = s.sessions.UpdateUserID(session.SessionID, m.ID); err != nil {
return err
}
s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERIDMOBILE", m.ID)
return nil
case *MobileUserAnonymousID:
if err = s.sessions.UpdateAnonymousID(session.SessionID, m.ID); err != nil {
return err
}
s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERANONYMOUSIDMOBILE", m.ID)
return nil
case *MobileMetadata:
return s.sessions.UpdateMetadata(m.SessionID(), m.Key, m.Value)
case *MobileEvent:
return s.pg.InsertMobileEvent(session, m)
case *MobileClickEvent:
if err := s.pg.InsertMobileClickEvent(session, m); err != nil {
return err
}
return s.sessions.UpdateEventsStats(session.SessionID, 1, 0)
case *MobileSwipeEvent:
if err := s.pg.InsertMobileSwipeEvent(session, m); err != nil {
return err
}
return s.sessions.UpdateEventsStats(session.SessionID, 1, 0)
case *MobileInputEvent:
if err := s.pg.InsertMobileInputEvent(session, m); err != nil {
return err
}
return s.sessions.UpdateEventsStats(session.SessionID, 1, 0)
case *MobileNetworkCall:
return s.pg.InsertMobileNetworkCall(session, m)
case *MobileCrash:
if err := s.pg.InsertMobileCrash(session.SessionID, session.ProjectID, m); err != nil {
return err
}
return s.sessions.UpdateIssuesStats(session.SessionID, 1, 1000)
}
return nil
}
func (s *saverImpl) handleMessage(msg Message) error {
session, err := s.sessions.Get(msg.SessionID())
if err != nil {
return err
}
sessCtx := context.WithValue(context.Background(), "sessionID", msg.SessionID())
switch m := msg.(type) {
case *SessionStart:
return s.pg.HandleStartEvent(m)
case *SessionEnd:
return s.pg.HandleEndEvent(m.SessionID())
case *Metadata:
return s.sessions.UpdateMetadata(m.SessionID(), m.Key, m.Value)
case *IssueEvent:
if m.Type == "dead_click" || m.Type == "click_rage" {
if s.tags.ShouldIgnoreTag(session.ProjectID, m.Context) {
return nil
}
}
err = s.pg.InsertIssueEvent(session, m)
if err != nil {
return err
}
return s.sessions.UpdateIssuesStats(session.SessionID, 0, postgres.GetIssueScore(m.Type))
case *CustomIssue:
ie := &IssueEvent{
Type: "custom",
Timestamp: m.Timestamp,
MessageID: m.Index,
ContextString: m.Name,
Payload: m.Payload,
}
ie.SetMeta(m.Meta())
if err = s.pg.InsertIssueEvent(session, ie); err != nil {
return err
}
return s.sessions.UpdateIssuesStats(session.SessionID, 0, postgres.GetIssueScore(ie.Type))
case *UserID:
if err = s.sessions.UpdateUserID(session.SessionID, m.ID); err != nil {
return err
}
s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERID", m.ID)
return nil
case *UserAnonymousID:
if err = s.sessions.UpdateAnonymousID(session.SessionID, m.ID); err != nil {
return err
}
s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERANONYMOUSID", m.ID)
return nil
case *CustomEvent:
return s.pg.InsertWebCustomEvent(session, m)
case *MouseClick:
if err = s.pg.InsertWebClickEvent(session, m); err != nil {
return err
}
return s.sessions.UpdateEventsStats(session.SessionID, 1, 0)
case *PageEvent:
if err = s.pg.InsertWebPageEvent(session, m); err != nil {
return err
}
s.sessions.UpdateReferrer(session.SessionID, m.Referrer)
s.sessions.UpdateUTM(session.SessionID, m.URL)
return s.sessions.UpdateEventsStats(session.SessionID, 1, 1)
case *NetworkRequest:
return s.pg.InsertWebNetworkRequest(session, m)
case *GraphQL:
return s.pg.InsertWebGraphQL(session, m)
case *JSException:
wrapper, err := types.WrapJSException(m)
if err != nil {
s.log.Warn(sessCtx, "error on wrapping JSException: %v", err)
}
if err = s.pg.InsertWebErrorEvent(session, wrapper); err != nil {
return err
}
return s.sessions.UpdateIssuesStats(session.SessionID, 1, 1000)
case *IntegrationEvent:
return s.pg.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m))
case *InputChange:
if err = s.pg.InsertInputChangeEvent(session, m); err != nil {
return err
}
return s.sessions.UpdateEventsStats(session.SessionID, 1, 0)
case *MouseThrashing:
if err = s.pg.InsertMouseThrashing(session, m); err != nil {
return err
}
return s.sessions.UpdateIssuesStats(session.SessionID, 0, 50)
case *CanvasNode:
if err = s.pg.InsertCanvasNode(session, m); err != nil {
return err
}
case *TagTrigger:
if err = s.pg.InsertTagTrigger(session, m); err != nil {
return err
}
}
return nil
}
func (s *saverImpl) Commit() error {
if s.pg != nil {
s.pg.Commit()
}
if s.ch != nil {
s.ch.Commit()
}
s.pg.Commit()
s.ch.Commit()
return nil
}
func (s *saverImpl) Close() error {
if s.pg != nil {
if err := s.pg.Close(); err != nil {
s.log.Error(context.Background(), "pg.Close error: %s", err)
}
if err := s.pg.Close(); err != nil {
s.log.Error(context.Background(), "pg.Close error: %s", err)
}
if s.ch != nil {
if err := s.ch.Stop(); err != nil {
s.log.Error(context.Background(), "ch.Close error: %s", err)
}
if err := s.ch.Stop(); err != nil {
s.log.Error(context.Background(), "ch.Close error: %s", err)
}
return nil
}

View file

@ -0,0 +1,146 @@
package datasaver
import (
"context"
"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/db/types"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/sessions"
)
func (s *saverImpl) handleWebMessage(sessCtx context.Context, session *sessions.Session, msg messages.Message) error {
switch m := msg.(type) {
case *messages.SessionStart:
return s.pg.HandleStartEvent(m)
case *messages.SessionEnd:
if err := s.pg.HandleEndEvent(m.SessionID()); err != nil {
return err
}
session, err := s.sessions.GetUpdated(m.SessionID(), true)
if err != nil {
return err
}
return s.ch.InsertWebSession(session)
case *messages.Metadata:
return s.sessions.UpdateMetadata(m.SessionID(), m.Key, m.Value)
case *messages.IssueEvent:
if m.Type == "dead_click" || m.Type == "click_rage" {
if s.tags.ShouldIgnoreTag(session.ProjectID, m.Context) {
return nil
}
}
if err := s.pg.InsertIssueEvent(session, m); err != nil {
return err
}
if err := s.sessions.UpdateIssuesStats(session.SessionID, 0, postgres.GetIssueScore(m.Type)); err != nil {
return err
}
return s.ch.InsertIssue(session, m)
case *messages.CustomIssue:
ie := &messages.IssueEvent{
Type: "custom",
Timestamp: m.Timestamp,
MessageID: m.Index,
ContextString: m.Name,
Payload: m.Payload,
}
ie.SetMeta(m.Meta())
if err := s.pg.InsertIssueEvent(session, ie); err != nil {
return err
}
return s.sessions.UpdateIssuesStats(session.SessionID, 0, postgres.GetIssueScore(ie.Type))
case *messages.UserID:
if err := s.sessions.UpdateUserID(session.SessionID, m.ID); err != nil {
return err
}
s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERID", m.ID)
return nil
case *messages.UserAnonymousID:
if err := s.sessions.UpdateAnonymousID(session.SessionID, m.ID); err != nil {
return err
}
s.pg.InsertAutocompleteValue(session.SessionID, session.ProjectID, "USERANONYMOUSID", m.ID)
return nil
case *messages.CustomEvent:
if err := s.pg.InsertWebCustomEvent(session, m); err != nil {
return err
}
return s.ch.InsertCustom(session, m)
case *messages.MouseClick:
if err := s.pg.InsertWebClickEvent(session, m); err != nil {
return err
}
if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 0); err != nil {
return err
}
return s.ch.InsertWebClickEvent(session, m)
case *messages.PageEvent:
if err := s.pg.InsertWebPageEvent(session, m); err != nil {
return err
}
s.sessions.UpdateReferrer(session.SessionID, m.Referrer)
s.sessions.UpdateUTM(session.SessionID, m.URL)
if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 1); err != nil {
return err
}
return s.ch.InsertWebPageEvent(session, m)
case *messages.NetworkRequest:
if err := s.pg.InsertWebNetworkRequest(session, m); err != nil {
return err
}
return s.ch.InsertRequest(session, m, session.SaveRequestPayload)
case *messages.GraphQL:
if err := s.pg.InsertWebGraphQL(session, m); err != nil {
return err
}
return s.ch.InsertGraphQL(session, m)
case *messages.JSException:
wrapper, err := types.WrapJSException(m)
if err != nil {
s.log.Warn(sessCtx, "error on wrapping JSException: %v", err)
}
if err = s.pg.InsertWebErrorEvent(session, wrapper); err != nil {
return err
}
if err := s.sessions.UpdateIssuesStats(session.SessionID, 1, 1000); err != nil {
return err
}
return s.ch.InsertWebErrorEvent(session, wrapper)
case *messages.IntegrationEvent:
if err := s.pg.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m)); err != nil {
return err
}
return s.ch.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m))
case *messages.InputChange:
if err := s.pg.InsertInputChangeEvent(session, m); err != nil {
return err
}
if err := s.sessions.UpdateEventsStats(session.SessionID, 1, 0); err != nil {
return err
}
return s.ch.InsertWebInputDuration(session, m)
case *messages.MouseThrashing:
if err := s.pg.InsertMouseThrashing(session, m); err != nil {
return err
}
if err := s.sessions.UpdateIssuesStats(session.SessionID, 0, 50); err != nil {
return err
}
return s.ch.InsertMouseThrashing(session, m)
case *messages.CanvasNode:
if err := s.pg.InsertCanvasNode(session, m); err != nil {
return err
}
case *messages.TagTrigger:
if err := s.pg.InsertTagTrigger(session, m); err != nil {
return err
}
case *messages.PerformanceTrackAggr:
if err := s.pg.InsertWebStatsPerformance(m); err != nil {
return err
}
return s.ch.InsertWebPerformanceTrackAggr(session, m)
}
return nil
}

View file

@ -0,0 +1,205 @@
package api
import (
"encoding/json"
"fmt"
"github.com/gorilla/mux"
"net/http"
"openreplay/backend/pkg/server/api"
"openreplay/backend/pkg/server/user"
"strconv"
"time"
)
// getId extracts the numeric {id} path variable from the request route.
// It returns an error when the variable is absent, empty, or not an integer.
func getId(r *http.Request) (int, error) {
	raw, ok := mux.Vars(r)["id"]
	if !ok || raw == "" {
		return 0, fmt.Errorf("invalid dashboard ID")
	}
	parsed, err := strconv.Atoi(raw)
	if err != nil {
		return 0, fmt.Errorf("invalid dashboard ID")
	}
	return parsed, nil
}
// createDashboard handles POST .../dashboards: parses the JSON body into a
// CreateDashboardRequest and echoes it back as a dashboard payload.
// NOTE(review): the response uses a hard-coded DashboardID of 1 and nothing
// is persisted — looks like scaffolding; confirm before relying on it.
func (e *handlersImpl) createDashboard(w http.ResponseWriter, r *http.Request) {
	startTime := time.Now()
	bodySize := 0

	// Reject bodies larger than the configured JSON size limit.
	bodyBytes, err := api.ReadBody(e.log, w, r, e.jsonSizeLimit)
	if err != nil {
		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize)
		return
	}
	bodySize = len(bodyBytes)

	req := &CreateDashboardRequest{}
	if err := json.Unmarshal(bodyBytes, req); err != nil {
		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
		return
	}

	resp := &GetDashboardResponse{
		Dashboard: Dashboard{
			DashboardID: 1,
			Name:        req.Name,
			Description: req.Description,
			IsPublic:    req.IsPublic,
			IsPinned:    req.IsPinned,
		},
	}

	// NOTE(review): unchecked type assertion — panics if the auth middleware
	// did not store a *user.User under "userData"; confirm middleware order.
	currentUser := r.Context().Value("userData").(*user.User)
	e.log.Info(r.Context(), "User ID: ", currentUser.ID)

	e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
}
// getDashboards handles GET .../dashboards.
// NOTE(review): returns a hard-coded single-item list — scaffolding; the id
// extraction is intentionally commented out for this collection route.
func (e *handlersImpl) getDashboards(w http.ResponseWriter, r *http.Request) {
	startTime := time.Now()
	bodySize := 0
	//id, err := getId(r)
	//if err != nil {
	//	e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
	//	return
	//}
	resp := &GetDashboardsResponse{
		Dashboards: []Dashboard{
			{
				DashboardID: 1,
				Name:        "Dashboard",
				Description: "Description",
				IsPublic:    true,
				IsPinned:    false,
			},
		},
		Total: 1,
	}
	e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
}
// getDashboard handles GET .../dashboards/{id}.
// NOTE(review): only the requested id is echoed back; the rest of the
// payload is hard-coded — scaffolding pending a real service lookup.
func (e *handlersImpl) getDashboard(w http.ResponseWriter, r *http.Request) {
	startTime := time.Now()
	bodySize := 0
	id, err := getId(r)
	if err != nil {
		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
		return
	}
	resp := &GetDashboardResponse{
		Dashboard: Dashboard{
			DashboardID: id,
			Name:        "Dashboard",
			Description: "Description",
			IsPublic:    true,
			IsPinned:    false,
		},
	}
	e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
}
// updateDashboard handles PUT/PATCH .../dashboards/{id}: parses the JSON body
// and echoes it back. NOTE(review): the {id} extraction is commented out and
// the response uses a hard-coded DashboardID of 1; nothing is persisted.
func (e *handlersImpl) updateDashboard(w http.ResponseWriter, r *http.Request) {
	startTime := time.Now()
	bodySize := 0
	//id, err := getId(r)
	//if err != nil {
	//	e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
	//	return
	//}

	// Reject bodies larger than the configured JSON size limit.
	bodyBytes, err := api.ReadBody(e.log, w, r, e.jsonSizeLimit)
	if err != nil {
		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize)
		return
	}
	bodySize = len(bodyBytes)

	req := &UpdateDashboardRequest{}
	if err := json.Unmarshal(bodyBytes, req); err != nil {
		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
		return
	}

	resp := &GetDashboardResponse{
		Dashboard: Dashboard{
			DashboardID: 1,
			Name:        req.Name,
			Description: req.Description,
			IsPublic:    req.IsPublic,
			IsPinned:    req.IsPinned,
		},
	}
	e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize)
}
// deleteDashboard handles DELETE .../dashboards/{id}.
// NOTE(review): no deletion is performed yet — the id extraction is commented
// out; the handler just logs and returns 200.
func (e *handlersImpl) deleteDashboard(w http.ResponseWriter, r *http.Request) {
	startTime := time.Now()
	bodySize := 0
	//id, err := getId(r)
	//if err != nil {
	//	e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize)
	//	return
	//}
	e.log.Info(r.Context(), "Dashboard deleted")
	e.responser.ResponseOK(e.log, r.Context(), w, startTime, r.URL.Path, bodySize)
}
// pinDashboard handles pinning a dashboard.
// Stub: logs the action and answers 200 without touching storage.
func (e *handlersImpl) pinDashboard(w http.ResponseWriter, r *http.Request) {
	begin := time.Now()
	const bodySize = 0
	e.log.Info(r.Context(), "Dashboard pinned")
	e.responser.ResponseOK(e.log, r.Context(), w, begin, r.URL.Path, bodySize)
}
// addCardToDashboard attaches cards to a dashboard.
// Stub: logs the action and answers 200 without touching storage.
func (e *handlersImpl) addCardToDashboard(w http.ResponseWriter, r *http.Request) {
	begin := time.Now()
	const bodySize = 0
	e.log.Info(r.Context(), "Card added to dashboard")
	e.responser.ResponseOK(e.log, r.Context(), w, begin, r.URL.Path, bodySize)
}
// removeCardFromDashboard detaches cards from a dashboard.
// Stub: answers 200 without touching storage or logging.
func (e *handlersImpl) removeCardFromDashboard(w http.ResponseWriter, r *http.Request) {
	begin := time.Now()
	const bodySize = 0
	e.responser.ResponseOK(e.log, r.Context(), w, begin, r.URL.Path, bodySize)
}

View file

@ -0,0 +1,40 @@
package api
import (
config "openreplay/backend/internal/config/analytics"
"openreplay/backend/pkg/analytics/service"
"openreplay/backend/pkg/logger"
"openreplay/backend/pkg/objectstorage"
"openreplay/backend/pkg/server/api"
"openreplay/backend/pkg/server/keys"
)
// handlersImpl bundles the dependencies shared by all analytics HTTP handlers.
type handlersImpl struct {
	log           logger.Logger               // structured logger
	responser     *api.Responser              // centralized HTTP response + metrics writer
	objStorage    objectstorage.ObjectStorage // object storage client
	jsonSizeLimit int64                       // maximum accepted JSON request body size, bytes
	keys          keys.Keys                   // API key management
	service       service.Service             // analytics service layer
}
// GetAll returns the route table for the analytics API: dashboard CRUD
// endpoints scoped to a project.
// NOTE(review): pinDashboard, addCardToDashboard and removeCardFromDashboard
// are defined but not registered here — confirm whether those routes are
// intentionally omitted or missing.
func (e *handlersImpl) GetAll() []*api.Description {
	return []*api.Description{
		{"/v1/analytics/{projectId}/dashboards", e.createDashboard, "POST"},
		{"/v1/analytics/{projectId}/dashboards", e.getDashboards, "GET"},
		{"/v1/analytics/{projectId}/dashboards/{id}", e.getDashboard, "GET"},
		{"/v1/analytics/{projectId}/dashboards/{id}", e.updateDashboard, "PUT"},
		{"/v1/analytics/{projectId}/dashboards/{id}", e.deleteDashboard, "DELETE"},
	}
}
// NewHandlers assembles the analytics API handler set with its dependencies.
func NewHandlers(log logger.Logger, cfg *config.Config, responser *api.Responser, objStore objectstorage.ObjectStorage, keys keys.Keys, service service.Service) (api.Handlers, error) {
	impl := &handlersImpl{
		log:           log,
		responser:     responser,
		objStorage:    objStore,
		jsonSizeLimit: cfg.JsonSizeLimit,
		keys:          keys,
		service:       service,
	}
	return impl, nil
}

View file

@ -0,0 +1,60 @@
package api
// Dashboard is the wire representation of a dashboard entity, shared by all
// dashboard responses.
type Dashboard struct {
	DashboardID int    `json:"dashboard_id"`
	Name        string `json:"name"`
	Description string `json:"description"`
	IsPublic    bool   `json:"is_public"`
	IsPinned    bool   `json:"is_pinned"`
}

// CreateDashboardResponse carries the identifier of a newly created dashboard.
type CreateDashboardResponse struct {
	DashboardID int `json:"dashboard_id"`
}

// GetDashboardResponse wraps a single dashboard; fields are inlined into the
// JSON object via the embedded Dashboard.
type GetDashboardResponse struct {
	Dashboard
}

// GetDashboardsResponse is a paginated dashboard listing.
type GetDashboardsResponse struct {
	Dashboards []Dashboard `json:"dashboards"`
	Total      uint64      `json:"total"`
}

// REQUESTS

// CreateDashboardRequest is the payload for POST .../dashboards.
type CreateDashboardRequest struct {
	Name        string `json:"name"`
	Description string `json:"description"`
	IsPublic    bool   `json:"is_public"`
	IsPinned    bool   `json:"is_pinned"`
	Metrics     []int  `json:"metrics"`
}

// GetDashboardsRequest carries pagination/filter options for listing.
// NOTE(review): tags mix snake_case and camelCase ("filterBy") — confirm the
// client really sends camelCase for that field.
type GetDashboardsRequest struct {
	Page     uint64 `json:"page"`
	Limit    uint64 `json:"limit"`
	Order    string `json:"order"`
	Query    string `json:"query"`
	FilterBy string `json:"filterBy"`
}

// UpdateDashboardRequest is the payload for PUT .../dashboards/{id}.
type UpdateDashboardRequest struct {
	Name        string `json:"name"`
	Description string `json:"description"`
	IsPublic    bool   `json:"is_public"`
	IsPinned    bool   `json:"is_pinned"`
	Metrics     []int  `json:"metrics"`
}

// PinDashboardRequest toggles the pinned flag of a dashboard.
type PinDashboardRequest struct {
	IsPinned bool `json:"is_pinned"`
}

// AddCardToDashboardRequest lists card ids to attach to a dashboard.
type AddCardToDashboardRequest struct {
	CardIDs []int `json:"card_ids"`
}

// DeleteCardFromDashboardRequest lists card ids to detach from a dashboard.
type DeleteCardFromDashboardRequest struct {
	CardIDs []int `json:"card_ids"`
}

View file

@ -0,0 +1,57 @@
package analytics
import (
"openreplay/backend/pkg/metrics/web"
"openreplay/backend/pkg/server/tracer"
"time"
"openreplay/backend/internal/config/analytics"
analyticsAPI "openreplay/backend/pkg/analytics/api"
"openreplay/backend/pkg/analytics/service"
"openreplay/backend/pkg/db/postgres/pool"
"openreplay/backend/pkg/logger"
"openreplay/backend/pkg/objectstorage/store"
"openreplay/backend/pkg/server/api"
"openreplay/backend/pkg/server/auth"
"openreplay/backend/pkg/server/keys"
"openreplay/backend/pkg/server/limiter"
)
// ServicesBuilder aggregates the fully-constructed services the analytics
// HTTP server needs: authentication, per-user rate limiting, audit trail
// tracing, and the analytics API handlers.
type ServicesBuilder struct {
	Auth         auth.Auth
	RateLimiter  *limiter.UserRateLimiter
	AuditTrail   tracer.Tracer
	AnalyticsAPI api.Handlers
}
// NewServiceBuilder constructs every dependency of the analytics service:
// object storage, API keys, the audit-trail tracer, the analytics service
// itself, and the HTTP handler set. Any constructor failure aborts the build.
func NewServiceBuilder(log logger.Logger, cfg *analytics.Config, webMetrics web.Web, pgconn pool.Pool) (*ServicesBuilder, error) {
	objStore, err := store.NewStore(&cfg.ObjectsConfig)
	if err != nil {
		return nil, err
	}
	newKeys := keys.NewKeys(log, pgconn)
	responser := api.NewResponser(webMetrics)
	auditTrail, err := tracer.NewTracer(log, pgconn)
	if err != nil {
		return nil, err
	}
	analyticsService, err := service.NewService(log, pgconn, objStore)
	if err != nil {
		return nil, err
	}
	// Reuse the keys instance built above; the original constructed a second,
	// redundant keys.NewKeys(log, pgconn) just for the handlers.
	handlers, err := analyticsAPI.NewHandlers(log, cfg, responser, objStore, newKeys, analyticsService)
	if err != nil {
		return nil, err
	}
	return &ServicesBuilder{
		Auth:         auth.NewAuth(log, cfg.JWTSecret, cfg.JWTSpotSecret, pgconn, newKeys),
		RateLimiter:  limiter.NewUserRateLimiter(10, 30, 1*time.Minute, 5*time.Minute),
		AuditTrail:   auditTrail,
		AnalyticsAPI: handlers,
	}, nil
}

View file

@ -0,0 +1,34 @@
package service
import (
"errors"
"openreplay/backend/pkg/db/postgres/pool"
"openreplay/backend/pkg/logger"
"openreplay/backend/pkg/objectstorage"
)
// Service is the analytics service facade.
// NOTE(review): currently empty — methods are presumably added as the
// analytics feature is built out.
type Service interface {
}

// serviceImpl is the default Service backed by Postgres and object storage.
type serviceImpl struct {
	log     logger.Logger               // structured logger
	conn    pool.Pool                   // Postgres connection pool
	storage objectstorage.ObjectStorage // object storage client
}
// NewService validates its dependencies and returns the analytics service.
// All three dependencies are mandatory; a nil one yields an error.
func NewService(log logger.Logger, conn pool.Pool, storage objectstorage.ObjectStorage) (Service, error) {
	if log == nil {
		return nil, errors.New("logger is empty")
	}
	if conn == nil {
		return nil, errors.New("connection pool is empty")
	}
	if storage == nil {
		return nil, errors.New("object storage is empty")
	}
	svc := &serviceImpl{
		log:     log,
		conn:    conn,
		storage: storage,
	}
	return svc, nil
}

View file

@ -5,10 +5,11 @@ import (
"errors"
"fmt"
"log"
"openreplay/backend/pkg/metrics/database"
"time"
"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
"openreplay/backend/pkg/metrics/database"
)
type Bulk interface {

View file

@ -1,19 +1,31 @@
package clickhouse
import (
"errors"
"fmt"
"log"
"strings"
"time"
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
"openreplay/backend/internal/config/common"
"openreplay/backend/pkg/db/types"
"openreplay/backend/pkg/hashid"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/sessions"
"openreplay/backend/pkg/url"
)
type Connector interface {
Prepare() error
Commit() error
Stop() error
// Web
InsertWebSession(session *sessions.Session) error
InsertWebPageEvent(session *sessions.Session, msg *messages.PageEvent) error
InsertWebClickEvent(session *sessions.Session, msg *messages.MouseClick) error
InsertWebInputEvent(session *sessions.Session, msg *messages.InputEvent) error
InsertWebErrorEvent(session *sessions.Session, msg *types.ErrorEvent) error
InsertWebPerformanceTrackAggr(session *sessions.Session, msg *messages.PerformanceTrackAggr) error
InsertAutocomplete(session *sessions.Session, msgType, msgValue string) error
@ -21,4 +33,669 @@ type Connector interface {
InsertCustom(session *sessions.Session, msg *messages.CustomEvent) error
InsertGraphQL(session *sessions.Session, msg *messages.GraphQL) error
InsertIssue(session *sessions.Session, msg *messages.IssueEvent) error
InsertWebInputDuration(session *sessions.Session, msg *messages.InputChange) error
InsertMouseThrashing(session *sessions.Session, msg *messages.MouseThrashing) error
// Mobile
InsertMobileSession(session *sessions.Session) error
InsertMobileCustom(session *sessions.Session, msg *messages.MobileEvent) error
InsertMobileClick(session *sessions.Session, msg *messages.MobileClickEvent) error
InsertMobileSwipe(session *sessions.Session, msg *messages.MobileSwipeEvent) error
InsertMobileInput(session *sessions.Session, msg *messages.MobileInputEvent) error
InsertMobileRequest(session *sessions.Session, msg *messages.MobileNetworkCall, savePayload bool) error
InsertMobileCrash(session *sessions.Session, msg *messages.MobileCrash) error
}
// task is a unit of work for the background worker: a snapshot of bulks to
// flush to ClickHouse.
type task struct {
	bulks []Bulk
}

// NewTask allocates an empty task with room for the full batch set.
func NewTask() *task {
	return &task{bulks: make([]Bulk, 0, 21)}
}

// connectorImpl is the ClickHouse Connector implementation. Appends go into
// per-table bulks; Commit hands the current bulk set to a single background
// worker goroutine for an asynchronous flush.
type connectorImpl struct {
	conn       driver.Conn     // underlying ClickHouse connection
	batches    map[string]Bulk //driver.Batch
	workerTask chan *task      // queue of bulk sets awaiting flush
	done       chan struct{}   // shutdown signal for worker()
	finished   chan struct{}   // worker() signals completion of shutdown here
}
// NewConnector opens a pooled, LZ4-compressed ClickHouse connection from the
// given config and starts the background flush worker.
// NOTE: a connection failure is fatal (log.Fatal terminates the process).
func NewConnector(cfg common.Clickhouse) Connector {
	conn, err := clickhouse.Open(&clickhouse.Options{
		Addr: []string{cfg.GetTrimmedURL()},
		Auth: clickhouse.Auth{
			Database: cfg.Database,
			Username: cfg.LegacyUserName,
			Password: cfg.LegacyPassword,
		},
		MaxOpenConns:    20,
		MaxIdleConns:    15,
		ConnMaxLifetime: 3 * time.Minute,
		Compression: &clickhouse.Compression{
			Method: clickhouse.CompressionLZ4,
		},
	})
	if err != nil {
		log.Fatal(err)
	}
	c := &connectorImpl{
		conn:       conn,
		batches:    make(map[string]Bulk, 20),
		workerTask: make(chan *task, 1),
		done:       make(chan struct{}),
		finished:   make(chan struct{}),
	}
	// Single goroutine consumes workerTask until Stop() is called.
	go c.worker()
	return c
}
// newBatch creates a Bulk for the given table/query pair and registers it
// under name in c.batches.
func (c *connectorImpl) newBatch(name, query string) error {
	bulk, err := NewBulk(c.conn, name, query)
	if err != nil {
		return fmt.Errorf("can't create new batch: %s", err)
	}
	c.batches[name] = bulk
	return nil
}
// batches maps each logical table name to its INSERT statement. The argument
// order of every Append call in this file must match the column order here
// exactly — ClickHouse binds positionally. SUBSTR(?, 1, 8000) truncates
// unbounded user-supplied strings at insert time.
var batches = map[string]string{
	// Web
	"sessions":      "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, timezone, utm_source, utm_medium, utm_campaign) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?, ?, ?)",
	"autocompletes": "INSERT INTO experimental.autocomplete (project_id, type, value) VALUES (?, ?, SUBSTR(?, 1, 8000))",
	"pages":         "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, request_start, response_start, response_end, dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, load_event_end, first_paint, first_contentful_paint_time, speed_index, visually_complete, time_to_interactive, url_path, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?)",
	"clicks":        "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, hesitation_time, event_type, selector, normalized_x, normalized_y, url, url_path) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000))",
	"inputs":        "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, event_type, duration, hesitation_time) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
	"errors":        "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, source, name, message, error_id, event_type, error_tags_keys, error_tags_values) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
	"performance":   "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
	"requests":      "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, request_body, response_body, status, method, duration, success, event_type, transfer_size, url_path) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000))",
	"custom":        "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, name, payload, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
	"graphql":       "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, name, request_body, response_body, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
	"issuesEvents":  "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, issue_id, issue_type, event_type, url, url_path) VALUES (?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000))",
	"issues":        "INSERT INTO experimental.issues (project_id, issue_id, type, context_string) VALUES (?, ?, ?, ?)",
	//Mobile
	"ios_sessions": "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, platform, timezone) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?)",
	"ios_custom":   "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, name, payload, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
	"ios_clicks":   "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, label, event_type) VALUES (?, ?, ?, ?, ?, ?)",
	"ios_swipes":   "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, label, direction, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
	"ios_inputs":   "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, label, event_type) VALUES (?, ?, ?, ?, ?, ?)",
	"ios_requests": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, url, request_body, response_body, status, method, duration, success, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?)",
	"ios_crashes":  "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, name, reason, stacktrace, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
}
// Prepare allocates a fresh Bulk for every table/query pair in batches.
// Must be called before any Append; Commit calls it again after each flush.
func (c *connectorImpl) Prepare() error {
	for name, query := range batches {
		if batchErr := c.newBatch(name, query); batchErr != nil {
			return fmt.Errorf("can't create %s batch: %s", name, batchErr)
		}
	}
	return nil
}
// Commit snapshots the current bulk set into a task, hands it to the
// background worker for an asynchronous flush, and immediately prepares a
// fresh batch set so appends can continue.
// NOTE(review): if Prepare fails the error is only logged and Commit still
// returns nil with an empty batches map — a subsequent Append on a missing
// table would call a method on a nil Bulk; confirm whether the error should
// be propagated instead.
func (c *connectorImpl) Commit() error {
	newTask := NewTask()
	for _, b := range c.batches {
		newTask.bulks = append(newTask.bulks, b)
	}
	// Swap in an empty map before re-preparing so the flushed bulks are not reused.
	c.batches = make(map[string]Bulk, 20)
	if err := c.Prepare(); err != nil {
		log.Printf("can't prepare new CH batch set: %s", err)
	}
	// Blocks if the worker is still busy with the previous task (channel cap 1).
	c.workerTask <- newTask
	return nil
}
// Stop signals the worker goroutine to shut down, waits until it reports
// completion on the finished channel, then closes the ClickHouse connection.
// Correct termination depends on worker() draining pending tasks and then
// sending on finished.
func (c *connectorImpl) Stop() error {
	c.done <- struct{}{}
	<-c.finished
	return c.conn.Close()
}
// sendBulks flushes every bulk of the task to ClickHouse; failures are
// logged and do not stop the remaining bulks from being sent.
func (c *connectorImpl) sendBulks(t *task) {
	for _, bulk := range t.bulks {
		if sendErr := bulk.Send(); sendErr != nil {
			log.Printf("can't send batch: %s", sendErr)
		}
	}
}
// worker is the single background goroutine flushing the bulk sets queued by
// Commit. On shutdown (done) it drains any still-pending tasks and then
// signals Stop via the finished channel.
//
// Fix: the original shutdown path drained with `for t := range c.workerTask`,
// but workerTask is never closed, so after the queue emptied the range
// blocked forever, finished was never signaled, and Stop deadlocked. Drain
// with a non-blocking select instead.
func (c *connectorImpl) worker() {
	for {
		select {
		case t := <-c.workerTask:
			c.sendBulks(t)
		case <-c.done:
			for {
				select {
				case t := <-c.workerTask:
					c.sendBulks(t)
				default:
					// Queue is empty: report completion and exit.
					c.finished <- struct{}{}
					return
				}
			}
		}
	}
}
// checkError logs a failed Append, except for ErrBatchAlreadySent which the
// callers treat as non-fatal noise.
// Fix: use errors.Is so wrapped driver errors matching ErrBatchAlreadySent
// are also suppressed (a bare != misses wrapping).
func (c *connectorImpl) checkError(name string, err error) {
	if !errors.Is(err, clickhouse.ErrBatchAlreadySent) {
		log.Printf("can't create %s batch after failed append operation: %s", name, err)
	}
}
// InsertWebInputDuration stores an INPUT event carrying the typing duration
// and hesitation time. Unlabeled inputs are skipped.
func (c *connectorImpl) InsertWebInputDuration(session *sessions.Session, msg *messages.InputChange) error {
	if msg.Label == "" {
		// Nothing useful to index without a label.
		return nil
	}
	appendErr := c.batches["inputs"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MsgID(),
		datetime(msg.Timestamp),
		msg.Label,
		"INPUT",
		nullableUint16(uint16(msg.InputDuration)),
		nullableUint32(uint32(msg.HesitationTime)),
	)
	if appendErr != nil {
		c.checkError("inputs", appendErr)
		return fmt.Errorf("can't append to inputs batch: %s", appendErr)
	}
	return nil
}
// InsertMouseThrashing records a mouse-thrashing issue twice: as an ISSUE
// event in the events stream and as a row in the issues table (where the URL
// doubles as the context string). The issue id is derived deterministically
// from project, session and timestamp.
func (c *connectorImpl) InsertMouseThrashing(session *sessions.Session, msg *messages.MouseThrashing) error {
	issueID := hashid.MouseThrashingID(session.ProjectID, session.SessionID, msg.Timestamp)
	// Insert issue event to batches
	if err := c.batches["issuesEvents"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MsgID(),
		datetime(msg.Timestamp),
		issueID,
		"mouse_thrashing",
		"ISSUE",
		msg.Url,
		extractUrlPath(msg.Url),
	); err != nil {
		c.checkError("issuesEvents", err)
		return fmt.Errorf("can't append to issuesEvents batch: %s", err)
	}
	// Second insert: the issue registry row; msg.Url serves as context_string.
	if err := c.batches["issues"].Append(
		uint16(session.ProjectID),
		issueID,
		"mouse_thrashing",
		msg.Url,
	); err != nil {
		c.checkError("issues", err)
		return fmt.Errorf("can't append to issues batch: %s", err)
	}
	return nil
}
// InsertIssue records a generic issue event twice: as an ISSUE event in the
// events stream and as a row in the issues registry. Unknown issue types are
// rejected up front because the ClickHouse enum column would otherwise make
// the client library panic on Append.
func (c *connectorImpl) InsertIssue(session *sessions.Session, msg *messages.IssueEvent) error {
	issueID := hashid.IssueID(session.ProjectID, msg)
	// Check issue type before insert to avoid panic from clickhouse lib
	switch msg.Type {
	case "click_rage", "dead_click", "excessive_scrolling", "bad_request", "missing_resource", "memory", "cpu", "slow_resource", "slow_page_load", "crash", "ml_cpu", "ml_memory", "ml_dead_click", "ml_click_rage", "ml_mouse_thrashing", "ml_excessive_scrolling", "ml_slow_resources", "custom", "js_exception", "mouse_thrashing", "app_crash":
	default:
		return fmt.Errorf("unknown issueType: %s", msg.Type)
	}
	// Insert issue event to batches
	if err := c.batches["issuesEvents"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MessageID,
		datetime(msg.Timestamp),
		issueID,
		msg.Type,
		"ISSUE",
		msg.URL,
		extractUrlPath(msg.URL),
	); err != nil {
		c.checkError("issuesEvents", err)
		return fmt.Errorf("can't append to issuesEvents batch: %s", err)
	}
	// Second insert: the issue registry row with its context string.
	if err := c.batches["issues"].Append(
		uint16(session.ProjectID),
		issueID,
		msg.Type,
		msg.ContextString,
	); err != nil {
		c.checkError("issues", err)
		return fmt.Errorf("can't append to issues batch: %s", err)
	}
	return nil
}
// InsertWebSession appends a finished web session to the sessions batch.
// Sessions without a duration are rejected (the column is non-nullable and
// *session.Duration is dereferenced below). The argument order must match
// the column list of the "sessions" INSERT statement exactly.
func (c *connectorImpl) InsertWebSession(session *sessions.Session) error {
	if session.Duration == nil {
		return errors.New("trying to insert session with nil duration")
	}
	if err := c.batches["sessions"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		session.UserID,
		session.UserUUID,
		session.UserOS,
		nullableString(session.UserOSVersion),
		nullableString(session.UserDevice),
		session.UserDeviceType,
		session.UserCountry,
		session.UserState,
		session.UserCity,
		datetime(session.Timestamp),
		uint32(*session.Duration),
		uint16(session.PagesCount),
		uint16(session.EventsCount),
		uint16(session.ErrorsCount),
		uint32(session.IssueScore),
		session.Referrer,
		session.IssueTypes,
		session.TrackerVersion,
		session.UserBrowser,
		nullableString(session.UserBrowserVersion),
		session.Metadata1,
		session.Metadata2,
		session.Metadata3,
		session.Metadata4,
		session.Metadata5,
		session.Metadata6,
		session.Metadata7,
		session.Metadata8,
		session.Metadata9,
		session.Metadata10,
		session.Timezone,
		session.UtmSource,
		session.UtmMedium,
		session.UtmCampaign,
	); err != nil {
		c.checkError("sessions", err)
		return fmt.Errorf("can't append to sessions batch: %s", err)
	}
	return nil
}
// extractUrlPath returns the lower-cased path of fullUrl, with "?query"
// appended when a query string is present. Unparseable URLs yield "" and a
// log line.
func extractUrlPath(fullUrl string) string {
	_, path, query, err := url.GetURLParts(fullUrl)
	if err != nil {
		log.Printf("can't parse url: %s", err)
		return ""
	}
	result := path
	if query != "" {
		result = result + "?" + query
	}
	return strings.ToLower(result)
}
// InsertWebPageEvent appends a LOCATION event with the page's timing
// metrics. Argument order must match the "pages" INSERT column list; timing
// values are narrowed to uint16 and stored as nullable.
func (c *connectorImpl) InsertWebPageEvent(session *sessions.Session, msg *messages.PageEvent) error {
	if err := c.batches["pages"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MessageID,
		datetime(msg.Timestamp),
		msg.URL,
		nullableUint16(uint16(msg.RequestStart)),
		nullableUint16(uint16(msg.ResponseStart)),
		nullableUint16(uint16(msg.ResponseEnd)),
		nullableUint16(uint16(msg.DomContentLoadedEventStart)),
		nullableUint16(uint16(msg.DomContentLoadedEventEnd)),
		nullableUint16(uint16(msg.LoadEventStart)),
		nullableUint16(uint16(msg.LoadEventEnd)),
		nullableUint16(uint16(msg.FirstPaint)),
		nullableUint16(uint16(msg.FirstContentfulPaint)),
		nullableUint16(uint16(msg.SpeedIndex)),
		nullableUint16(uint16(msg.VisuallyComplete)),
		nullableUint16(uint16(msg.TimeToInteractive)),
		extractUrlPath(msg.URL),
		"LOCATION",
	); err != nil {
		c.checkError("pages", err)
		return fmt.Errorf("can't append to pages batch: %s", err)
	}
	return nil
}
// InsertWebClickEvent appends a CLICK event. Unlabeled clicks are skipped.
// Normalized coordinates equal to 101 appear to mark "no coordinates" and
// are stored as NULL; values from older trackers (0..100 scale) are rescaled
// before the final division to a percentage.
func (c *connectorImpl) InsertWebClickEvent(session *sessions.Session, msg *messages.MouseClick) error {
	if msg.Label == "" {
		return nil
	}
	var nX, nY *float32
	if msg.NormalizedX != 101 && msg.NormalizedY != 101 {
		// To support previous versions of tracker
		if msg.NormalizedX <= 100 && msg.NormalizedY <= 100 {
			msg.NormalizedX *= 100
			msg.NormalizedY *= 100
		}
		x := float32(msg.NormalizedX) / 100.0
		y := float32(msg.NormalizedY) / 100.0
		nX, nY = &x, &y
	}
	appendErr := c.batches["clicks"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MsgID(),
		datetime(msg.Timestamp),
		msg.Label,
		nullableUint32(uint32(msg.HesitationTime)),
		"CLICK",
		msg.Selector,
		nX,
		nY,
		msg.Url,
		extractUrlPath(msg.Url),
	)
	if appendErr != nil {
		c.checkError("clicks", appendErr)
		return fmt.Errorf("can't append to clicks batch: %s", appendErr)
	}
	return nil
}
// InsertWebErrorEvent appends an ERROR event. Tags are flattened into two
// parallel key/value slices (map iteration order is random, but the pairs
// stay aligned). Unknown sources are rejected up front because the enum
// column would otherwise make the ClickHouse client panic on Append.
func (c *connectorImpl) InsertWebErrorEvent(session *sessions.Session, msg *types.ErrorEvent) error {
	keys, values := make([]string, 0, len(msg.Tags)), make([]*string, 0, len(msg.Tags))
	for k, v := range msg.Tags {
		keys = append(keys, k)
		values = append(values, v)
	}
	// Check error source before insert to avoid panic from clickhouse lib
	switch msg.Source {
	case "js_exception", "bugsnag", "cloudwatch", "datadog", "elasticsearch", "newrelic", "rollbar", "sentry", "stackdriver", "sumologic":
	default:
		return fmt.Errorf("unknown error source: %s", msg.Source)
	}
	// Project-scoped deterministic error id; returned error is ignored here.
	msgID, _ := msg.ID(session.ProjectID)
	// Insert event to batch
	if err := c.batches["errors"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MessageID,
		datetime(msg.Timestamp),
		msg.Source,
		nullableString(msg.Name),
		msg.Message,
		msgID,
		"ERROR",
		keys,
		values,
	); err != nil {
		c.checkError("errors", err)
		return fmt.Errorf("can't append to errors batch: %s", err)
	}
	return nil
}
// InsertWebPerformanceTrackAggr appends a PERFORMANCE event built from an
// aggregated performance sample. The event timestamp is the midpoint of the
// aggregation window; the message id is 0 until a real one is assigned
// (see TODO below).
func (c *connectorImpl) InsertWebPerformanceTrackAggr(session *sessions.Session, msg *messages.PerformanceTrackAggr) error {
	var timestamp uint64 = (msg.TimestampStart + msg.TimestampEnd) / 2
	if err := c.batches["performance"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		uint64(0), // TODO: find messageID for performance events
		datetime(timestamp),
		nullableString(msg.Meta().Url),
		uint8(msg.MinFPS),
		uint8(msg.AvgFPS),
		uint8(msg.MaxFPS),
		uint8(msg.MinCPU),
		uint8(msg.AvgCPU),
		uint8(msg.MaxCPU),
		msg.MinTotalJSHeapSize,
		msg.AvgTotalJSHeapSize,
		msg.MaxTotalJSHeapSize,
		msg.MinUsedJSHeapSize,
		msg.AvgUsedJSHeapSize,
		msg.MaxUsedJSHeapSize,
		"PERFORMANCE",
	); err != nil {
		c.checkError("performance", err)
		return fmt.Errorf("can't append to performance batch: %s", err)
	}
	return nil
}
// InsertAutocomplete stores a (type, value) suggestion for the session's
// project. Empty values are silently skipped.
func (c *connectorImpl) InsertAutocomplete(session *sessions.Session, msgType, msgValue string) error {
	if msgValue == "" {
		return nil
	}
	appendErr := c.batches["autocompletes"].Append(
		uint16(session.ProjectID),
		msgType,
		msgValue,
	)
	if appendErr != nil {
		c.checkError("autocompletes", appendErr)
		return fmt.Errorf("can't append to autocompletes batch: %s", appendErr)
	}
	return nil
}
// InsertRequest appends a REQUEST event for a network call. Payloads are
// stored only when savePayload is set; success is derived from status < 400.
// Fix: reuse the urlMethod computed for validation instead of calling
// url.EnsureMethod(msg.Method) a second time in the Append arguments.
func (c *connectorImpl) InsertRequest(session *sessions.Session, msg *messages.NetworkRequest, savePayload bool) error {
	urlMethod := url.EnsureMethod(msg.Method)
	if urlMethod == "" {
		return fmt.Errorf("can't parse http method. sess: %d, method: %s", session.SessionID, msg.Method)
	}
	var request, response *string
	if savePayload {
		request = &msg.Request
		response = &msg.Response
	}
	if err := c.batches["requests"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.Meta().Index,
		datetime(uint64(msg.Meta().Timestamp)),
		msg.URL,
		request,
		response,
		uint16(msg.Status),
		urlMethod,
		uint16(msg.Duration),
		msg.Status < 400,
		"REQUEST",
		uint32(msg.TransferredBodySize),
		extractUrlPath(msg.URL),
	); err != nil {
		c.checkError("requests", err)
		return fmt.Errorf("can't append to requests batch: %s", err)
	}
	return nil
}
// InsertCustom appends a CUSTOM event carrying a user-defined name/payload.
func (c *connectorImpl) InsertCustom(session *sessions.Session, msg *messages.CustomEvent) error {
	appendErr := c.batches["custom"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.Meta().Index,
		datetime(uint64(msg.Meta().Timestamp)),
		msg.Name,
		msg.Payload,
		"CUSTOM",
	)
	if appendErr != nil {
		c.checkError("custom", appendErr)
		return fmt.Errorf("can't append to custom batch: %s", appendErr)
	}
	return nil
}
// InsertGraphQL appends a GRAPHQL event; variables and response are stored
// as nullable strings.
func (c *connectorImpl) InsertGraphQL(session *sessions.Session, msg *messages.GraphQL) error {
	appendErr := c.batches["graphql"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.Meta().Index,
		datetime(uint64(msg.Meta().Timestamp)),
		msg.OperationName,
		nullableString(msg.Variables),
		nullableString(msg.Response),
		"GRAPHQL",
	)
	if appendErr != nil {
		c.checkError("graphql", appendErr)
		return fmt.Errorf("can't append to graphql batch: %s", appendErr)
	}
	return nil
}
// Mobile events

// InsertMobileSession appends a finished mobile session to the ios_sessions
// batch with the platform column fixed to "ios". Sessions without a duration
// are rejected (*session.Duration is dereferenced below). Argument order
// must match the "ios_sessions" INSERT column list exactly.
func (c *connectorImpl) InsertMobileSession(session *sessions.Session) error {
	if session.Duration == nil {
		return errors.New("trying to insert mobile session with nil duration")
	}
	if err := c.batches["ios_sessions"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		session.UserID,
		session.UserUUID,
		session.UserOS,
		nullableString(session.UserOSVersion),
		nullableString(session.UserDevice),
		session.UserDeviceType,
		session.UserCountry,
		session.UserState,
		session.UserCity,
		datetime(session.Timestamp),
		uint32(*session.Duration),
		uint16(session.PagesCount),
		uint16(session.EventsCount),
		uint16(session.ErrorsCount),
		uint32(session.IssueScore),
		session.Referrer,
		session.IssueTypes,
		session.TrackerVersion,
		session.UserBrowser,
		nullableString(session.UserBrowserVersion),
		session.Metadata1,
		session.Metadata2,
		session.Metadata3,
		session.Metadata4,
		session.Metadata5,
		session.Metadata6,
		session.Metadata7,
		session.Metadata8,
		session.Metadata9,
		session.Metadata10,
		"ios",
		session.Timezone,
	); err != nil {
		c.checkError("ios_sessions", err)
		return fmt.Errorf("can't append to sessions batch: %s", err)
	}
	return nil
}
// InsertMobileCustom appends a mobile CUSTOM event with its name/payload.
func (c *connectorImpl) InsertMobileCustom(session *sessions.Session, msg *messages.MobileEvent) error {
	appendErr := c.batches["ios_custom"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.Meta().Index,
		datetime(uint64(msg.Meta().Timestamp)),
		msg.Name,
		msg.Payload,
		"CUSTOM",
	)
	if appendErr != nil {
		c.checkError("ios_custom", appendErr)
		return fmt.Errorf("can't append to mobile custom batch: %s", appendErr)
	}
	return nil
}
// InsertMobileClick appends a TAP event; unlabeled taps are skipped.
func (c *connectorImpl) InsertMobileClick(session *sessions.Session, msg *messages.MobileClickEvent) error {
	if msg.Label == "" {
		return nil
	}
	appendErr := c.batches["ios_clicks"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MsgID(),
		datetime(msg.Timestamp),
		msg.Label,
		"TAP",
	)
	if appendErr != nil {
		c.checkError("ios_clicks", appendErr)
		return fmt.Errorf("can't append to mobile clicks batch: %s", appendErr)
	}
	return nil
}
// InsertMobileSwipe appends a SWIPE event; unlabeled swipes are skipped.
// Fix: the error path referenced the "ios_clicks" batch and reported
// "mobile clicks" although this method appends to "ios_swipes".
func (c *connectorImpl) InsertMobileSwipe(session *sessions.Session, msg *messages.MobileSwipeEvent) error {
	if msg.Label == "" {
		return nil
	}
	if err := c.batches["ios_swipes"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MsgID(),
		datetime(msg.Timestamp),
		msg.Label,
		nullableString(msg.Direction),
		"SWIPE",
	); err != nil {
		c.checkError("ios_swipes", err)
		return fmt.Errorf("can't append to mobile swipes batch: %s", err)
	}
	return nil
}
// InsertMobileInput appends a mobile INPUT event; unlabeled inputs are skipped.
func (c *connectorImpl) InsertMobileInput(session *sessions.Session, msg *messages.MobileInputEvent) error {
	if msg.Label == "" {
		return nil
	}
	appendErr := c.batches["ios_inputs"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MsgID(),
		datetime(msg.Timestamp),
		msg.Label,
		"INPUT",
	)
	if appendErr != nil {
		c.checkError("ios_inputs", appendErr)
		return fmt.Errorf("can't append to mobile inputs batch: %s", appendErr)
	}
	return nil
}
// InsertMobileRequest appends a mobile REQUEST event for a network call.
// Payloads are stored only when savePayload is set; success is status < 400.
// Fix: reuse the urlMethod computed for validation instead of calling
// url.EnsureMethod(msg.Method) a second time in the Append arguments.
func (c *connectorImpl) InsertMobileRequest(session *sessions.Session, msg *messages.MobileNetworkCall, savePayload bool) error {
	urlMethod := url.EnsureMethod(msg.Method)
	if urlMethod == "" {
		return fmt.Errorf("can't parse http method. sess: %d, method: %s", session.SessionID, msg.Method)
	}
	var request, response *string
	if savePayload {
		request = &msg.Request
		response = &msg.Response
	}
	if err := c.batches["ios_requests"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.Meta().Index,
		datetime(uint64(msg.Meta().Timestamp)),
		msg.URL,
		request,
		response,
		uint16(msg.Status),
		urlMethod,
		uint16(msg.Duration),
		msg.Status < 400,
		"REQUEST",
	); err != nil {
		c.checkError("ios_requests", err)
		return fmt.Errorf("can't append to mobile requests batch: %s", err)
	}
	return nil
}
// InsertMobileCrash appends a CRASH event with name, reason and stacktrace.
// Fix: the error message misspelled "crashes" as "crashges".
func (c *connectorImpl) InsertMobileCrash(session *sessions.Session, msg *messages.MobileCrash) error {
	if err := c.batches["ios_crashes"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MsgID(),
		datetime(msg.Timestamp),
		msg.Name,
		msg.Reason,
		msg.Stacktrace,
		"CRASH",
	); err != nil {
		c.checkError("ios_crashes", err)
		return fmt.Errorf("can't append to mobile crashes batch: %s", err)
	}
	return nil
}

View file

@ -19,20 +19,17 @@ type Conn struct {
Pool pool.Pool
batches *batch.BatchSet
bulks *BulkSet
chConn CH // hack for autocomplete inserts, TODO: rewrite
chConn CH
}
func (conn *Conn) SetClickHouse(ch CH) {
conn.chConn = ch
}
func NewConn(log logger.Logger, pool pool.Pool) *Conn {
func NewConn(log logger.Logger, pool pool.Pool, ch CH) *Conn {
if pool == nil {
log.Fatal(context.Background(), "pg pool is empty")
}
return &Conn{
log: log,
Pool: pool,
chConn: ch,
bulks: NewBulkSet(log, pool),
batches: batch.NewBatchSet(log, pool),
}

View file

@ -0,0 +1,22 @@
package analytics
import (
"github.com/prometheus/client_golang/prometheus"
"openreplay/backend/pkg/metrics/common"
)
// cardCreated observes card-creation events for the analytics service;
// exported to the registry through List().
// NOTE(review): this is a Histogram named "created" — confirm whether a
// plain Counter was intended, or what quantity the observations represent.
var cardCreated = prometheus.NewHistogram(
	prometheus.HistogramOpts{
		Namespace: "card",
		Name:      "created",
		Help:      "Histogram for tracking card creation",
		Buckets:   common.DefaultBuckets,
	},
)
// List exposes every collector defined in this package for registration.
func List() []prometheus.Collector {
	collectors := []prometheus.Collector{cardCreated}
	return collectors
}

View file

@ -16,7 +16,7 @@ type Sessions interface {
AddUnStarted(session *UnStartedSession) error
AddCached(sessionID uint64, data map[string]string) error
Get(sessionID uint64) (*Session, error)
GetUpdated(sessionID uint64) (*Session, error)
GetUpdated(sessionID uint64, keepInCache bool) (*Session, error)
GetCached(sessionID uint64) (map[string]string, error)
GetDuration(sessionID uint64) (uint64, error)
UpdateDuration(sessionID uint64, timestamp uint64) (uint64, error)
@ -104,11 +104,14 @@ func (s *sessionsImpl) Get(sessionID uint64) (*Session, error) {
}
// Special method for clickhouse connector
func (s *sessionsImpl) GetUpdated(sessionID uint64) (*Session, error) {
func (s *sessionsImpl) GetUpdated(sessionID uint64, keepInCache bool) (*Session, error) {
session, err := s.getFromDB(sessionID)
if err != nil {
return nil, err
}
if !keepInCache {
return session, nil
}
if err := s.cache.Set(session); err != nil {
ctx := context.WithValue(context.Background(), "sessionID", sessionID)
s.log.Warn(ctx, "failed to cache session: %s", err)

View file

@ -1,19 +1,19 @@
package spot
import (
"openreplay/backend/pkg/metrics/web"
"openreplay/backend/pkg/server/tracer"
"time"
"openreplay/backend/internal/config/spot"
"openreplay/backend/pkg/db/postgres/pool"
"openreplay/backend/pkg/flakeid"
"openreplay/backend/pkg/logger"
"openreplay/backend/pkg/metrics/web"
"openreplay/backend/pkg/objectstorage/store"
"openreplay/backend/pkg/server/api"
"openreplay/backend/pkg/server/auth"
"openreplay/backend/pkg/server/keys"
"openreplay/backend/pkg/server/limiter"
"openreplay/backend/pkg/server/tracer"
spotAPI "openreplay/backend/pkg/spot/api"
"openreplay/backend/pkg/spot/service"
"openreplay/backend/pkg/spot/transcoder"

6
ee/api/.gitignore vendored
View file

@ -184,7 +184,6 @@ Pipfile.lock
/build.sh
/build_alerts.sh
/build_crons.sh
/chalicelib/core/alerts.py
/chalicelib/core/announcements.py
/chalicelib/core/assist.py
/chalicelib/core/authorizers.py
@ -286,3 +285,8 @@ Pipfile.lock
/chalicelib/utils/ch_client.py
/chalicelib/utils/ch_client_exp.py
/routers/subs/product_anaytics.py
/chalicelib/core/alerts/__init__.py
/chalicelib/core/alerts/alerts.py
/chalicelib/core/alerts/alerts_processor.py
/chalicelib/core/alerts/alerts_processor_ch.py
/chalicelib/core/sessions_ch.py

View file

@ -11,7 +11,7 @@ from . import metrics as metrics_legacy
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
logging.info(">>> Using experimental sessions search")
from . import sessions_exp as sessions
from . import sessions_ch as sessions
else:
from . import sessions as sessions
@ -34,12 +34,6 @@ else:
if config("EXP_SESSIONS_SEARCH_METRIC", cast=bool, default=False):
logging.info(">>> Using experimental sessions search for metrics")
if config("EXP_ALERTS", cast=bool, default=False):
logging.info(">>> Using experimental alerts")
from . import alerts_processor_exp as alerts_processor
else:
from . import alerts_processor as alerts_processor
if config("EXP_FUNNELS", cast=bool, default=False):
logging.info(">>> Using experimental funnels")
if not config("EXP_SESSIONS_SEARCH", cast=bool, default=False):

View file

@ -0,0 +1,12 @@
from decouple import config

# Select the sessions implementation whose storage backend matches the
# alerts processor in use. Writing out the original four-way branch as a
# truth table shows it is an equality test:
#   EXP_ALERTS == EXP_SESSIONS_SEARCH -> chalicelib.core.sessions
#   flags differ                      -> chalicelib.core.sessions_legacy
if config("EXP_ALERTS", cast=bool, default=False) == config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
    from chalicelib.core.sessions import *
else:
    from chalicelib.core.sessions_legacy import *

View file

@ -1,242 +0,0 @@
import decimal
import logging
from decouple import config
from pydantic_core._pydantic_core import ValidationError
import schemas
from chalicelib.core import alerts
from chalicelib.core import alerts_listener
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
from chalicelib.core import sessions_legacy as sessions
else:
from chalicelib.core import sessions
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))

# Maps an alert's left-hand metric to the SQL used to compute its value:
#   table        - FROM clause (usually events joined to public.sessions)
#   formula      - aggregate expression producing the value
#   condition    - optional extra WHERE predicate
#   joinSessions - False when the query must not add start_ts filters
LeftToDb = {
    schemas.AlertColumn.PERFORMANCE__DOM_CONTENT_LOADED__AVERAGE: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "COALESCE(AVG(NULLIF(dom_content_loaded_time ,0)),0)"},
    schemas.AlertColumn.PERFORMANCE__FIRST_MEANINGFUL_PAINT__AVERAGE: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)"},
    schemas.AlertColumn.PERFORMANCE__PAGE_LOAD_TIME__AVERAGE: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(load_time ,0))"},
    schemas.AlertColumn.PERFORMANCE__DOM_BUILD_TIME__AVERAGE: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(dom_building_time,0))"},
    schemas.AlertColumn.PERFORMANCE__SPEED_INDEX__AVERAGE: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(speed_index,0))"},
    schemas.AlertColumn.PERFORMANCE__PAGE_RESPONSE_TIME__AVERAGE: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(response_time,0))"},
    schemas.AlertColumn.PERFORMANCE__TTFB__AVERAGE: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(first_paint_time,0))"},
    schemas.AlertColumn.PERFORMANCE__TIME_TO_RENDER__AVERAGE: {
        "table": "events.pages INNER JOIN public.sessions USING(session_id)",
        "formula": "AVG(NULLIF(visually_complete,0))"},
    schemas.AlertColumn.PERFORMANCE__CRASHES__COUNT: {
        "table": "public.sessions",
        "formula": "COUNT(DISTINCT session_id)",
        "condition": "errors_count > 0 AND duration>0"},
    schemas.AlertColumn.ERRORS__JAVASCRIPT__COUNT: {
        "table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
        "formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'", "joinSessions": False},
    schemas.AlertColumn.ERRORS__BACKEND__COUNT: {
        "table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
        "formula": "COUNT(DISTINCT session_id)", "condition": "source!='js_exception'", "joinSessions": False},
}

# This is the frequency of execution for each threshold:
# alert period (minutes) -> how often the alert is checked (minutes).
TimeInterval = {
    15: 3,
    30: 5,
    60: 10,
    120: 20,
    240: 30,
    1440: 60,
}
def can_check(a) -> bool:
    """Returns True when alert `a` is due for evaluation right now.

    An alert is due when its renotify cool-down has elapsed (or is
    disabled) and the current minute falls on the check schedule derived
    from its detection period (see TimeInterval).
    """
    now = TimeUTC.now()
    # For "change" detection the longer of the two periods drives the schedule.
    if a["detectionMethod"] == schemas.AlertDetectionMethod.CHANGE \
            and a["options"]["currentPeriod"] > a["options"]["previousPeriod"]:
        repetitionBase = a["options"]["currentPeriod"]
    else:
        repetitionBase = a["options"]["previousPeriod"]
    if TimeInterval.get(repetitionBase) is None:
        logging.error(f"repetitionBase: {repetitionBase} NOT FOUND")
        return False
    cooldown_over = (a["options"]["renotifyInterval"] <= 0
                     or a["options"].get("lastNotification") is None
                     or a["options"]["lastNotification"] <= 0
                     or (now - a["options"]["lastNotification"]) > a["options"]["renotifyInterval"] * 60 * 1000)
    on_schedule = ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000
    return cooldown_over and on_schedule
def Build(a):
    """Builds the SQL query and parameter dict that evaluates alert `a`.

    Two sources for the measured value:
      * a["seriesId"] is set -> the alert watches a saved series; reuse the
        sessions search query builder and count matching sessions.
      * otherwise -> a predefined metric; look up table/formula in LeftToDb.
    The outer SELECT compares the computed value against the alert's
    right-hand side and yields a boolean `valid` column.
    Returns (query_string, params).
    """
    now = TimeUTC.now()
    params = {"project_id": a["projectId"], "now": now}
    full_args = {}
    j_s = True  # whether the sub-query joins public.sessions (controls start_ts filters)
    main_table = ""
    if a["seriesId"] is not None:
        a["filter"]["sort"] = "session_id"
        a["filter"]["order"] = schemas.SortOrderType.DESC
        a["filter"]["startDate"] = 0
        a["filter"]["endDate"] = TimeUTC.now()
        try:
            data = schemas.SessionsSearchPayloadSchema.model_validate(a["filter"])
        except ValidationError:
            logging.warning("Validation error for:")
            logging.warning(a["filter"])
            raise
        full_args, query_part = sessions.search_query_parts(data=data, error_status=None, errors_only=False,
                                                            issue=None, project_id=a["projectId"], user_id=None,
                                                            favorite_only=False)
        subQ = f"""SELECT COUNT(session_id) AS value
                {query_part}"""
    else:
        colDef = LeftToDb[a["query"]["left"]]
        subQ = f"""SELECT {colDef["formula"]} AS value
                FROM {colDef["table"]}
                WHERE project_id = %(project_id)s
                      {"AND " + colDef["condition"] if colDef.get("condition") else ""}"""
        j_s = colDef.get("joinSessions", True)
        main_table = colDef["table"]
    # public.sessions alone has no events.* timestamp column, so time filters differ.
    is_ss = main_table == "public.sessions"
    q = f"""SELECT coalesce(value,0) AS value, coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid"""
    if a["detectionMethod"] == schemas.AlertDetectionMethod.THRESHOLD:
        # Threshold: compare the current-period value directly against the limit.
        if a["seriesId"] is not None:
            q += f""" FROM ({subQ}) AS stat"""
        else:
            q += f""" FROM ({subQ} {"AND timestamp >= %(startDate)s AND timestamp <= %(now)s" if not is_ss else ""}
                            {"AND start_ts >= %(startDate)s AND start_ts <= %(now)s" if j_s else ""}) AS stat"""
        params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000}
    else:
        if a["change"] == schemas.AlertDetectionType.CHANGE:
            # Change: absolute difference between current and previous period.
            if a["seriesId"] is not None:
                sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
                sub1 = f"SELECT (({subQ})-({sub2})) AS value"
                q += f" FROM ( {sub1} ) AS stat"
                params = {**params, **full_args,
                          "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
                          "timestamp_sub2": TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000}
            else:
                sub1 = f"""{subQ} {"AND timestamp >= %(startDate)s AND timestamp <= %(now)s" if not is_ss else ""}
                            {"AND start_ts >= %(startDate)s AND start_ts <= %(now)s" if j_s else ""}"""
                params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
                sub2 = f"""{subQ} {"AND timestamp < %(startDate)s AND timestamp >= %(timestamp_sub2)s" if not is_ss else ""}
                            {"AND start_ts < %(startDate)s AND start_ts >= %(timestamp_sub2)s" if j_s else ""}"""
                params["timestamp_sub2"] = TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000
                sub1 = f"SELECT (( {sub1} )-( {sub2} )) AS value"
                q += f" FROM ( {sub1} ) AS stat"
        else:
            # Percentage change: relative difference between the two periods.
            if a["seriesId"] is not None:
                sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
                sub1 = f"SELECT (({subQ})/NULLIF(({sub2}),0)-1)*100 AS value"
                q += f" FROM ({sub1}) AS stat"
                params = {**params, **full_args,
                          "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
                          "timestamp_sub2": TimeUTC.now() \
                                            - (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) \
                                            * 60 * 1000}
            else:
                sub1 = f"""{subQ} {"AND timestamp >= %(startDate)s AND timestamp <= %(now)s" if not is_ss else ""}
                            {"AND start_ts >= %(startDate)s AND start_ts <= %(now)s" if j_s else ""}"""
                params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
                sub2 = f"""{subQ} {"AND timestamp < %(startDate)s AND timestamp >= %(timestamp_sub2)s" if not is_ss else ""}
                            {"AND start_ts < %(startDate)s AND start_ts >= %(timestamp_sub2)s" if j_s else ""}"""
                params["timestamp_sub2"] = TimeUTC.now() \
                                           - (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) * 60 * 1000
                sub1 = f"SELECT (({sub1})/NULLIF(({sub2}),0)-1)*100 AS value"
                q += f" FROM ({sub1}) AS stat"
    return q, params
def process():
    """Evaluates every due alert and dispatches notifications.

    For each alert that can_check() accepts: build its SQL, execute it and,
    when the `valid` column is true, queue a notification. Failures while
    building or running a single alert are logged and skipped; a failed
    execution rolls back and recreates the cursor so later alerts still run.
    Finally `lastNotification` is stamped on the notified alerts and the
    queued notifications are delivered.
    """
    notifications = []
    all_alerts = alerts_listener.get_all_alerts()
    with pg_client.PostgresClient() as cur:
        for alert in all_alerts:
            if can_check(alert):
                query, params = Build(alert)
                try:
                    query = cur.mogrify(query, params)
                except Exception as e:
                    logging.error(
                        f"!!!Error while building alert query for alertId:{alert['alertId']} name: {alert['name']}")
                    logging.error(e)
                    continue
                logging.debug(alert)
                logging.debug(query)
                try:
                    cur.execute(query)
                    result = cur.fetchone()
                    if result["valid"]:
                        logging.info(f"Valid alert, notifying users, alertId:{alert['alertId']} name: {alert['name']}")
                        notifications.append(generate_notification(alert, result))
                except Exception as e:
                    logging.error(
                        f"!!!Error while running alert query for alertId:{alert['alertId']} name: {alert['name']}")
                    logging.error(query)
                    logging.error(e)
                    # Recreate the cursor after rollback so remaining alerts can run.
                    cur = cur.recreate(rollback=True)
        if len(notifications) > 0:
            # Record when each alert last fired (drives the renotify cool-down).
            cur.execute(
                cur.mogrify(f"""UPDATE public.alerts
                                SET options = options||'{{"lastNotification":{TimeUTC.now()}}}'::jsonb
                                WHERE alert_id IN %(ids)s;""", {"ids": tuple([n["alertId"] for n in notifications])}))
    if len(notifications) > 0:
        alerts.process_notifications(notifications)
def __format_value(x):
if x % 1 == 0:
x = int(x)
else:
x = round(x, 2)
return f"{x:,}"
def generate_notification(alert, result):
    """Builds the notification payload for a triggered alert.

    `result["value"]` is the measured value from the alert query; Decimal
    values are converted to float so the payload stays JSON-serializable.
    """
    left = __format_value(result['value'])
    right = __format_value(alert['query']['right'])
    return {
        "alertId": alert["alertId"],
        "tenantId": alert["tenantId"],
        "title": alert["name"],
        "description": f"{alert['seriesName']} = {left} ({alert['query']['operator']} {right}).",
        "buttonText": "Check metrics for more details",
        "buttonUrl": f"/{alert['projectId']}/metrics",
        "imageUrl": None,
        "projectId": alert["projectId"],
        "projectName": alert["projectName"],
        "options": {"source": "ALERT", "sourceId": alert["alertId"],
                    "sourceMeta": alert["detectionMethod"],
                    "message": alert["options"]["message"], "projectId": alert["projectId"],
                    "data": {"title": alert["name"],
                             "limitValue": alert["query"]["right"],
                             "actualValue": float(result["value"]) \
                                 if isinstance(result["value"], decimal.Decimal) \
                                 else result["value"],
                             "operator": alert["query"]["operator"],
                             "trigger": alert["query"]["left"],
                             "alertId": alert["alertId"],
                             "detectionMethod": alert["detectionMethod"],
                             "currentPeriod": alert["options"]["currentPeriod"],
                             "previousPeriod": alert["options"]["previousPeriod"],
                             "createdAt": TimeUTC.now()}},
    }

View file

@ -11,11 +11,13 @@ from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.storage import extra
if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
logging.info(">>> Using experimental error search")
from . import errors_exp as errors
else:
from . import errors as errors
# TODO: fix this import
from . import errors as errors
# if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
# logging.info(">>> Using experimental error search")
# from . import errors_exp as errors
# else:
# from . import errors as errors
if config("EXP_SESSIONS_SEARCH_METRIC", cast=bool, default=False):
from chalicelib.core import sessions

View file

@ -7,7 +7,7 @@ from chalicelib.core import sessions_mobs, events
from chalicelib.utils import sql_helper as sh
if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
from chalicelib.core import sessions_exp as sessions
from chalicelib.core import sessions_ch as sessions
else:
from chalicelib.core import sessions

View file

@ -1,8 +1,8 @@
from typing import List
import schemas
from chalicelib.core.metrics import __get_basic_constraints, __get_meta_constraint
from chalicelib.core.metrics import __get_constraint_values, __complete_missing_steps
from chalicelib.core.metrics_ch import __get_basic_constraints, __get_meta_constraint
from chalicelib.core.metrics_ch import __get_constraint_values, __complete_missing_steps
from chalicelib.utils import ch_client, exp_ch_helper
from chalicelib.utils import helper, dev
from chalicelib.utils.TimeUTC import TimeUTC

View file

@ -6,7 +6,6 @@ rm -rf ./auth/auth_apikey.py
rm -rf ./build.sh
rm -rf ./build_alerts.sh
rm -rf ./build_crons.sh
rm -rf ./chalicelib/core/alerts.py
rm -rf ./chalicelib/core/announcements.py
rm -rf ./chalicelib/core/assist.py
rm -rf ./chalicelib/core/authorizers.py
@ -105,3 +104,8 @@ rm -rf ./chalicelib/core/product_anaytics2.py
rm -rf ./chalicelib/utils/ch_client.py
rm -rf ./chalicelib/utils/ch_client_exp.py
rm -rf ./routers/subs/product_anaytics.py
rm -rf ./chalicelib/core/alerts/__init__.py
rm -rf ./chalicelib/core/alerts/alerts.py
rm -rf ./chalicelib/core/alerts/alerts_processor.py
rm -rf ./chalicelib/core/alerts/alerts_processor_ch.py
rm -rf ./chalicelib/core/sessions_ch.py

View file

@ -3,7 +3,9 @@ package datasaver
import (
"encoding/json"
"log"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue"
)
type NetworkRequestFTS struct {
@ -98,6 +100,12 @@ func WrapGraphQL(m *messages.GraphQL, projID uint32) *GraphQLFTS {
}
}
func (s *saverImpl) init() {
if s.cfg.UseQuickwit {
s.producer = queue.NewProducer(s.cfg.MessageSizeLimit, true)
}
}
func (s *saverImpl) sendToFTS(msg messages.Message, projID uint32) {
// Skip, if FTS is disabled
if s.producer == nil {

View file

@ -1,93 +0,0 @@
package datasaver
import (
"log"
"openreplay/backend/pkg/db/clickhouse"
"openreplay/backend/pkg/db/types"
"openreplay/backend/pkg/env"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue"
"openreplay/backend/pkg/sessions"
)
// init wires the ClickHouse connector into the saver and, when Quickwit
// indexing is enabled, creates the FTS message producer. Terminates the
// process if the ClickHouse batches cannot be prepared.
func (s *saverImpl) init() {
	s.ch = clickhouse.NewConnector(env.String("CLICKHOUSE_STRING"))
	if err := s.ch.Prepare(); err != nil {
		log.Fatalf("can't prepare clickhouse: %s", err)
	}
	s.pg.SetClickHouse(s.ch)
	if s.cfg.UseQuickwit {
		s.producer = queue.NewProducer(s.cfg.MessageSizeLimit, true)
	}
}
// handleExtraMessage mirrors one tracker message into ClickHouse (and
// optionally into the FTS queue). Session-end messages load fresh session
// data from the DB; everything else reads the cached session. Message types
// not listed in the switch are silently ignored.
func (s *saverImpl) handleExtraMessage(msg messages.Message) error {
	// Get session data
	var (
		session *sessions.Session
		err     error
	)
	if msg.TypeID() == messages.MsgSessionEnd || msg.TypeID() == messages.MsgMobileSessionEnd {
		session, err = s.sessions.GetUpdated(msg.SessionID())
	} else {
		session, err = s.sessions.Get(msg.SessionID())
	}
	if err != nil || session == nil {
		log.Printf("Error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, msg.SessionID(), msg)
		return err
	}
	// Send data to quickwit
	s.sendToFTS(msg, session.ProjectID)
	// Handle message: route each message type to its ClickHouse insert.
	switch m := msg.(type) {
	case *messages.SessionEnd:
		return s.ch.InsertWebSession(session)
	case *messages.PerformanceTrackAggr:
		return s.ch.InsertWebPerformanceTrackAggr(session, m)
	case *messages.MouseClick:
		return s.ch.InsertWebClickEvent(session, m)
	// Unique for Web
	case *messages.PageEvent:
		return s.ch.InsertWebPageEvent(session, m)
	case *messages.JSException:
		wrapper, _ := types.WrapJSException(m)
		return s.ch.InsertWebErrorEvent(session, wrapper)
	case *messages.IntegrationEvent:
		return s.ch.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m))
	case *messages.IssueEvent:
		return s.ch.InsertIssue(session, m)
	case *messages.CustomEvent:
		return s.ch.InsertCustom(session, m)
	case *messages.NetworkRequest:
		// NOTE(review): request insert failures are logged but not propagated,
		// unlike the other cases — presumably intentional best-effort.
		if err := s.ch.InsertRequest(session, m, session.SaveRequestPayload); err != nil {
			log.Printf("can't insert request event into clickhouse: %s", err)
		}
	case *messages.GraphQL:
		return s.ch.InsertGraphQL(session, m)
	case *messages.InputChange:
		return s.ch.InsertWebInputDuration(session, m)
	case *messages.MouseThrashing:
		return s.ch.InsertMouseThrashing(session, m)
	// Mobile messages
	case *messages.MobileSessionEnd:
		return s.ch.InsertMobileSession(session)
	case *messages.MobileEvent:
		return s.ch.InsertMobileCustom(session, m)
	case *messages.MobileClickEvent:
		return s.ch.InsertMobileClick(session, m)
	case *messages.MobileSwipeEvent:
		return s.ch.InsertMobileSwipe(session, m)
	case *messages.MobileInputEvent:
		return s.ch.InsertMobileInput(session, m)
	case *messages.MobileNetworkCall:
		return s.ch.InsertMobileRequest(session, m, session.SaveRequestPayload)
	case *messages.MobileCrash:
		return s.ch.InsertMobileCrash(session, m)
	}
	return nil
}

View file

@ -1,713 +0,0 @@
package clickhouse
import (
"errors"
"fmt"
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
"log"
"openreplay/backend/pkg/db/types"
"openreplay/backend/pkg/hashid"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/sessions"
"openreplay/backend/pkg/url"
"os"
"strings"
"time"
"openreplay/backend/pkg/license"
)
// Connector buffers analytics events into per-table batches and flushes
// them to ClickHouse. Commit hands the filled batches to a background
// worker; Stop drains pending work and closes the connection.
type Connector interface {
	Prepare() error
	Commit() error
	Stop() error
	// Web
	InsertWebSession(session *sessions.Session) error
	InsertWebPageEvent(session *sessions.Session, msg *messages.PageEvent) error
	InsertWebClickEvent(session *sessions.Session, msg *messages.MouseClick) error
	InsertWebErrorEvent(session *sessions.Session, msg *types.ErrorEvent) error
	InsertWebPerformanceTrackAggr(session *sessions.Session, msg *messages.PerformanceTrackAggr) error
	InsertAutocomplete(session *sessions.Session, msgType, msgValue string) error
	InsertRequest(session *sessions.Session, msg *messages.NetworkRequest, savePayload bool) error
	InsertCustom(session *sessions.Session, msg *messages.CustomEvent) error
	InsertGraphQL(session *sessions.Session, msg *messages.GraphQL) error
	InsertIssue(session *sessions.Session, msg *messages.IssueEvent) error
	InsertWebInputDuration(session *sessions.Session, msg *messages.InputChange) error
	InsertMouseThrashing(session *sessions.Session, msg *messages.MouseThrashing) error
	// Mobile
	InsertMobileSession(session *sessions.Session) error
	InsertMobileCustom(session *sessions.Session, msg *messages.MobileEvent) error
	InsertMobileClick(session *sessions.Session, msg *messages.MobileClickEvent) error
	InsertMobileSwipe(session *sessions.Session, msg *messages.MobileSwipeEvent) error
	InsertMobileInput(session *sessions.Session, msg *messages.MobileInputEvent) error
	InsertMobileRequest(session *sessions.Session, msg *messages.MobileNetworkCall, savePayload bool) error
	InsertMobileCrash(session *sessions.Session, msg *messages.MobileCrash) error
}
// task is one unit of work for the background sender: the set of filled
// batches swapped out by Commit.
type task struct {
	bulks []Bulk
}

// NewTask allocates an empty task, pre-sized for the known batch count.
func NewTask() *task {
	return &task{bulks: make([]Bulk, 0, 21)}
}
// connectorImpl sends batches asynchronously: Commit swaps in fresh
// batches and queues the filled ones on workerTask for the worker
// goroutine; done/finished coordinate shutdown.
type connectorImpl struct {
	conn       driver.Conn
	batches    map[string]Bulk //driver.Batch
	workerTask chan *task
	done       chan struct{}
	finished   chan struct{}
}
func getEnv(key, fallback string) string {
if value, ok := os.LookupEnv(key); ok {
return value
}
return fallback
}
// NewConnector opens a ClickHouse connection for the given URL, starts the
// background sender goroutine and returns the connector. Credentials come
// from CH_USERNAME / CH_PASSWORD; the process exits if the connection
// cannot be opened.
func NewConnector(url string) Connector {
	license.CheckLicense()
	// Normalize DSN forms like "tcp://host:9000/default" down to "host:9000".
	url = strings.TrimPrefix(url, "tcp://")
	url = strings.TrimSuffix(url, "/default")
	userName := getEnv("CH_USERNAME", "default")
	password := getEnv("CH_PASSWORD", "")
	conn, err := clickhouse.Open(&clickhouse.Options{
		Addr: []string{url},
		Auth: clickhouse.Auth{
			Database: "default",
			Username: userName,
			Password: password,
		},
		MaxOpenConns:    20,
		MaxIdleConns:    15,
		ConnMaxLifetime: 3 * time.Minute,
		Compression: &clickhouse.Compression{
			Method: clickhouse.CompressionLZ4,
		},
	})
	if err != nil {
		log.Fatal(err)
	}
	c := &connectorImpl{
		conn:       conn,
		batches:    make(map[string]Bulk, 20),
		workerTask: make(chan *task, 1),
		done:       make(chan struct{}),
		finished:   make(chan struct{}),
	}
	go c.worker()
	return c
}
// newBatch creates a fresh Bulk for the given table name/query and stores
// it in the batches map, replacing any previous one.
func (c *connectorImpl) newBatch(name, query string) error {
	batch, err := NewBulk(c.conn, name, query)
	if err != nil {
		return fmt.Errorf("can't create new batch: %s", err)
	}
	c.batches[name] = batch
	return nil
}
// batches maps a logical batch name to its INSERT statement. Most event
// kinds share the experimental.events / experimental.ios_events tables and
// are distinguished by the event_type column; SUBSTR(?, 1, 8000) caps
// oversized string values at insert time.
var batches = map[string]string{
	// Web
	"sessions":      "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, timezone, utm_source, utm_medium, utm_campaign) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?, ?, ?)",
	"autocompletes": "INSERT INTO experimental.autocomplete (project_id, type, value) VALUES (?, ?, SUBSTR(?, 1, 8000))",
	"pages":         "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, request_start, response_start, response_end, dom_content_loaded_event_start, dom_content_loaded_event_end, load_event_start, load_event_end, first_paint, first_contentful_paint_time, speed_index, visually_complete, time_to_interactive, url_path, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?)",
	"clicks":        "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, hesitation_time, event_type, selector, normalized_x, normalized_y, url, url_path) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000))",
	"inputs":        "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, label, event_type, duration, hesitation_time) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
	"errors":        "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, source, name, message, error_id, event_type, error_tags_keys, error_tags_values) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
	"performance":   "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, min_fps, avg_fps, max_fps, min_cpu, avg_cpu, max_cpu, min_total_js_heap_size, avg_total_js_heap_size, max_total_js_heap_size, min_used_js_heap_size, avg_used_js_heap_size, max_used_js_heap_size, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
	"requests":      "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, url, request_body, response_body, status, method, duration, success, event_type, transfer_size, url_path) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000))",
	"custom":        "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, name, payload, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
	"graphql":       "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, name, request_body, response_body, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
	"issuesEvents":  "INSERT INTO experimental.events (session_id, project_id, message_id, datetime, issue_id, issue_type, event_type, url, url_path) VALUES (?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000))",
	"issues":        "INSERT INTO experimental.issues (project_id, issue_id, type, context_string) VALUES (?, ?, ?, ?)",
	//Mobile
	"ios_sessions": "INSERT INTO experimental.sessions (session_id, project_id, user_id, user_uuid, user_os, user_os_version, user_device, user_device_type, user_country, user_state, user_city, datetime, duration, pages_count, events_count, errors_count, issue_score, referrer, issue_types, tracker_version, user_browser, user_browser_version, metadata_1, metadata_2, metadata_3, metadata_4, metadata_5, metadata_6, metadata_7, metadata_8, metadata_9, metadata_10, platform, timezone) VALUES (?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), SUBSTR(?, 1, 8000), ?, ?)",
	"ios_custom":   "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, name, payload, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
	"ios_clicks":   "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, label, event_type) VALUES (?, ?, ?, ?, ?, ?)",
	"ios_swipes":   "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, label, direction, event_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
	"ios_inputs":   "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, label, event_type) VALUES (?, ?, ?, ?, ?, ?)",
	"ios_requests": "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, url, request_body, response_body, status, method, duration, success, event_type) VALUES (?, ?, ?, ?, SUBSTR(?, 1, 8000), ?, ?, ?, ?, ?, ?, ?)",
	"ios_crashes":  "INSERT INTO experimental.ios_events (session_id, project_id, message_id, datetime, name, reason, stacktrace, event_type) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
}
// Prepare allocates a fresh Bulk for every known table/query pair so a new
// round of appends can begin.
func (c *connectorImpl) Prepare() error {
	for name, insertQuery := range batches {
		if err := c.newBatch(name, insertQuery); err != nil {
			return fmt.Errorf("can't create %s batch: %s", name, err)
		}
	}
	return nil
}
// Commit swaps the filled batches out for fresh ones and hands the filled
// set to the background worker for sending.
func (c *connectorImpl) Commit() error {
	pending := NewTask()
	for _, bulk := range c.batches {
		pending.bulks = append(pending.bulks, bulk)
	}
	// Replace the map before queueing so new appends never touch the
	// batches that are about to be sent.
	c.batches = make(map[string]Bulk, 20)
	if err := c.Prepare(); err != nil {
		log.Printf("can't prepare new CH batch set: %s", err)
	}
	c.workerTask <- pending
	return nil
}
// Stop signals the worker to drain any queued tasks, waits for it to
// finish, then closes the ClickHouse connection.
func (c *connectorImpl) Stop() error {
	c.done <- struct{}{}
	<-c.finished
	return c.conn.Close()
}
// sendBulks flushes every batch in the task to ClickHouse; failures are
// logged and do not stop the remaining batches from being sent.
func (c *connectorImpl) sendBulks(t *task) {
	for i := range t.bulks {
		if err := t.bulks[i].Send(); err != nil {
			log.Printf("can't send batch: %s", err)
		}
	}
}
// worker serializes batch sending: it loops over queued tasks until the
// done signal arrives, then drains anything still queued and reports back
// on finished.
//
// Fix: the previous shutdown path used `for t := range c.workerTask`, but
// workerTask is never closed, so the range blocked forever, `finished` was
// never signalled and Stop() deadlocked. The drain now uses a non-blocking
// select so shutdown always completes.
func (c *connectorImpl) worker() {
	for {
		select {
		case t := <-c.workerTask:
			c.sendBulks(t)
		case <-c.done:
			// Drain already-queued tasks, then signal completion.
			for {
				select {
				case t := <-c.workerTask:
					c.sendBulks(t)
				default:
					c.finished <- struct{}{}
					return
				}
			}
		}
	}
}
// checkError logs a failed batch append, ignoring the driver's expected
// "batch already sent" sentinel.
func (c *connectorImpl) checkError(name string, err error) {
	if err != clickhouse.ErrBatchAlreadySent {
		log.Printf("can't create %s batch after failed append operation: %s", name, err)
	}
}
// InsertWebInputDuration appends an INPUT event (label, duration,
// hesitation time) to the inputs batch. Unlabeled inputs are skipped.
func (c *connectorImpl) InsertWebInputDuration(session *sessions.Session, msg *messages.InputChange) error {
	if msg.Label == "" {
		return nil
	}
	if err := c.batches["inputs"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MsgID(),
		datetime(msg.Timestamp),
		msg.Label,
		"INPUT",
		nullableUint16(uint16(msg.InputDuration)),
		nullableUint32(uint32(msg.HesitationTime)),
	); err != nil {
		c.checkError("inputs", err)
		return fmt.Errorf("can't append to inputs batch: %s", err)
	}
	return nil
}
// InsertMouseThrashing records a mouse-thrashing issue as both an ISSUE
// event (issuesEvents batch) and an issue definition row (issues batch),
// keyed by a deterministic hash of project/session/timestamp.
func (c *connectorImpl) InsertMouseThrashing(session *sessions.Session, msg *messages.MouseThrashing) error {
	issueID := hashid.MouseThrashingID(session.ProjectID, session.SessionID, msg.Timestamp)
	// Insert issue event to batches
	if err := c.batches["issuesEvents"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MsgID(),
		datetime(msg.Timestamp),
		issueID,
		"mouse_thrashing",
		"ISSUE",
		msg.Url,
		extractUrlPath(msg.Url),
	); err != nil {
		c.checkError("issuesEvents", err)
		return fmt.Errorf("can't append to issuesEvents batch: %s", err)
	}
	if err := c.batches["issues"].Append(
		uint16(session.ProjectID),
		issueID,
		"mouse_thrashing",
		msg.Url,
	); err != nil {
		c.checkError("issues", err)
		return fmt.Errorf("can't append to issues batch: %s", err)
	}
	return nil
}
// InsertIssue records a generic issue event: an ISSUE row in issuesEvents
// plus an issue definition row in issues. Unknown issue types are rejected
// before appending because the ClickHouse enum column would panic on them.
func (c *connectorImpl) InsertIssue(session *sessions.Session, msg *messages.IssueEvent) error {
	issueID := hashid.IssueID(session.ProjectID, msg)
	// Check issue type before insert to avoid panic from clickhouse lib
	switch msg.Type {
	case "click_rage", "dead_click", "excessive_scrolling", "bad_request", "missing_resource", "memory", "cpu", "slow_resource", "slow_page_load", "crash", "ml_cpu", "ml_memory", "ml_dead_click", "ml_click_rage", "ml_mouse_thrashing", "ml_excessive_scrolling", "ml_slow_resources", "custom", "js_exception", "mouse_thrashing", "app_crash":
	default:
		return fmt.Errorf("unknown issueType: %s", msg.Type)
	}
	// Insert issue event to batches
	if err := c.batches["issuesEvents"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MessageID,
		datetime(msg.Timestamp),
		issueID,
		msg.Type,
		"ISSUE",
		msg.URL,
		extractUrlPath(msg.URL),
	); err != nil {
		c.checkError("issuesEvents", err)
		return fmt.Errorf("can't append to issuesEvents batch: %s", err)
	}
	if err := c.batches["issues"].Append(
		uint16(session.ProjectID),
		issueID,
		msg.Type,
		msg.ContextString,
	); err != nil {
		c.checkError("issues", err)
		return fmt.Errorf("can't append to issues batch: %s", err)
	}
	return nil
}
// InsertWebSession appends a finished web session to the sessions batch.
// Sessions without a duration are rejected — only completed sessions are
// stored in ClickHouse. Argument order must match the "sessions" INSERT
// statement in the batches map.
func (c *connectorImpl) InsertWebSession(session *sessions.Session) error {
	if session.Duration == nil {
		return errors.New("trying to insert session with nil duration")
	}
	if err := c.batches["sessions"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		session.UserID,
		session.UserUUID,
		session.UserOS,
		nullableString(session.UserOSVersion),
		nullableString(session.UserDevice),
		session.UserDeviceType,
		session.UserCountry,
		session.UserState,
		session.UserCity,
		datetime(session.Timestamp),
		uint32(*session.Duration),
		uint16(session.PagesCount),
		uint16(session.EventsCount),
		uint16(session.ErrorsCount),
		uint32(session.IssueScore),
		session.Referrer,
		session.IssueTypes,
		session.TrackerVersion,
		session.UserBrowser,
		nullableString(session.UserBrowserVersion),
		session.Metadata1,
		session.Metadata2,
		session.Metadata3,
		session.Metadata4,
		session.Metadata5,
		session.Metadata6,
		session.Metadata7,
		session.Metadata8,
		session.Metadata9,
		session.Metadata10,
		session.Timezone,
		session.UtmSource,
		session.UtmMedium,
		session.UtmCampaign,
	); err != nil {
		c.checkError("sessions", err)
		return fmt.Errorf("can't append to sessions batch: %s", err)
	}
	return nil
}
// extractUrlPath returns the lower-cased path of fullUrl, with "?query"
// appended when a query string is present. Unparseable URLs yield "".
func extractUrlPath(fullUrl string) string {
	_, urlPath, urlQuery, err := url.GetURLParts(fullUrl)
	if err != nil {
		log.Printf("can't parse url: %s", err)
		return ""
	}
	if urlQuery != "" {
		return strings.ToLower(urlPath + "?" + urlQuery)
	}
	return strings.ToLower(urlPath)
}
// InsertWebPageEvent appends a LOCATION event with the page's timing
// metrics to the pages batch. Zero timings become NULLs via the
// nullableUint16 helpers.
func (c *connectorImpl) InsertWebPageEvent(session *sessions.Session, msg *messages.PageEvent) error {
	if err := c.batches["pages"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MessageID,
		datetime(msg.Timestamp),
		msg.URL,
		nullableUint16(uint16(msg.RequestStart)),
		nullableUint16(uint16(msg.ResponseStart)),
		nullableUint16(uint16(msg.ResponseEnd)),
		nullableUint16(uint16(msg.DomContentLoadedEventStart)),
		nullableUint16(uint16(msg.DomContentLoadedEventEnd)),
		nullableUint16(uint16(msg.LoadEventStart)),
		nullableUint16(uint16(msg.LoadEventEnd)),
		nullableUint16(uint16(msg.FirstPaint)),
		nullableUint16(uint16(msg.FirstContentfulPaint)),
		nullableUint16(uint16(msg.SpeedIndex)),
		nullableUint16(uint16(msg.VisuallyComplete)),
		nullableUint16(uint16(msg.TimeToInteractive)),
		extractUrlPath(msg.URL),
		"LOCATION",
	); err != nil {
		c.checkError("pages", err)
		return fmt.Errorf("can't append to pages batch: %s", err)
	}
	return nil
}
// InsertWebClickEvent appends a CLICK event to the clicks batch.
// Unlabeled clicks are skipped. Normalized coordinates arrive as
// percentages; the sentinel 101 means "unknown" and yields NULLs.
func (c *connectorImpl) InsertWebClickEvent(session *sessions.Session, msg *messages.MouseClick) error {
	if msg.Label == "" {
		return nil
	}
	var nX *float32 = nil
	var nY *float32 = nil
	if msg.NormalizedX != 101 && msg.NormalizedY != 101 {
		// To support previous versions of tracker
		// (old trackers sent 0-100 values; scale them to the new 0-10000 range)
		if msg.NormalizedX <= 100 && msg.NormalizedY <= 100 {
			msg.NormalizedX *= 100
			msg.NormalizedY *= 100
		}
		normalizedX := float32(msg.NormalizedX) / 100.0
		normalizedY := float32(msg.NormalizedY) / 100.0
		nXVal := normalizedX
		nX = &nXVal
		nYVal := normalizedY
		nY = &nYVal
	}
	if err := c.batches["clicks"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MsgID(),
		datetime(msg.Timestamp),
		msg.Label,
		nullableUint32(uint32(msg.HesitationTime)),
		"CLICK",
		msg.Selector,
		nX,
		nY,
		msg.Url,
		extractUrlPath(msg.Url),
	); err != nil {
		c.checkError("clicks", err)
		return fmt.Errorf("can't append to clicks batch: %s", err)
	}
	return nil
}
// InsertWebErrorEvent appends an error event to the "errors" batch.
// The error source must be one of the known providers; unknown sources are
// rejected up front to avoid a panic inside the clickhouse library.
// Tags are flattened into parallel key/value slices.
func (c *connectorImpl) InsertWebErrorEvent(session *sessions.Session, msg *types.ErrorEvent) error {
	tagKeys := make([]string, 0, len(msg.Tags))
	tagValues := make([]*string, 0, len(msg.Tags))
	for key, value := range msg.Tags {
		tagKeys = append(tagKeys, key)
		tagValues = append(tagValues, value)
	}
	// Check error source before insert to avoid panic from clickhouse lib.
	knownSources := map[string]bool{
		"js_exception": true, "bugsnag": true, "cloudwatch": true,
		"datadog": true, "elasticsearch": true, "newrelic": true,
		"rollbar": true, "sentry": true, "stackdriver": true, "sumologic": true,
	}
	if !knownSources[msg.Source] {
		return fmt.Errorf("unknown error source: %s", msg.Source)
	}
	msgID, _ := msg.ID(session.ProjectID)
	// Insert event to batch.
	err := c.batches["errors"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MessageID,
		datetime(msg.Timestamp),
		msg.Source,
		nullableString(msg.Name),
		msg.Message,
		msgID,
		"ERROR", // event type tag
		tagKeys,
		tagValues,
	)
	if err != nil {
		c.checkError("errors", err)
		return fmt.Errorf("can't append to errors batch: %s", err)
	}
	return nil
}
// InsertWebPerformanceTrackAggr appends an aggregated performance sample to
// the "performance" batch. The event timestamp is the midpoint of the
// aggregation window. Argument order is positional and must match the batch's
// ClickHouse column order.
func (c *connectorImpl) InsertWebPerformanceTrackAggr(session *sessions.Session, msg *messages.PerformanceTrackAggr) error {
	// Use the middle of the [TimestampStart, TimestampEnd] window as the event time.
	var timestamp uint64 = (msg.TimestampStart + msg.TimestampEnd) / 2
	if err := c.batches["performance"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		uint64(0), // TODO: find messageID for performance events
		datetime(timestamp),
		nullableString(msg.Meta().Url),
		uint8(msg.MinFPS),
		uint8(msg.AvgFPS),
		uint8(msg.MaxFPS),
		uint8(msg.MinCPU),
		uint8(msg.AvgCPU),
		uint8(msg.MaxCPU),
		msg.MinTotalJSHeapSize,
		msg.AvgTotalJSHeapSize,
		msg.MaxTotalJSHeapSize,
		msg.MinUsedJSHeapSize,
		msg.AvgUsedJSHeapSize,
		msg.MaxUsedJSHeapSize,
		"PERFORMANCE", // event type tag
	); err != nil {
		c.checkError("performance", err)
		return fmt.Errorf("can't append to performance batch: %s", err)
	}
	return nil
}
// InsertAutocomplete stores a (type, value) suggestion pair in the
// "autocompletes" batch for the session's project. Empty values are skipped.
func (c *connectorImpl) InsertAutocomplete(session *sessions.Session, msgType, msgValue string) error {
	if msgValue == "" {
		return nil
	}
	err := c.batches["autocompletes"].Append(
		uint16(session.ProjectID),
		msgType,
		msgValue,
	)
	if err != nil {
		c.checkError("autocompletes", err)
		return fmt.Errorf("can't append to autocompletes batch: %s", err)
	}
	return nil
}
// InsertRequest appends a network request event to the "requests" batch.
// Requests whose HTTP method cannot be normalized are rejected. Request and
// response payloads are only stored when savePayload is true (otherwise NULL).
func (c *connectorImpl) InsertRequest(session *sessions.Session, msg *messages.NetworkRequest, savePayload bool) error {
	urlMethod := url.EnsureMethod(msg.Method)
	if urlMethod == "" {
		return fmt.Errorf("can't parse http method. sess: %d, method: %s", session.SessionID, msg.Method)
	}
	var request, response *string
	if savePayload {
		request = &msg.Request
		response = &msg.Response
	}
	if err := c.batches["requests"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.Meta().Index,
		datetime(uint64(msg.Meta().Timestamp)),
		msg.URL,
		request,
		response,
		uint16(msg.Status),
		urlMethod, // reuse the value validated above instead of re-parsing
		uint16(msg.Duration),
		msg.Status < 400, // success flag: anything below HTTP 400
		"REQUEST",        // event type tag
		uint32(msg.TransferredBodySize),
		extractUrlPath(msg.URL),
	); err != nil {
		c.checkError("requests", err)
		return fmt.Errorf("can't append to requests batch: %s", err)
	}
	return nil
}
// InsertCustom appends a user-defined custom event (name + raw payload) to
// the "custom" batch.
func (c *connectorImpl) InsertCustom(session *sessions.Session, msg *messages.CustomEvent) error {
	if err := c.batches["custom"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.Meta().Index,
		datetime(uint64(msg.Meta().Timestamp)),
		msg.Name,
		msg.Payload,
		"CUSTOM", // event type tag
	); err != nil {
		c.checkError("custom", err)
		return fmt.Errorf("can't append to custom batch: %s", err)
	}
	return nil
}
// InsertGraphQL appends a GraphQL operation event to the "graphql" batch.
// Variables and response are nullable (presumably NULL when empty — confirm
// in nullableString).
func (c *connectorImpl) InsertGraphQL(session *sessions.Session, msg *messages.GraphQL) error {
	if err := c.batches["graphql"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.Meta().Index,
		datetime(uint64(msg.Meta().Timestamp)),
		msg.OperationName,
		nullableString(msg.Variables),
		nullableString(msg.Response),
		"GRAPHQL", // event type tag
	); err != nil {
		c.checkError("graphql", err)
		return fmt.Errorf("can't append to graphql batch: %s", err)
	}
	return nil
}
// Mobile events
// InsertMobileSession appends a finished mobile session to the "ios_sessions"
// batch. The session must have a non-nil Duration (i.e. it has ended).
// Argument order is positional and must match the batch's ClickHouse column
// order; the platform tag is hard-coded to "ios".
func (c *connectorImpl) InsertMobileSession(session *sessions.Session) error {
	if session.Duration == nil {
		return errors.New("trying to insert mobile session with nil duration")
	}
	if err := c.batches["ios_sessions"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		session.UserID,
		session.UserUUID,
		session.UserOS,
		nullableString(session.UserOSVersion),
		nullableString(session.UserDevice),
		session.UserDeviceType,
		session.UserCountry,
		session.UserState,
		session.UserCity,
		datetime(session.Timestamp),
		uint32(*session.Duration), // safe: nil checked above
		uint16(session.PagesCount),
		uint16(session.EventsCount),
		uint16(session.ErrorsCount),
		uint32(session.IssueScore),
		session.Referrer,
		session.IssueTypes,
		session.TrackerVersion,
		session.UserBrowser,
		nullableString(session.UserBrowserVersion),
		session.Metadata1,
		session.Metadata2,
		session.Metadata3,
		session.Metadata4,
		session.Metadata5,
		session.Metadata6,
		session.Metadata7,
		session.Metadata8,
		session.Metadata9,
		session.Metadata10,
		"ios", // platform tag
		session.Timezone,
	); err != nil {
		c.checkError("ios_sessions", err)
		return fmt.Errorf("can't append to sessions batch: %s", err)
	}
	return nil
}
// InsertMobileCustom appends a user-defined mobile custom event to the
// "ios_custom" batch.
func (c *connectorImpl) InsertMobileCustom(session *sessions.Session, msg *messages.MobileEvent) error {
	if err := c.batches["ios_custom"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.Meta().Index,
		datetime(uint64(msg.Meta().Timestamp)),
		msg.Name,
		msg.Payload,
		"CUSTOM", // event type tag
	); err != nil {
		c.checkError("ios_custom", err)
		return fmt.Errorf("can't append to mobile custom batch: %s", err)
	}
	return nil
}
// InsertMobileClick appends a labeled tap event to the "ios_clicks" batch.
// Unlabeled taps are skipped.
func (c *connectorImpl) InsertMobileClick(session *sessions.Session, msg *messages.MobileClickEvent) error {
	if msg.Label == "" {
		return nil
	}
	err := c.batches["ios_clicks"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MsgID(),
		datetime(msg.Timestamp),
		msg.Label,
		"TAP", // event type tag
	)
	if err != nil {
		c.checkError("ios_clicks", err)
		return fmt.Errorf("can't append to mobile clicks batch: %s", err)
	}
	return nil
}
// InsertMobileSwipe appends a labeled swipe event to the "ios_swipes" batch.
// Unlabeled swipes are skipped.
// Bug fix: the error path previously reported the "ios_clicks" batch; it now
// correctly references "ios_swipes".
func (c *connectorImpl) InsertMobileSwipe(session *sessions.Session, msg *messages.MobileSwipeEvent) error {
	if msg.Label == "" {
		return nil
	}
	if err := c.batches["ios_swipes"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MsgID(),
		datetime(msg.Timestamp),
		msg.Label,
		nullableString(msg.Direction),
		"SWIPE", // event type tag
	); err != nil {
		c.checkError("ios_swipes", err)
		return fmt.Errorf("can't append to mobile swipes batch: %s", err)
	}
	return nil
}
// InsertMobileInput appends a labeled input event to the "ios_inputs" batch.
// Unlabeled inputs are skipped.
func (c *connectorImpl) InsertMobileInput(session *sessions.Session, msg *messages.MobileInputEvent) error {
	if msg.Label == "" {
		return nil
	}
	err := c.batches["ios_inputs"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MsgID(),
		datetime(msg.Timestamp),
		msg.Label,
		"INPUT", // event type tag
	)
	if err != nil {
		c.checkError("ios_inputs", err)
		return fmt.Errorf("can't append to mobile inputs batch: %s", err)
	}
	return nil
}
// InsertMobileRequest appends a mobile network call to the "ios_requests"
// batch. Requests whose HTTP method cannot be normalized are rejected.
// Request/response payloads are only stored when savePayload is true.
func (c *connectorImpl) InsertMobileRequest(session *sessions.Session, msg *messages.MobileNetworkCall, savePayload bool) error {
	urlMethod := url.EnsureMethod(msg.Method)
	if urlMethod == "" {
		return fmt.Errorf("can't parse http method. sess: %d, method: %s", session.SessionID, msg.Method)
	}
	var request, response *string
	if savePayload {
		request = &msg.Request
		response = &msg.Response
	}
	if err := c.batches["ios_requests"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.Meta().Index,
		datetime(uint64(msg.Meta().Timestamp)),
		msg.URL,
		request,
		response,
		uint16(msg.Status),
		urlMethod, // reuse the value validated above instead of re-parsing
		uint16(msg.Duration),
		msg.Status < 400, // success flag: anything below HTTP 400
		"REQUEST",        // event type tag
	); err != nil {
		c.checkError("ios_requests", err)
		return fmt.Errorf("can't append to mobile requests batch: %s", err)
	}
	return nil
}
// InsertMobileCrash appends a mobile crash report (name, reason, stacktrace)
// to the "ios_crashes" batch.
// Bug fix: corrected the "crashges" typo in the returned error message.
func (c *connectorImpl) InsertMobileCrash(session *sessions.Session, msg *messages.MobileCrash) error {
	if err := c.batches["ios_crashes"].Append(
		session.SessionID,
		uint16(session.ProjectID),
		msg.MsgID(),
		datetime(msg.Timestamp),
		msg.Name,
		msg.Reason,
		msg.Stacktrace,
		"CRASH", // event type tag
	); err != nil {
		c.checkError("ios_crashes", err)
		return fmt.Errorf("can't append to mobile crashes batch: %s", err)
	}
	return nil
}

View file

@ -1,8 +1,12 @@
package auth
import "fmt"
import (
"fmt"
func (a *authImpl) IsAuthorized(authHeader string, permissions []string, isExtension bool) (*User, error) {
"openreplay/backend/pkg/server/user"
)
func (a *authImpl) IsAuthorized(authHeader string, permissions []string, isExtension bool) (*user.User, error) {
secret := a.secret
if isExtension {
secret = a.spotSecret

View file

@ -1,4 +1,4 @@
package api
package auth
import "strings"

View file

@ -2,11 +2,13 @@ package auth
import (
"fmt"
"openreplay/backend/pkg/db/postgres/pool"
"strings"
"openreplay/backend/pkg/db/postgres/pool"
"openreplay/backend/pkg/server/user"
)
func authUser(conn pool.Pool, userID, tenantID, jwtIAT int, isExtension bool) (*User, error) {
func authUser(conn pool.Pool, userID, tenantID, jwtIAT int, isExtension bool) (*user.User, error) {
sql := `SELECT user_id, users.tenant_id, users.name, email, EXTRACT(epoch FROM spot_jwt_iat)::BIGINT AS spot_jwt_iat, roles.permissions
FROM users
JOIN tenants on users.tenant_id = tenants.tenant_id
@ -15,7 +17,7 @@ func authUser(conn pool.Pool, userID, tenantID, jwtIAT int, isExtension bool) (*
if !isExtension {
sql = strings.ReplaceAll(sql, "spot_jwt_iat", "jwt_iat")
}
user := &User{}
user := &user.User{}
var permissions []string
if err := conn.QueryRow(sql, userID, tenantID).
Scan(&user.ID, &user.TenantID, &user.Name, &user.Email, &user.JwtIat, &permissions); err != nil {
@ -33,3 +35,10 @@ func authUser(conn pool.Pool, userID, tenantID, jwtIAT int, isExtension bool) (*
}
return user, nil
}
func abs(x int) int {
if x < 0 {
return -x
}
return x
}

View file

@ -1,3 +1,3 @@
package service
package keys
var getUserSQL = `SELECT tenant_id, name, email FROM public.users WHERE user_id = $1 AND deleted_at IS NULL LIMIT 1`

View file

@ -1,61 +0,0 @@
package api
import (
"encoding/json"
"net/http"
"github.com/gorilla/mux"
"openreplay/backend/pkg/spot/auth"
"openreplay/backend/pkg/spot/service"
)
var routeMatch = map[string]string{
"POST" + "/v1/spots": "createSpot",
"GET" + "/v1/spots/{id}": "getSpot",
"PATCH" + "/v1/spots/{id}": "updateSpot",
"GET" + "/v1/spots": "getSpots",
"DELETE" + "/v1/spots": "deleteSpots",
"POST" + "/v1/spots/{id}/comment": "addComment",
"GET" + "/v1/spots/{id}/video": "getSpotVideo",
"PATCH" + "/v1/spots/{id}/public-key": "updatePublicKey",
}
func (e *Router) logRequest(r *http.Request, bodyBytes []byte, statusCode int) {
pathTemplate, err := mux.CurrentRoute(r).GetPathTemplate()
if err != nil {
e.log.Error(r.Context(), "failed to get path template: %s", err)
}
e.log.Info(r.Context(), "path template: %s", pathTemplate)
if _, ok := routeMatch[r.Method+pathTemplate]; !ok {
e.log.Debug(r.Context(), "no match for route: %s %s", r.Method, pathTemplate)
return
}
// Convert the parameters to json
query := r.URL.Query()
params := make(map[string]interface{})
for key, values := range query {
if len(values) > 1 {
params[key] = values
} else {
params[key] = values[0]
}
}
jsonData, err := json.Marshal(params)
if err != nil {
e.log.Error(r.Context(), "failed to marshal query parameters: %s", err)
}
requestData := &service.RequestData{
Action: routeMatch[r.Method+pathTemplate],
Method: r.Method,
PathFormat: pathTemplate,
Endpoint: r.URL.Path,
Payload: bodyBytes,
Parameters: jsonData,
Status: statusCode,
}
userData := r.Context().Value("userData").(*auth.User)
e.services.Tracer.Trace(userData, requestData)
// DEBUG
e.log.Info(r.Context(), "request data: %v", requestData)
}

View file

@ -1,45 +0,0 @@
package spot
import (
"openreplay/backend/internal/config/spot"
"openreplay/backend/pkg/db/postgres/pool"
"openreplay/backend/pkg/flakeid"
"openreplay/backend/pkg/logger"
"openreplay/backend/pkg/objectstorage"
"openreplay/backend/pkg/objectstorage/store"
"openreplay/backend/pkg/spot/auth"
"openreplay/backend/pkg/spot/service"
"openreplay/backend/pkg/spot/transcoder"
)
type ServicesBuilder struct {
Flaker *flakeid.Flaker
ObjStorage objectstorage.ObjectStorage
Auth auth.Auth
Spots service.Spots
Keys service.Keys
Transcoder transcoder.Transcoder
Tracer service.Tracer
}
func NewServiceBuilder(log logger.Logger, cfg *spot.Config, pgconn pool.Pool) (*ServicesBuilder, error) {
objStore, err := store.NewStore(&cfg.ObjectsConfig)
if err != nil {
return nil, err
}
flaker := flakeid.NewFlaker(cfg.WorkerID)
tracer, err := service.NewTracer(log, pgconn)
if err != nil {
return nil, err
}
spots := service.NewSpots(log, pgconn, flaker)
return &ServicesBuilder{
Flaker: flaker,
ObjStorage: objStore,
Auth: auth.NewAuth(log, cfg.JWTSecret, cfg.JWTSpotSecret, pgconn),
Spots: spots,
Keys: service.NewKeys(log, pgconn),
Transcoder: transcoder.NewTranscoder(cfg, log, objStore, pgconn, spots),
Tracer: tracer,
}, nil
}

View file

@ -1,104 +0,0 @@
package service
import (
"context"
"errors"
"openreplay/backend/pkg/db/postgres"
db "openreplay/backend/pkg/db/postgres/pool"
"openreplay/backend/pkg/logger"
"openreplay/backend/pkg/pool"
"openreplay/backend/pkg/spot/auth"
)
type Tracer interface {
Trace(user *auth.User, data *RequestData) error
Close() error
}
type tracerImpl struct {
log logger.Logger
conn db.Pool
traces postgres.Bulk
saver pool.WorkerPool
}
func NewTracer(log logger.Logger, conn db.Pool) (Tracer, error) {
switch {
case log == nil:
return nil, errors.New("logger is required")
case conn == nil:
return nil, errors.New("connection is required")
}
tracer := &tracerImpl{
log: log,
conn: conn,
}
if err := tracer.initBulk(); err != nil {
return nil, err
}
tracer.saver = pool.NewPool(1, 200, tracer.sendTraces)
return tracer, nil
}
func (t *tracerImpl) initBulk() (err error) {
t.traces, err = postgres.NewBulk(t.conn,
"traces",
"(user_id, tenant_id, auth, action, method, path_format, endpoint, payload, parameters, status)",
"($%d, $%d, $%d, $%d, $%d, $%d, $%d, $%d, $%d, $%d)",
10, 50)
if err != nil {
return err
}
return nil
}
type Task struct {
UserID *uint64
TenantID uint64
Auth *string
Data *RequestData
}
func (t *tracerImpl) sendTraces(payload interface{}) {
rec := payload.(*Task)
t.log.Info(context.Background(), "Sending traces, %v", rec)
if err := t.traces.Append(rec.UserID, rec.TenantID, rec.Auth, rec.Data.Action, rec.Data.Method, rec.Data.PathFormat,
rec.Data.Endpoint, rec.Data.Payload, rec.Data.Parameters, rec.Data.Status); err != nil {
t.log.Error(context.Background(), "can't append trace: %s", err)
}
}
type RequestData struct {
Action string
Method string
PathFormat string
Endpoint string
Payload []byte
Parameters []byte
Status int
}
func (t *tracerImpl) Trace(user *auth.User, data *RequestData) error {
switch {
case user == nil:
return errors.New("user is required")
case data == nil:
return errors.New("request is required")
}
trace := &Task{
UserID: &user.ID,
TenantID: user.TenantID,
Auth: &user.AuthMethod,
Data: data,
}
t.saver.Submit(trace)
return nil
}
func (t *tracerImpl) Close() error {
t.saver.Stop()
if err := t.traces.Send(); err != nil {
return err
}
return nil
}

View file

@ -8,6 +8,8 @@ import { observer } from 'mobx-react-lite';
import { NoContent, Icon } from 'UI';
import { Tag, Tooltip } from 'antd';
import { useModal } from 'App/components/Modal';
import { useStore } from '@/mstore';
import Filter from '@/mstore/types/filter';
interface Props {
metric?: Widget;
@ -17,19 +19,30 @@ interface Props {
}
function FunnelWidget(props: Props) {
const { dashboardStore, searchStore } = useStore();
const [focusedFilter, setFocusedFilter] = React.useState<number | null>(null);
const { isWidget = false, data, metric, compData } = props;
const funnel = data.funnel || { stages: [] };
const totalSteps = funnel.stages.length;
const stages = isWidget
? [...funnel.stages.slice(0, 1), funnel.stages[funnel.stages.length - 1]]
: funnel.stages;
const stages = isWidget ? [...funnel.stages.slice(0, 1), funnel.stages[funnel.stages.length - 1]] : funnel.stages;
const hasMoreSteps = funnel.stages.length > 2;
const lastStage = funnel.stages[funnel.stages.length - 1];
const remainingSteps = totalSteps - 2;
const { hideModal } = useModal();
const metricLabel =
metric?.metricFormat == 'userCount' ? 'Users' : 'Sessions';
const metricLabel = metric?.metricFormat == 'userCount' ? 'Users' : 'Sessions';
const drillDownFilter = dashboardStore.drillDownFilter;
const drillDownPeriod = dashboardStore.drillDownPeriod;
const metricFilters = metric?.series[0]?.filter.filters || [];
const applyDrillDown = (index: number) => {
const filter = new Filter().fromData({ filters: metricFilters.slice(0, index + 1) });
const periodTimestamps = drillDownPeriod.toTimestamps();
drillDownFilter.merge({
filters: filter.toJson().filters,
startTimestamp: periodTimestamps.startTimestamp,
endTimestamp: periodTimestamps.endTimestamp
});
};
useEffect(() => {
return () => {
@ -53,6 +66,8 @@ function FunnelWidget(props: Props) {
}
}
});
applyDrillDown(focusedFilter === index ? -1 : index);
};
const shownStages = React.useMemo(() => {

View file

@ -1,7 +1,7 @@
import { useQuery } from '@tanstack/react-query';
import React from 'react';
import { VList, VListHandle } from 'virtua';
import { PlayerContext } from "App/components/Session/playerContext";
import { PlayerContext } from 'App/components/Session/playerContext';
import { processLog, UnifiedLog } from './utils';
import { observer } from 'mobx-react-lite';
import { useStore } from 'App/mstore';
@ -13,13 +13,10 @@ import BottomBlock from 'App/components/shared/DevTools/BottomBlock';
import { capitalize } from 'App/utils';
import { Icon } from 'UI';
import { Segmented, Input, Tooltip } from 'antd';
import {SearchOutlined} from '@ant-design/icons';
import { SearchOutlined } from '@ant-design/icons';
import { client } from 'App/mstore';
import { FailedFetch, LoadingFetch } from "./StatusMessages";
import {
TableHeader,
LogRow
} from './Table'
import { FailedFetch, LoadingFetch } from './StatusMessages';
import { TableHeader, LogRow } from './Table';
async function fetchLogs(
tab: string,
@ -31,23 +28,24 @@ async function fetchLogs(
);
const json = await data.json();
try {
const logsResp = await fetch(json.url)
const logsResp = await fetch(json.url);
if (logsResp.ok) {
const logJson = await logsResp.json()
if (logJson.length === 0) return []
return processLog(logJson)
const logJson = await logsResp.json();
if (logJson.length === 0) return [];
return processLog(logJson);
} else {
throw new Error('Failed to fetch logs')
throw new Error('Failed to fetch logs');
}
} catch (e) {
console.log(e)
throw e
console.log(e);
throw e;
}
}
function BackendLogsPanel() {
const { projectsStore, sessionStore, integrationsStore } = useStore();
const integratedServices = integrationsStore.integrations.backendLogIntegrations;
const integratedServices =
integrationsStore.integrations.backendLogIntegrations;
const defaultTab = integratedServices[0]!.name;
const sessionId = sessionStore.currentId;
const projectId = projectsStore.siteId!;
@ -83,59 +81,59 @@ function BackendLogsPanel() {
return (
<BottomBlock style={{ height: '100%' }}>
<BottomBlock.Header>
<div className='flex items-center justify-between w-full'>
<div className={'flex gap-2 items-center'}>
<div className={'font-semibold'}>Traces</div>
{tabs.length && tab ? (
<div>
<Segmented options={tabs} value={tab} onChange={setTab} size='small' />
</div>
) : null}
</div>
<div className='flex items-center gap-2'>
<Segmented
options={[
{ label: 'All Tabs', value: 'all', },
{ label: (
<Tooltip title="Backend logs are fetched for all tabs combined.">
<span>Current Tab</span>
</Tooltip>),
value: 'current', disabled: true},
]}
defaultValue="all"
size="small"
className="rounded-full font-medium"
/>
<Input
className="rounded-lg"
placeholder="Filter by keyword"
name="filter"
onChange={onFilterChange}
value={filter}
size='small'
prefix={<SearchOutlined className='text-neutral-400' />}
<div className="flex items-center justify-between w-full">
<div className={'flex gap-2 items-center'}>
<div className={'font-semibold'}>Traces</div>
{tabs.length && tab ? (
<div>
<Segmented
options={tabs}
value={tab}
onChange={setTab}
size="small"
/>
</div>
</div>
) : null}
</div>
<div className="flex items-center gap-2">
<Segmented
options={[
{ label: 'All Tabs', value: 'all' },
{
label: (
<Tooltip title="Backend logs are fetched for all tabs combined.">
<span>Current Tab</span>
</Tooltip>
),
value: 'current',
disabled: true,
},
]}
defaultValue="all"
size="small"
className="rounded-full font-medium"
/>
<Input
className="rounded-lg"
placeholder="Filter by keyword"
name="filter"
onChange={onFilterChange}
value={filter}
size="small"
prefix={<SearchOutlined className="text-neutral-400" />}
/>
</div>
</div>
</BottomBlock.Header>
<BottomBlock.Content className="overflow-y-auto">
{isPending ? (
<LoadingFetch provider={capitalize(tab)} />
) : null}
{isPending ? <LoadingFetch provider={capitalize(tab)} /> : null}
{isError ? (
<FailedFetch
provider={capitalize(tab)}
onRetry={refetch}
/>
) : null}
{isSuccess ? (
<LogsTable data={data} />
<FailedFetch provider={capitalize(tab)} onRetry={refetch} />
) : null}
{isSuccess ? <LogsTable data={data} /> : null}
</BottomBlock.Content>
</BottomBlock>
);
@ -148,8 +146,10 @@ const LogsTable = observer(({ data }: { data: UnifiedLog[] }) => {
const _list = React.useRef<VListHandle>(null);
const activeIndex = React.useMemo(() => {
const currTs = time + sessionStart;
const index = data.findIndex(
(log) => log.timestamp !== 'N/A' ? new Date(log.timestamp).getTime() >= currTs : false
const index = data.findIndex((log) =>
log.timestamp !== 'N/A'
? new Date(log.timestamp).getTime() >= currTs
: false
);
return index === -1 ? data.length - 1 : index;
}, [time, data.length]);
@ -161,17 +161,22 @@ const LogsTable = observer(({ data }: { data: UnifiedLog[] }) => {
const onJump = (ts: number) => {
player.jump(ts - sessionStart);
}
};
return (
<>
<TableHeader size={data.length} />
<VList ref={_list} count={data.length}>
{data.map((log, index) => (
<LogRow key={index} isActive={index === activeIndex} log={log} onJump={onJump} />
<LogRow
key={index}
isActive={index === activeIndex}
log={log}
onJump={onJump}
/>
))}
</VList>
</>
)
);
});
export default observer(BackendLogsPanel);

View file

@ -22,7 +22,6 @@ const Tabs = ({ tabs, active, onClick, border = true, className }: Props) => {
return (
<div className={cn(stl.tabs, className, { [stl.bordered]: border })} role="tablist">
<Segmented
className='w-full'
size="small"
value={active}
options={tabs.map(({ key, text, hidden = false, disabled = false, iconComp = null }) => ({

View file

@ -135,7 +135,6 @@ function WebOverviewPanelCont() {
'ERRORS',
'NETWORK',
]);
const globalTabs = ['FRUSTRATIONS', 'ERRORS']
const { endTime, currentTab, tabStates } = store.get();
@ -347,7 +346,7 @@ function PanelComponent({
list={selectedFeatures}
updateList={setSelectedFeatures}
/>
<TabSelector />
{!isMobile ? <TabSelector /> : null}
<TimelineZoomButton />
</div>
)}
@ -370,7 +369,14 @@ function PanelComponent({
</div>
}
>
{isSpot ? <VerticalPointerLineComp time={spotTime} endTime={spotEndTime} /> : <VerticalPointerLine />}
{isSpot ? (
<VerticalPointerLineComp
time={spotTime}
endTime={spotEndTime}
/>
) : (
<VerticalPointerLine />
)}
{selectedFeatures.map((feature: any, index: number) => (
<div
key={feature}
@ -381,7 +387,7 @@ function PanelComponent({
<EventRow
isGraph={feature === 'PERFORMANCE'}
title={feature}
disabled={!showSingleTab}
disabled={!isMobile && !showSingleTab}
list={resources[feature]}
renderElement={(pointer: any[], isGrouped: boolean) => (
<TimelinePointer

View file

@ -58,11 +58,12 @@ const PerformanceGraph = React.memo((props: Props) => {
{disabled ? (
<div
className={
'flex justify-start'
'flex justify-center'
}
>
<div className={'text-xs text-neutral-400 ps-2'}>
Multi-tab performance overview is not available.</div>
Multi-tab performance overview is not available.
</div>
</div>
) : null}
<ResponsiveContainer height={35}>

View file

@ -106,11 +106,11 @@ const WarnBadge = React.memo(
>
<div>
<div>
Tracker version({version}) for this recording is{' '}
Tracker version ({version}) for this recording is{' '}
{trackerVerDiff === VersionComparison.Lower
? 'lower '
: 'ahead of '}
the current({trackerVersion}) version.
the current ({trackerVersion}) version.
</div>
<div>
<span>Some recording might display incorrectly.</span>

View file

@ -70,6 +70,7 @@ function SpotConsole({ onClose }: { onClose: () => void }) {
jump={jump}
iconProps={getIconProps(log.level)}
renderWithNL={renderWithNL}
showSingleTab
/>
))}
</VList>

View file

@ -143,7 +143,7 @@ function SpotPlayerHeader({
{browserVersion && (
<>
<div>·</div>
<div className="capitalize">Chrome v{browserVersion}</div>
<div>Chromium v{browserVersion}</div>
</>
)}
{resolution && (

View file

@ -130,22 +130,17 @@ function ConsolePanel({
}, [currentTab, tabStates, dataSource, tabValues, isLive])
const getTabNum = (tab: string) => (tabsArr.findIndex((t) => t === tab) + 1);
const list = isLive
? (useMemo(
() => logListNow.concat(exceptionsListNow).sort((a, b) => a.time - b.time),
[logListNow.length, exceptionsListNow.length]
) as ILog[])
: (useMemo(
() => logList.concat(exceptionsList).sort((a, b) => a.time - b.time),
[logList.length, exceptionsList.length]
).filter((l) =>
zoomEnabled ? l.time >= zoomStartTs && l.time <= zoomEndTs : true
) as ILog[]);
const list = useMemo(() => {
if (isLive) {
return logListNow.concat(exceptionsListNow).sort((a, b) => a.time - b.time)
} else {
const logs = logList.concat(exceptionsList).sort((a, b) => a.time - b.time)
return zoomEnabled ? logs.filter(l => l.time >= zoomStartTs && l.time <= zoomEndTs) : logs
}
}, [isLive, logList.length, exceptionsList.length, logListNow.length, exceptionsListNow.length, zoomEnabled, zoomStartTs, zoomEndTs])
let filteredList = useRegExListFilterMemo(list, (l) => l.value, filter);
filteredList = useTabListFilterMemo(filteredList, (l) => LEVEL_TAB[l.level], ALL, activeTab);
React.useEffect(() => {
}, [activeTab, filter]);
const onTabClick = (activeTab: any) => devTools.update(INDEX_KEY, { activeTab });
const onFilterChange = ({ target: { value } }: any) =>
devTools.update(INDEX_KEY, { filter: value });

View file

@ -1,10 +1,13 @@
import React, { useEffect, useRef, useState } from 'react';
import { LogLevel, ILog } from 'Player';
import BottomBlock from '../BottomBlock';
import { Tabs, Input, Icon, NoContent } from 'UI';
import { Tabs, Input, NoContent } from 'UI';
import cn from 'classnames';
import ConsoleRow from '../ConsoleRow';
import { IOSPlayerContext, MobilePlayerContext } from 'App/components/Session/playerContext';
import {
IOSPlayerContext,
MobilePlayerContext,
} from 'App/components/Session/playerContext';
import { observer } from 'mobx-react-lite';
import { VList, VListHandle } from 'virtua';
import { useStore } from 'App/mstore';
@ -12,7 +15,7 @@ import ErrorDetailsModal from 'App/components/Dashboard/components/Errors/ErrorD
import { useModal } from 'App/components/Modal';
import useAutoscroll, { getLastItemTime } from '../useAutoscroll';
import { useRegExListFilterMemo, useTabListFilterMemo } from '../useListFilter';
import {InfoCircleOutlined} from '@ant-design/icons'
import { InfoCircleOutlined, SearchOutlined } from '@ant-design/icons';
const ALL = 'ALL';
const INFO = 'INFO';
@ -27,7 +30,10 @@ const LEVEL_TAB = {
[LogLevel.EXCEPTION]: ERRORS,
} as const;
const TABS = [ALL, ERRORS, WARNINGS, INFO].map((tab) => ({ text: tab, key: tab }));
const TABS = [ALL, ERRORS, WARNINGS, INFO].map((tab) => ({
text: tab,
key: tab,
}));
function renderWithNL(s: string | null = '') {
if (typeof s !== 'string') return '';
@ -74,20 +80,23 @@ function MobileConsolePanel() {
const [isDetailsModalActive, setIsDetailsModalActive] = useState(false);
const { showModal } = useModal();
const { player, store } = React.useContext<IOSPlayerContext>(MobilePlayerContext);
const { player, store } =
React.useContext<IOSPlayerContext>(MobilePlayerContext);
const jump = (t: number) => player.jump(t);
const {
logList,
logListNow,
exceptionsListNow,
} = store.get();
const { logList, logListNow, exceptionsListNow } = store.get();
const list = logList as ILog[];
let filteredList = useRegExListFilterMemo(list, (l) => l.value, filter);
filteredList = useTabListFilterMemo(filteredList, (l) => LEVEL_TAB[l.level], ALL, activeTab);
filteredList = useTabListFilterMemo(
filteredList,
(l) => LEVEL_TAB[l.level],
ALL,
activeTab
);
const onTabClick = (activeTab: any) => devTools.update(INDEX_KEY, { activeTab });
const onTabClick = (activeTab: any) =>
devTools.update(INDEX_KEY, { activeTab });
const onFilterChange = ({ target: { value } }: any) =>
devTools.update(INDEX_KEY, { filter: value });
@ -137,7 +146,12 @@ function MobileConsolePanel() {
<BottomBlock.Header>
<div className="flex items-center">
<span className="font-semibold color-gray-medium mr-4">Console</span>
<Tabs tabs={TABS} active={activeTab} onClick={onTabClick} border={false} />
<Tabs
tabs={TABS}
active={activeTab}
onClick={onTabClick}
border={false}
/>
</div>
<Input
className="rounded-lg"
@ -145,8 +159,8 @@ function MobileConsolePanel() {
name="filter"
onChange={onFilterChange}
value={filter}
size='small'
prefix={<SearchOutlined className='text-neutral-400' />}
size="small"
prefix={<SearchOutlined className="text-neutral-400" />}
/>
</BottomBlock.Header>
<BottomBlock.Content className="overflow-y-auto">
@ -160,11 +174,7 @@ function MobileConsolePanel() {
size="small"
show={filteredList.length === 0}
>
<VList
ref={_list}
itemSize={25}
count={filteredList.length || 1}
>
<VList ref={_list} itemSize={25} count={filteredList.length || 1}>
{filteredList.map((log, index) => (
<ConsoleRow
key={log.time + index}
@ -173,6 +183,7 @@ function MobileConsolePanel() {
iconProps={getIconProps(log.level)}
renderWithNL={renderWithNL}
onClick={() => showDetails(log)}
showSingleTab
/>
))}
</VList>

View file

@ -12,7 +12,7 @@ interface Props {
renderWithNL?: any;
style?: any;
onClick?: () => void;
getTabNum: (tab: string) => number;
getTabNum?: (tab: string) => number;
showSingleTab: boolean;
}
function ConsoleRow(props: Props) {
@ -45,7 +45,7 @@ function ConsoleRow(props: Props) {
const titleLine = lines[0];
const restLines = lines.slice(1);
const logSource = props.showSingleTab ? -1 : props.getTabNum(log.tabId);
const logSource = props.showSingleTab ? -1 : props.getTabNum?.(log.tabId);
const logTabId = log.tabId
return (
<div

View file

@ -565,7 +565,7 @@ export const NetworkPanelComp = observer(
render: renderDuration,
},
];
if (!showSingleTab) {
if (!showSingleTab && !isSpot) {
cols.unshift({
label: 'Source',
width: 64,
@ -604,7 +604,7 @@ export const NetworkPanelComp = observer(
)}
</div>
<div className={'flex items-center gap-2'}>
<TabSelector />
{!isMobile && !isSpot ? <TabSelector /> : null}
<Input
className="rounded-lg"
placeholder="Filter by name, type, method or value"

View file

@ -2,9 +2,12 @@ import { Timed } from 'Player';
import React, { useEffect, useMemo, useState } from 'react';
import { observer } from 'mobx-react-lite';
import { Tabs, NoContent, Icon } from 'UI';
import {Input} from 'antd';
import {SearchOutlined, InfoCircleOutlined} from '@ant-design/icons';
import { PlayerContext, MobilePlayerContext } from 'App/components/Session/playerContext';
import { Input } from 'antd';
import { SearchOutlined, InfoCircleOutlined } from '@ant-design/icons';
import {
PlayerContext,
MobilePlayerContext,
} from 'App/components/Session/playerContext';
import BottomBlock from '../BottomBlock';
import { useModal } from 'App/components/Modal';
import { useStore } from 'App/mstore';
@ -12,7 +15,7 @@ import { typeList } from 'Types/session/stackEvent';
import StackEventRow from 'Shared/DevTools/StackEventRow';
import StackEventModal from '../StackEventModal';
import { Segmented, Tooltip } from 'antd'
import { Segmented, Tooltip } from 'antd';
import useAutoscroll, { getLastItemTime } from '../useAutoscroll';
import { useRegExListFilterMemo, useTabListFilterMemo } from '../useListFilter';
import { VList, VListHandle } from 'virtua';
@ -27,214 +30,247 @@ const ALL = 'ALL';
const TAB_KEYS = [ALL, ...typeList] as const;
const TABS = TAB_KEYS.map((tab) => ({ text: tab, key: tab }));
type EventsList = Array<Timed & { name: string; source: string; key: string; payload?: string[] }>;
type EventsList = Array<
Timed & { name: string; source: string; key: string; payload?: string[] }
>;
const WebStackEventPanelComp = observer(
() => {
const { uiPlayerStore } = useStore();
const zoomEnabled = uiPlayerStore.timelineZoom.enabled;
const zoomStartTs = uiPlayerStore.timelineZoom.startTs;
const zoomEndTs = uiPlayerStore.timelineZoom.endTs;
const { player, store } = React.useContext(PlayerContext);
const jump = (t: number) => player.jump(t);
const { currentTab, tabStates } = store.get();
const WebStackEventPanelComp = observer(() => {
const { uiPlayerStore } = useStore();
const zoomEnabled = uiPlayerStore.timelineZoom.enabled;
const zoomStartTs = uiPlayerStore.timelineZoom.startTs;
const zoomEndTs = uiPlayerStore.timelineZoom.endTs;
const { player, store } = React.useContext(PlayerContext);
const jump = (t: number) => player.jump(t);
const { currentTab, tabStates } = store.get();
const { stackList: list = [], stackListNow: listNow = [] } = tabStates[currentTab];
const { stackList: list = [], stackListNow: listNow = [] } =
tabStates[currentTab];
return (
<EventsPanel
list={list as EventsList}
listNow={listNow as EventsList}
jump={jump}
zoomEnabled={zoomEnabled}
zoomStartTs={zoomStartTs}
zoomEndTs={zoomEndTs}
/>
);
}
);
return (
<EventsPanel
list={list as EventsList}
listNow={listNow as EventsList}
jump={jump}
zoomEnabled={zoomEnabled}
zoomStartTs={zoomStartTs}
zoomEndTs={zoomEndTs}
/>
);
});
export const WebStackEventPanel = WebStackEventPanelComp;
const MobileStackEventPanelComp = observer(
() => {
const { uiPlayerStore } = useStore();
const zoomEnabled = uiPlayerStore.timelineZoom.enabled;
const zoomStartTs = uiPlayerStore.timelineZoom.startTs;
const zoomEndTs = uiPlayerStore.timelineZoom.endTs;
const { player, store } = React.useContext(MobilePlayerContext);
const jump = (t: number) => player.jump(t);
const { eventList: list = [], eventListNow: listNow = [] } = store.get();
const MobileStackEventPanelComp = observer(() => {
const { uiPlayerStore } = useStore();
const zoomEnabled = uiPlayerStore.timelineZoom.enabled;
const zoomStartTs = uiPlayerStore.timelineZoom.startTs;
const zoomEndTs = uiPlayerStore.timelineZoom.endTs;
const { player, store } = React.useContext(MobilePlayerContext);
const jump = (t: number) => player.jump(t);
const { eventList: list = [], eventListNow: listNow = [] } = store.get();
return (
<EventsPanel
list={list as EventsList}
listNow={listNow as EventsList}
jump={jump}
zoomEnabled={zoomEnabled}
zoomStartTs={zoomStartTs}
zoomEndTs={zoomEndTs}
/>
);
}
);
return (
<EventsPanel
list={list as EventsList}
listNow={listNow as EventsList}
jump={jump}
isMobile
zoomEnabled={zoomEnabled}
zoomStartTs={zoomStartTs}
zoomEndTs={zoomEndTs}
/>
);
});
export const MobileStackEventPanel = MobileStackEventPanelComp;
const EventsPanel = observer(({
list,
listNow,
jump,
zoomEnabled,
zoomStartTs,
zoomEndTs,
}: {
list: EventsList;
listNow: EventsList;
jump: (t: number) => void;
zoomEnabled: boolean;
zoomStartTs: number;
zoomEndTs: number;
}) => {
const {
sessionStore: { devTools },
} = useStore();
const { showModal } = useModal();
const [isDetailsModalActive, setIsDetailsModalActive] = useState(false); // TODO:embed that into useModal
const filter = devTools[INDEX_KEY].filter;
const activeTab = devTools[INDEX_KEY].activeTab;
const activeIndex = devTools[INDEX_KEY].index;
const EventsPanel = observer(
({
list,
listNow,
jump,
zoomEnabled,
zoomStartTs,
zoomEndTs,
isMobile,
}: {
list: EventsList;
listNow: EventsList;
jump: (t: number) => void;
zoomEnabled: boolean;
zoomStartTs: number;
zoomEndTs: number;
isMobile?: boolean;
}) => {
const {
sessionStore: { devTools },
} = useStore();
const { showModal } = useModal();
const [isDetailsModalActive, setIsDetailsModalActive] = useState(false); // TODO:embed that into useModal
const filter = devTools[INDEX_KEY].filter;
const activeTab = devTools[INDEX_KEY].activeTab;
const activeIndex = devTools[INDEX_KEY].index;
const inZoomRangeList = list.filter(({ time }) =>
zoomEnabled ? zoomStartTs <= time && time <= zoomEndTs : true
);
const inZoomRangeListNow = listNow.filter(({ time }) =>
zoomEnabled ? zoomStartTs <= time && time <= zoomEndTs : true
);
const inZoomRangeList = list.filter(({ time }) =>
zoomEnabled ? zoomStartTs <= time && time <= zoomEndTs : true
);
const inZoomRangeListNow = listNow.filter(({ time }) =>
zoomEnabled ? zoomStartTs <= time && time <= zoomEndTs : true
);
let filteredList = useRegExListFilterMemo(inZoomRangeList, (it) => {
const searchBy = [it.name]
if (it.payload) {
const payload = Array.isArray(it.payload) ? it.payload.join(',') : JSON.stringify(it.payload);
searchBy.push(payload);
}
return searchBy
}, filter);
filteredList = useTabListFilterMemo(filteredList, (it) => it.source, ALL, activeTab);
const onTabClick = (activeTab: (typeof TAB_KEYS)[number]) =>
devTools.update(INDEX_KEY, { activeTab });
const onFilterChange = ({ target: { value } }: React.ChangeEvent<HTMLInputElement>) => devTools.update(INDEX_KEY, { filter: value });
const tabs = useMemo(
() => TABS.filter(({ key }) => key === ALL || inZoomRangeList.some(({ source }) => key === source)),
[inZoomRangeList.length]
);
const [timeoutStartAutoscroll, stopAutoscroll] = useAutoscroll(
filteredList,
getLastItemTime(inZoomRangeListNow),
activeIndex,
(index) => devTools.update(INDEX_KEY, { index })
);
const onMouseEnter = stopAutoscroll;
const onMouseLeave = () => {
if (isDetailsModalActive) {
return;
}
timeoutStartAutoscroll();
};
const showDetails = (item: any) => {
setIsDetailsModalActive(true);
showModal(<StackEventModal event={item} />, {
right: true,
width: 500,
onClose: () => {
setIsDetailsModalActive(false);
timeoutStartAutoscroll();
let filteredList = useRegExListFilterMemo(
inZoomRangeList,
(it) => {
const searchBy = [it.name];
if (it.payload) {
const payload = Array.isArray(it.payload)
? it.payload.join(',')
: JSON.stringify(it.payload);
searchBy.push(payload);
}
return searchBy;
},
});
devTools.update(INDEX_KEY, { index: filteredList.indexOf(item) });
stopAutoscroll();
};
filter
);
filteredList = useTabListFilterMemo(
filteredList,
(it) => it.source,
ALL,
activeTab
);
const _list = React.useRef<VListHandle>(null);
useEffect(() => {
if (_list.current) {
_list.current.scrollToIndex(activeIndex);
}
}, [activeIndex]);
const onTabClick = (activeTab: (typeof TAB_KEYS)[number]) =>
devTools.update(INDEX_KEY, { activeTab });
const onFilterChange = ({
target: { value },
}: React.ChangeEvent<HTMLInputElement>) =>
devTools.update(INDEX_KEY, { filter: value });
const tabs = useMemo(
() =>
TABS.filter(
({ key }) =>
key === ALL || inZoomRangeList.some(({ source }) => key === source)
),
[inZoomRangeList.length]
);
return (
<BottomBlock style={{ height: '100%' }} onMouseEnter={onMouseEnter} onMouseLeave={onMouseLeave}>
<BottomBlock.Header>
<div className="flex items-center">
<span className="font-semibold color-gray-medium mr-4">Stack Events</span>
<Tabs
renameTab={mapNames}
tabs={tabs}
active={activeTab}
onClick={onTabClick}
border={false}
/>
</div>
<div className={'flex items-center gap-2'}>
<Segmented
options={[
{ label: 'All Tabs', value: 'all', },
{ label: (
<Tooltip title="Stack Events overview is available only for all tabs combined.">
<span>Current Tab</span>
</Tooltip>),
value: 'current', disabled: true},
]}
defaultValue="all"
size="small"
className="rounded-full font-medium"
/>
<Input
className="rounded-lg"
placeholder="Filter by keyword"
name="filter"
height={28}
onChange={onFilterChange}
value={filter}
size='small'
prefix={<SearchOutlined className='text-neutral-400' />}
/>
</div>
</BottomBlock.Header>
<BottomBlock.Content className="overflow-y-auto">
<NoContent
title={
<div className="capitalize flex items-center mt-16 gap-2">
<InfoCircleOutlined size={18} />
No Data
</div>
}
size="small"
show={filteredList.length === 0}
>
<VList
ref={_list}
count={filteredList.length || 1}
>
{filteredList.map((item, index) => (
<StackEventRow
isActive={activeIndex === index}
key={item.key}
event={item}
onJump={() => {
stopAutoscroll();
devTools.update(INDEX_KEY, { index: filteredList.indexOf(item) });
jump(item.time);
}}
onClick={() => showDetails(item)}
const [timeoutStartAutoscroll, stopAutoscroll] = useAutoscroll(
filteredList,
getLastItemTime(inZoomRangeListNow),
activeIndex,
(index) => devTools.update(INDEX_KEY, { index })
);
const onMouseEnter = stopAutoscroll;
const onMouseLeave = () => {
if (isDetailsModalActive) {
return;
}
timeoutStartAutoscroll();
};
const showDetails = (item: any) => {
setIsDetailsModalActive(true);
showModal(<StackEventModal event={item} />, {
right: true,
width: 500,
onClose: () => {
setIsDetailsModalActive(false);
timeoutStartAutoscroll();
},
});
devTools.update(INDEX_KEY, { index: filteredList.indexOf(item) });
stopAutoscroll();
};
const _list = React.useRef<VListHandle>(null);
useEffect(() => {
if (_list.current) {
_list.current.scrollToIndex(activeIndex);
}
}, [activeIndex]);
return (
<BottomBlock
style={{ height: '100%' }}
onMouseEnter={onMouseEnter}
onMouseLeave={onMouseLeave}
>
<BottomBlock.Header>
<div className="flex items-center">
<span className="font-semibold color-gray-medium mr-4">
Stack Events
</span>
<Tabs
renameTab={mapNames}
tabs={tabs}
active={activeTab}
onClick={onTabClick}
border={false}
/>
</div>
<div className={'flex items-center gap-2'}>
{isMobile ? null : (
<Segmented
options={[
{ label: 'All Tabs', value: 'all' },
{
label: (
<Tooltip title="Stack Events overview is available only for all tabs combined.">
<span>Current Tab</span>
</Tooltip>
),
value: 'current',
disabled: true,
},
]}
defaultValue="all"
size="small"
className="rounded-full font-medium"
/>
))}
</VList>
</NoContent>
</BottomBlock.Content>
</BottomBlock>
);
});
)}
<Input
className="rounded-lg"
placeholder="Filter by keyword"
name="filter"
height={28}
onChange={onFilterChange}
value={filter}
size="small"
prefix={<SearchOutlined className="text-neutral-400" />}
/>
</div>
</BottomBlock.Header>
<BottomBlock.Content className="overflow-y-auto">
<NoContent
title={
<div className="capitalize flex items-center mt-16 gap-2">
<InfoCircleOutlined size={18} />
No Data
</div>
}
size="small"
show={filteredList.length === 0}
>
<VList ref={_list} count={filteredList.length || 1}>
{filteredList.map((item, index) => (
<StackEventRow
isActive={activeIndex === index}
key={item.key}
event={item}
onJump={() => {
stopAutoscroll();
devTools.update(INDEX_KEY, {
index: filteredList.indexOf(item),
});
jump(item.time);
}}
onClick={() => showDetails(item)}
/>
))}
</VList>
</NoContent>
</BottomBlock.Content>
</BottomBlock>
);
}
);

View file

@ -3,12 +3,12 @@ import { Tooltip } from 'antd';
import { observer } from 'mobx-react-lite';
import { PlayerContext } from 'Components/Session/playerContext';
function TabTag({ logSource }: { logSource: number; logTabId: string }) {
function TabTag({ logSource, logTabId }: { logSource: number; logTabId: string }) {
const { store } = React.useContext(PlayerContext);
const { tabNames } = store.get();
return (
<Tooltip title={`${tabNames[logSource] ?? `Tab ${logSource}`}`} placement="left">
<Tooltip title={`${tabNames[logTabId] ?? `Tab ${logSource}`}`} placement="left">
<div
className={
'bg-gray-light rounded-full min-w-5 min-h-5 w-5 h-5 flex items-center justify-center text-xs cursor-default'

View file

@ -143,7 +143,7 @@ export default class IOSPlayer extends Player {
clean = () => {
super.clean();
this.screen.clean();
this.screen?.clean();
// @ts-ignore
this.screen = undefined;
this.messageLoader.clean();

View file

@ -91,7 +91,7 @@
"@babel/preset-typescript": "^7.23.2",
"@babel/runtime": "^7.23.2",
"@jest/globals": "^29.7.0",
"@openreplay/sourcemap-uploader": "^3.0.8",
"@openreplay/sourcemap-uploader": "^3.0.10",
"@trivago/prettier-plugin-sort-imports": "^4.3.0",
"@types/luxon": "^3.4.2",
"@types/node": "^22.7.8",
@ -116,6 +116,7 @@
"cypress": "^13.3.0",
"cypress-image-snapshot": "^4.0.1",
"dotenv": "^6.2.0",
"esbuild-loader": "^4.2.2",
"eslint": "^8.15.0",
"eslint-plugin-react": "^7.29.4",
"file-loader": "^6.2.0",

View file

@ -6,6 +6,8 @@ import CopyWebpackPlugin from 'copy-webpack-plugin';
import HtmlWebpackPlugin from "html-webpack-plugin";
import MiniCssExtractPlugin from 'mini-css-extract-plugin';
import CompressionPlugin from "compression-webpack-plugin";
import { EsbuildPlugin } from 'esbuild-loader';
const dotenv = require('dotenv').config({ path: __dirname + '/.env' })
const isDevelopment = process.env.NODE_ENV !== 'production'
const stylesHandler = MiniCssExtractPlugin.loader;
@ -28,23 +30,32 @@ const config: Configuration = {
splitChunks: {
chunks: 'all',
},
minimizer: [
new EsbuildPlugin({
target: 'es2020',
css: true
})
]
},
module: {
exprContextCritical: false,
rules: [
{
test: /\.(ts|js)x?$/i,
test: /\.tsx?$/i,
exclude: isDevelopment ? /node_modules/ : undefined,
use: ['thread-loader', {
loader: "babel-loader",
options: {
presets: [
"@babel/preset-env",
"@babel/preset-react",
"@babel/preset-typescript",
],
},
}],
loader: "esbuild-loader",
options: {
target: 'es2020',
},
},
{
test: /\.jsx?$/i,
exclude: isDevelopment ? /node_modules/ : undefined,
loader: "esbuild-loader",
options: {
loader: 'jsx',
target: 'es2020',
},
},
{
test: /\.s[ac]ss$/i,
@ -111,7 +122,11 @@ const config: Configuration = {
},
},
plugins: [
new CompressionPlugin(),
(isDevelopment ? false : new CompressionPlugin({
test: /\.(js|css|html|svg)$/,
algorithm: 'brotliCompress',
threshold: 10240,
})),
new webpack.DefinePlugin({
// 'process.env': ENV_VARIABLES,
'window.env': ENV_VARIABLES,
@ -131,6 +146,7 @@ const config: Configuration = {
performance: {
hints: false,
},
watchOptions: { ignored: "**/node_modules/**" },
devServer: {
// static: path.join(__dirname, "public"),
historyApiFallback: true,
@ -138,7 +154,6 @@ const config: Configuration = {
open: true,
port: 3333,
hot: true,
compress: true,
allowedHosts: "all",
client: {
overlay: {

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,195 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0';
CREATE DATABASE IF NOT EXISTS experimental;
CREATE TABLE IF NOT EXISTS experimental.autocomplete
(
project_id UInt16,
type LowCardinality(String),
value String,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, type, value)
TTL _timestamp + INTERVAL 1 MONTH;
CREATE TABLE IF NOT EXISTS experimental.events
(
session_id UInt64,
project_id UInt16,
event_type Enum8('CLICK'=0, 'INPUT'=1, 'LOCATION'=2,'REQUEST'=3,'PERFORMANCE'=4,'ERROR'=5,'CUSTOM'=6, 'GRAPHQL'=7, 'STATEACTION'=8, 'ISSUE'=9),
datetime DateTime,
label Nullable(String),
hesitation_time Nullable(UInt32),
name Nullable(String),
payload Nullable(String),
level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null),
source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)),
message Nullable(String),
error_id Nullable(String),
duration Nullable(UInt16),
context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)),
url Nullable(String),
url_host Nullable(String) MATERIALIZED lower(domain(url)),
url_path Nullable(String),
url_hostpath Nullable(String) MATERIALIZED concat(url_host, url_path),
request_start Nullable(UInt16),
response_start Nullable(UInt16),
response_end Nullable(UInt16),
dom_content_loaded_event_start Nullable(UInt16),
dom_content_loaded_event_end Nullable(UInt16),
load_event_start Nullable(UInt16),
load_event_end Nullable(UInt16),
first_paint Nullable(UInt16),
first_contentful_paint_time Nullable(UInt16),
speed_index Nullable(UInt16),
visually_complete Nullable(UInt16),
time_to_interactive Nullable(UInt16),
ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start),
minus(response_start, request_start), Null),
ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start),
minus(response_end, request_start), Null),
response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start),
minus(response_end, response_start), Null),
dom_building_time Nullable(UInt16) MATERIALIZED if(
greaterOrEquals(dom_content_loaded_event_start, response_end),
minus(dom_content_loaded_event_start, response_end), Null),
dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if(
greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start),
minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null),
load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start),
minus(load_event_end, load_event_start), Null),
min_fps Nullable(UInt8),
avg_fps Nullable(UInt8),
max_fps Nullable(UInt8),
min_cpu Nullable(UInt8),
avg_cpu Nullable(UInt8),
max_cpu Nullable(UInt8),
min_total_js_heap_size Nullable(UInt64),
avg_total_js_heap_size Nullable(UInt64),
max_total_js_heap_size Nullable(UInt64),
min_used_js_heap_size Nullable(UInt64),
avg_used_js_heap_size Nullable(UInt64),
max_used_js_heap_size Nullable(UInt64),
method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)),
status Nullable(UInt16),
success Nullable(UInt8),
request_body Nullable(String),
response_body Nullable(String),
issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20,'app_crash'=21)),
issue_id Nullable(String),
error_tags_keys Array(String),
error_tags_values Array(Nullable(String)),
transfer_size Nullable(UInt32),
selector Nullable(String),
normalized_x Nullable(Float32),
normalized_y Nullable(Float32),
message_id UInt64 DEFAULT 0,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id, message_id)
TTL datetime + INTERVAL 1 MONTH;
CREATE TABLE IF NOT EXISTS experimental.sessions
(
session_id UInt64,
project_id UInt16,
tracker_version LowCardinality(String),
rev_id LowCardinality(Nullable(String)),
user_uuid UUID,
user_os LowCardinality(String),
user_os_version LowCardinality(Nullable(String)),
user_browser LowCardinality(String),
user_browser_version LowCardinality(Nullable(String)),
user_device Nullable(String),
user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2,'tablet'=3),
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 
'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122,'BU'=123, 'VD'=124, 'YD'=125, 'DD'=126),
user_city LowCardinality(String),
user_state LowCardinality(String),
platform Enum8('web'=1,'ios'=2,'android'=3) DEFAULT 'web',
datetime DateTime,
timezone LowCardinality(Nullable(String)),
duration UInt32,
pages_count UInt16,
events_count UInt16,
errors_count UInt16,
utm_source Nullable(String),
utm_medium Nullable(String),
utm_campaign Nullable(String),
user_id Nullable(String),
user_anonymous_id Nullable(String),
issue_types Array(LowCardinality(String)),
referrer Nullable(String),
base_referrer Nullable(String) MATERIALIZED lower(concat(domain(referrer), path(referrer))),
issue_score Nullable(UInt32),
screen_width Nullable(Int16),
screen_height Nullable(Int16),
metadata_1 Nullable(String),
metadata_2 Nullable(String),
metadata_3 Nullable(String),
metadata_4 Nullable(String),
metadata_5 Nullable(String),
metadata_6 Nullable(String),
metadata_7 Nullable(String),
metadata_8 Nullable(String),
metadata_9 Nullable(String),
metadata_10 Nullable(String),
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 1 MONTH
SETTINGS index_granularity = 512;
CREATE TABLE IF NOT EXISTS experimental.issues
(
project_id UInt16,
issue_id String,
type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20,'app_crash'=21),
context_string String,
context_keys Array(String),
context_values Array(Nullable(String)),
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, issue_id, type)
TTL _timestamp + INTERVAL 1 MONTH;
CREATE TABLE IF NOT EXISTS experimental.ios_events
(
session_id UInt64,
project_id UInt16,
event_type Enum8('TAP'=0, 'INPUT'=1, 'SWIPE'=2, 'VIEW'=3,'REQUEST'=4,'CRASH'=5,'CUSTOM'=6, 'STATEACTION'=8, 'ISSUE'=9),
datetime DateTime,
label Nullable(String),
name Nullable(String),
payload Nullable(String),
level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null),
context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)),
url Nullable(String),
url_host Nullable(String) MATERIALIZED lower(domain(url)),
url_path Nullable(String),
url_hostpath Nullable(String) MATERIALIZED concat(url_host, url_path),
request_start Nullable(UInt16),
response_start Nullable(UInt16),
response_end Nullable(UInt16),
method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)),
status Nullable(UInt16),
duration Nullable(UInt16),
success Nullable(UInt8),
request_body Nullable(String),
response_body Nullable(String),
issue_type Nullable(Enum8('tap_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20,'app_crash'=21)),
issue_id Nullable(String),
transfer_size Nullable(UInt32),
direction Nullable(String),
reason Nullable(String),
stacktrace Nullable(String),
message_id UInt64 DEFAULT 0,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id, message_id)
TTL datetime + INTERVAL 1 MONTH;

View file

@ -0,0 +1,195 @@
CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.22.0';
CREATE DATABASE IF NOT EXISTS experimental;
CREATE TABLE IF NOT EXISTS experimental.autocomplete
(
project_id UInt16,
type LowCardinality(String),
value String,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, type, value)
TTL _timestamp + INTERVAL 1 MONTH;
CREATE TABLE IF NOT EXISTS experimental.events
(
session_id UInt64,
project_id UInt16,
event_type Enum8('CLICK'=0, 'INPUT'=1, 'LOCATION'=2,'REQUEST'=3,'PERFORMANCE'=4,'ERROR'=5,'CUSTOM'=6, 'GRAPHQL'=7, 'STATEACTION'=8, 'ISSUE'=9),
datetime DateTime,
label Nullable(String),
hesitation_time Nullable(UInt32),
name Nullable(String),
payload Nullable(String),
level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null),
source Nullable(Enum8('js_exception'=0, 'bugsnag'=1, 'cloudwatch'=2, 'datadog'=3, 'elasticsearch'=4, 'newrelic'=5, 'rollbar'=6, 'sentry'=7, 'stackdriver'=8, 'sumologic'=9)),
message Nullable(String),
error_id Nullable(String),
duration Nullable(UInt16),
context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)),
url Nullable(String),
url_host Nullable(String) MATERIALIZED lower(domain(url)),
url_path Nullable(String),
url_hostpath Nullable(String) MATERIALIZED concat(url_host, url_path),
request_start Nullable(UInt16),
response_start Nullable(UInt16),
response_end Nullable(UInt16),
dom_content_loaded_event_start Nullable(UInt16),
dom_content_loaded_event_end Nullable(UInt16),
load_event_start Nullable(UInt16),
load_event_end Nullable(UInt16),
first_paint Nullable(UInt16),
first_contentful_paint_time Nullable(UInt16),
speed_index Nullable(UInt16),
visually_complete Nullable(UInt16),
time_to_interactive Nullable(UInt16),
ttfb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_start, request_start),
minus(response_start, request_start), Null),
ttlb Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, request_start),
minus(response_end, request_start), Null),
response_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(response_end, response_start),
minus(response_end, response_start), Null),
dom_building_time Nullable(UInt16) MATERIALIZED if(
greaterOrEquals(dom_content_loaded_event_start, response_end),
minus(dom_content_loaded_event_start, response_end), Null),
dom_content_loaded_event_time Nullable(UInt16) MATERIALIZED if(
greaterOrEquals(dom_content_loaded_event_end, dom_content_loaded_event_start),
minus(dom_content_loaded_event_end, dom_content_loaded_event_start), Null),
load_event_time Nullable(UInt16) MATERIALIZED if(greaterOrEquals(load_event_end, load_event_start),
minus(load_event_end, load_event_start), Null),
min_fps Nullable(UInt8),
avg_fps Nullable(UInt8),
max_fps Nullable(UInt8),
min_cpu Nullable(UInt8),
avg_cpu Nullable(UInt8),
max_cpu Nullable(UInt8),
min_total_js_heap_size Nullable(UInt64),
avg_total_js_heap_size Nullable(UInt64),
max_total_js_heap_size Nullable(UInt64),
min_used_js_heap_size Nullable(UInt64),
avg_used_js_heap_size Nullable(UInt64),
max_used_js_heap_size Nullable(UInt64),
method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)),
status Nullable(UInt16),
success Nullable(UInt8),
request_body Nullable(String),
response_body Nullable(String),
issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20,'app_crash'=21)),
issue_id Nullable(String),
error_tags_keys Array(String),
error_tags_values Array(Nullable(String)),
transfer_size Nullable(UInt32),
selector Nullable(String),
normalized_x Nullable(Float32),
normalized_y Nullable(Float32),
message_id UInt64 DEFAULT 0,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id, message_id)
TTL datetime + INTERVAL 1 MONTH;
CREATE TABLE IF NOT EXISTS experimental.sessions
(
session_id UInt64,
project_id UInt16,
tracker_version LowCardinality(String),
rev_id LowCardinality(Nullable(String)),
user_uuid UUID,
user_os LowCardinality(String),
user_os_version LowCardinality(Nullable(String)),
user_browser LowCardinality(String),
user_browser_version LowCardinality(Nullable(String)),
user_device Nullable(String),
user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2,'tablet'=3),
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 
'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122,'BU'=123, 'VD'=124, 'YD'=125, 'DD'=126),
user_city LowCardinality(String),
user_state LowCardinality(String),
platform Enum8('web'=1,'ios'=2,'android'=3) DEFAULT 'web',
datetime DateTime,
timezone LowCardinality(Nullable(String)),
duration UInt32,
pages_count UInt16,
events_count UInt16,
errors_count UInt16,
utm_source Nullable(String),
utm_medium Nullable(String),
utm_campaign Nullable(String),
user_id Nullable(String),
user_anonymous_id Nullable(String),
issue_types Array(LowCardinality(String)),
referrer Nullable(String),
base_referrer Nullable(String) MATERIALIZED lower(concat(domain(referrer), path(referrer))),
issue_score Nullable(UInt32),
screen_width Nullable(Int16),
screen_height Nullable(Int16),
metadata_1 Nullable(String),
metadata_2 Nullable(String),
metadata_3 Nullable(String),
metadata_4 Nullable(String),
metadata_5 Nullable(String),
metadata_6 Nullable(String),
metadata_7 Nullable(String),
metadata_8 Nullable(String),
metadata_9 Nullable(String),
metadata_10 Nullable(String),
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMMDD(datetime)
ORDER BY (project_id, datetime, session_id)
TTL datetime + INTERVAL 1 MONTH
SETTINGS index_granularity = 512;
-- One row per detected issue per project. ReplacingMergeTree deduplicates rows
-- sharing the ORDER BY key (project_id, issue_id, type), keeping the row with
-- the latest _timestamp; rows expire one month after insertion via TTL.
CREATE TABLE IF NOT EXISTS experimental.issues
(
project_id UInt16,
issue_id String,
-- Closed set of issue categories; the ml_* values presumably originate from
-- the ML detection pipeline -- confirm against the producer.
type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20,'app_crash'=21),
context_string String,
-- Parallel arrays: context_keys[i] maps to context_values[i] (values may be NULL).
context_keys Array(String),
context_values Array(Nullable(String)),
-- Insertion time; drives both ReplacingMergeTree deduplication and the TTL below.
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(_timestamp)
ORDER BY (project_id, issue_id, type)
TTL _timestamp + INTERVAL 1 MONTH;
-- Event stream for mobile (iOS) sessions: one row per tracked event.
-- Deduplicated by ReplacingMergeTree on the full ORDER BY key; partitioned by
-- month of the event time and expired one month after the event via TTL.
CREATE TABLE IF NOT EXISTS experimental.ios_events
(
session_id UInt64,
project_id UInt16,
-- Note: enum value 7 is unassigned (gap between CUSTOM=6 and STATEACTION=8).
event_type Enum8('TAP'=0, 'INPUT'=1, 'SWIPE'=2, 'VIEW'=3,'REQUEST'=4,'CRASH'=5,'CUSTOM'=6, 'STATEACTION'=8, 'ISSUE'=9),
datetime DateTime,
label Nullable(String),
name Nullable(String),
payload Nullable(String),
-- Defaults to 'info' only for CUSTOM events, NULL for every other event type.
level Nullable(Enum8('info'=0, 'error'=1)) DEFAULT if(event_type == 'CUSTOM', 'info', null),
context Nullable(Enum8('unknown'=0, 'self'=1, 'same-origin-ancestor'=2, 'same-origin-descendant'=3, 'same-origin'=4, 'cross-origin-ancestor'=5, 'cross-origin-descendant'=6, 'cross-origin-unreachable'=7, 'multiple-contexts'=8)),
url Nullable(String),
-- Derived columns: computed from url/url_path at insert time, not stored by clients.
url_host Nullable(String) MATERIALIZED lower(domain(url)),
url_path Nullable(String),
url_hostpath Nullable(String) MATERIALIZED concat(url_host, url_path),
-- Request timing offsets (REQUEST events); units not stated here -- presumably
-- milliseconds, confirm against the ingestion service.
request_start Nullable(UInt16),
response_start Nullable(UInt16),
response_end Nullable(UInt16),
method Nullable(Enum8('GET' = 0, 'HEAD' = 1, 'POST' = 2, 'PUT' = 3, 'DELETE' = 4, 'CONNECT' = 5, 'OPTIONS' = 6, 'TRACE' = 7, 'PATCH' = 8)),
status Nullable(UInt16),
duration Nullable(UInt16),
success Nullable(UInt8),
request_body Nullable(String),
response_body Nullable(String),
-- ISSUE events: category and identifier of the associated issue.
issue_type Nullable(Enum8('tap_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20,'app_crash'=21)),
issue_id Nullable(String),
transfer_size Nullable(UInt32),
direction Nullable(String),
reason Nullable(String),
stacktrace Nullable(String),
-- message_id participates in the ORDER BY key, so distinct messages within the
-- same session/second are not collapsed by the ReplacingMergeTree.
message_id UInt64 DEFAULT 0,
_timestamp DateTime DEFAULT now()
) ENGINE = ReplacingMergeTree(_timestamp)
PARTITION BY toYYYYMM(datetime)
ORDER BY (project_id, datetime, event_type, session_id, message_id)
TTL datetime + INTERVAL 1 MONTH;

View file

@ -0,0 +1,2 @@
-- Tear-down script: removes the experimental database (and all tables in it),
-- then the version helper function. Both statements are no-ops if absent.
DROP DATABASE IF EXISTS experimental;
DROP FUNCTION IF EXISTS openreplay_version();

View file

@ -91,7 +91,7 @@ dependencies {
//noinspection GradleDynamicVersion
implementation("com.facebook.react:react-native:0.20.1")
implementation("org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version")
implementation("com.github.openreplay:android-tracker:v1.1.2")
implementation("com.github.openreplay:android-tracker:v1.1.3")
}
//allprojects {

View file

@ -10,18 +10,10 @@ import com.openreplay.tracker.models.OROptions
class ReactNativeModule(reactContext: ReactApplicationContext) :
ReactContextBaseJavaModule(reactContext) {
// private val context = reactContext.acti
// Name under which this native module is registered on the JS bridge.
override fun getName(): String = NAME
// Example method
// See https://reactnative.dev/docs/native-modules-android
@ReactMethod
fun multiply(a: Double, b: Double, promise: Promise) {
promise.resolve(a * b * 2)
}
companion object {
// Module name exposed to JavaScript (NativeModules.ORTrackerConnector).
const val NAME = "ORTrackerConnector"
}
@ -33,14 +25,13 @@ class ReactNativeModule(reactContext: ReactApplicationContext) :
val logs: Boolean = true,
val screen: Boolean = true,
val debugLogs: Boolean = false,
val wifiOnly: Boolean = true // assuming you want this as well
val wifiOnly: Boolean = true
)
// Reads [key] from [map] as a Boolean, falling back to [default] when the key is absent.
private fun getBooleanOrDefault(map: ReadableMap, key: String, default: Boolean): Boolean {
if (!map.hasKey(key)) return default
return map.getBoolean(key)
}
// optionsMap: ReadableMap?,
@ReactMethod
fun startSession(
projectKey: String,
@ -97,8 +88,8 @@ class ReactNativeModule(reactContext: ReactApplicationContext) :
@ReactMethod
fun getSessionID(promise: Promise) {
try {
val sessionId = OpenReplay.getSessionID() ?: ""
promise.resolve(sessionId) // Resolve the promise with the session ID
val sessionId = OpenReplay.getSessionID()
promise.resolve(sessionId)
} catch (e: Exception) {
promise.reject("GET_SESSION_ID_ERROR", "Failed to retrieve session ID", e)
}
@ -111,8 +102,9 @@ class ReactNativeModule(reactContext: ReactApplicationContext) :
requestJSON: String,
responseJSON: String,
status: Int,
duration: ULong
duration: Double
) {
OpenReplay.networkRequest(url, method, requestJSON, responseJSON, status, duration)
val durationULong = duration.toLong().toULong()
OpenReplay.networkRequest(url, method, requestJSON, responseJSON, status, durationULong)
}
}

View file

@ -1,13 +1,13 @@
package com.openreplay.reactnative
import android.annotation.SuppressLint
import android.content.Context
import android.graphics.PointF
import android.util.Log
import android.view.GestureDetector
import android.view.MotionEvent
import android.view.View
import android.view.ViewGroup
import android.widget.FrameLayout
import android.widget.Toast
import com.facebook.react.uimanager.SimpleViewManager
import com.facebook.react.uimanager.ThemedReactContext
import com.facebook.react.uimanager.ViewGroupManager
import com.openreplay.tracker.listeners.Analytics
@ -15,151 +15,16 @@ import com.openreplay.tracker.listeners.SwipeDirection
import kotlin.math.abs
import kotlin.math.sqrt
import android.os.Handler
import android.os.Looper
import android.util.Log
import android.view.GestureDetector
import com.facebook.react.ReactRootView
//class RnTrackerTouchManager : ViewGroupManager<TouchableFrameLayout>() {
// override fun getName(): String = "RnTrackerTouchView"
//
// override fun createViewInstance(reactContext: ThemedReactContext): TouchableFrameLayout {
// return TouchableFrameLayout(reactContext)
// }
//}
//
//class TouchableFrameLayout(context: Context) : FrameLayout(context) {
// private var gestureDetector: GestureDetector
// private var handler = Handler(Looper.getMainLooper())
// private var isScrolling = false
// private var lastX: Float = 0f
// private var lastY: Float = 0f
// private var swipeDirection: SwipeDirection = SwipeDirection.UNDEFINED
//
// init {
// gestureDetector = GestureDetector(context, object : GestureDetector.SimpleOnGestureListener() {
// override fun onSingleTapUp(e: MotionEvent): Boolean {
// Analytics.sendClick(e)
// return true
// }
//
// override fun onDown(e: MotionEvent): Boolean = true
//
// override fun onScroll(e1: MotionEvent?, e2: MotionEvent, distanceX: Float, distanceY: Float): Boolean {
// if (!isScrolling) {
// isScrolling = true
// }
//
// swipeDirection = SwipeDirection.fromDistances(distanceX, distanceY)
// lastX = e2.x
// lastY = e2.y
//
// handler.removeCallbacksAndMessages(null)
// handler.postDelayed({
// if (isScrolling) {
// isScrolling = false
// Analytics.sendSwipe(swipeDirection, lastX, lastY)
// }
// }, 200)
// return true
// }
// })
//
// setOnTouchListener { _, event ->
// Log.d("TouchEvent", "Event: ${event.actionMasked}, X: ${event.x}, Y: ${event.y}")
// gestureDetector.onTouchEvent(event)
// this.performClick()
// }
// }
//}
class RnTrackerTouchManager : ViewGroupManager<FrameLayout>() {
override fun getName(): String = "RnTrackerTouchView"
override fun createViewInstance(reactContext: ThemedReactContext): FrameLayout {
return ReactRootView(reactContext).apply {
// layoutParams = FrameLayout.LayoutParams(
// FrameLayout.LayoutParams.MATCH_PARENT,
// FrameLayout.LayoutParams.MATCH_PARENT
// )
// isClickable = true
// val touchStart = PointF()
// setOnTouchListener { view, event ->
// when (event.action) {
// MotionEvent.ACTION_DOWN -> {
// touchStart.set(event.x, event.y)
// true
// }
//
// MotionEvent.ACTION_UP -> {
// val deltaX = event.x - touchStart.x
// val deltaY = event.y - touchStart.y
// val distance = sqrt(deltaX * deltaX + deltaY * deltaY)
//
// if (distance > 10) {
// val direction = if (abs(deltaX) > abs(deltaY)) {
// if (deltaX > 0) "RIGHT" else "LEFT"
// } else {
// if (deltaY > 0) "DOWN" else "UP"
// }
// Analytics.sendSwipe(SwipeDirection.valueOf(direction), event.x, event.y)
// } else {
// Analytics.sendClick(event)
// view.performClick() // Perform click for accessibility
// }
// true
// }
//
// else -> false
// }
// }
}
return RnTrackerRootLayout(reactContext)
}
override fun addView(parent: FrameLayout, child: View, index: Int) {
child.isClickable = true
child.isFocusable = true
// child.layoutParams = FrameLayout.LayoutParams(
// FrameLayout.LayoutParams.MATCH_PARENT,
// FrameLayout.LayoutParams.MATCH_PARENT
// )
val touchStart = PointF()
child.setOnTouchListener(
View.OnTouchListener { view, event ->
when (event.action) {
MotionEvent.ACTION_DOWN -> {
view.performClick()
Analytics.sendClick(event)
true
}
MotionEvent.ACTION_UP -> {
val deltaX = event.x - touchStart.x
val deltaY = event.y - touchStart.y
val distance = sqrt(deltaX * deltaX + deltaY * deltaY)
if (distance > 10) {
val direction = if (abs(deltaX) > abs(deltaY)) {
if (deltaX > 0) "RIGHT" else "LEFT"
} else {
if (deltaY > 0) "DOWN" else "UP"
}
Analytics.sendSwipe(SwipeDirection.valueOf(direction), event.x, event.y)
} else {
Analytics.sendClick(event)
view.performClick() // Perform click for accessibility
}
true
}
else -> false
}
}
)
parent.addView(child)
parent.addView(child, index)
}
override fun getChildCount(parent: FrameLayout): Int = parent.childCount
@ -175,63 +40,102 @@ class RnTrackerTouchManager : ViewGroupManager<FrameLayout>() {
}
}
//class RnTrackerTouchManager : ViewGroupManager<FrameLayout>() {
// override fun getName(): String = "RnTrackerTouchView"
//
// override fun createViewInstance(reactContext: ThemedReactContext): FrameLayout {
// return FrameLayout(reactContext).apply {
// layoutParams = FrameLayout.LayoutParams(
// FrameLayout.LayoutParams.MATCH_PARENT,
// FrameLayout.LayoutParams.MATCH_PARENT
// )
// isClickable = true
// val touchStart = PointF()
// setOnTouchListener { view, event ->
// when (event.action) {
// MotionEvent.ACTION_DOWN -> {
// touchStart.set(event.x, event.y)
// view.performClick()
// }
//
// MotionEvent.ACTION_UP -> {
// val deltaX = event.x - touchStart.x
// val deltaY = event.y - touchStart.y
// val distance = sqrt(deltaX * deltaX + deltaY * deltaY)
//
// if (distance > 10) {
// val direction = if (abs(deltaX) > abs(deltaY)) {
// if (deltaX > 0) "RIGHT" else "LEFT"
// } else {
// if (deltaY > 0) "DOWN" else "UP"
// }
// Analytics.sendSwipe(SwipeDirection.valueOf(direction), event.x, event.y)
// view.performClick()
// } else {
// Analytics.sendClick(event)
// view.performClick()
// }
// true
// }
//
// else -> false
// }
// }
// }
// }
//
// override fun addView(parent: FrameLayout, child: View, index: Int) {
// parent.addView(child, index)
// }
//
// override fun getChildCount(parent: FrameLayout): Int = parent.childCount
//
// override fun getChildAt(parent: FrameLayout, index: Int): View = parent.getChildAt(index)
//
// override fun removeViewAt(parent: FrameLayout, index: Int) {
// parent.removeViewAt(index)
// }
//
// override fun removeAllViews(parent: FrameLayout) {
// parent.removeAllViews()
// }
//}
/**
 * Root layout that observes every touch event passing through the React view
 * tree and reports taps and swipes to [Analytics], without consuming the
 * events (they are always forwarded to super.dispatchTouchEvent).
 *
 * Taps are detected by a [GestureDetector]; swipes are detected manually by
 * accumulating movement between ACTION_DOWN and ACTION_UP/CANCEL.
 */
class RnTrackerRootLayout(context: Context) : FrameLayout(context) {
// Last known touch position; reset on ACTION_DOWN and advanced on every ACTION_MOVE.
private val touchStart = PointF()
private val gestureDetector: GestureDetector
// Deepest child under the finger at ACTION_DOWN; used by GestureListener for
// the click label and the accessibility performClick().
private var currentTappedView: View? = null
// Variables to track total movement.
// NOTE(review): because touchStart is advanced on every ACTION_MOVE, these
// accumulate per-move deltas and end up equal to the NET displacement since
// ACTION_DOWN (a back-and-forth drag cancels out) -- confirm that is intended.
private var totalDeltaX: Float = 0f
private var totalDeltaY: Float = 0f
init {
gestureDetector = GestureDetector(context, GestureListener())
}
override fun dispatchTouchEvent(ev: MotionEvent): Boolean {
// Pass all touch events to the GestureDetector (drives tap detection).
gestureDetector.onTouchEvent(ev)
when (ev.action) {
MotionEvent.ACTION_DOWN -> {
// Record the starting point for potential swipe
touchStart.x = ev.x
touchStart.y = ev.y
// Reset total movement
totalDeltaX = 0f
totalDeltaY = 0f
// Find and store the view that was touched.
// Assumes ev.x/ev.y are in this layout's coordinate space -- TODO confirm
// this holds when the layout itself is offset within its parent.
currentTappedView = findViewAt(this, ev.x.toInt(), ev.y.toInt())
// Log.d(
// "RnTrackerRootLayout",
// "ACTION_DOWN at global: (${ev.rawX}, ${ev.rawY}) on view: $currentTappedView"
// )
}
MotionEvent.ACTION_MOVE -> {
// Accumulate movement
val deltaX = ev.x - touchStart.x
val deltaY = ev.y - touchStart.y
totalDeltaX += deltaX
totalDeltaY += deltaY
// Update touchStart for the next move event
touchStart.x = ev.x
touchStart.y = ev.y
// Log.d("RnTrackerRootLayout", "Accumulated movement - X: $totalDeltaX, Y: $totalDeltaY")
}
MotionEvent.ACTION_UP, MotionEvent.ACTION_CANCEL -> {
// Determine if the accumulated movement qualifies as a swipe;
// direction is taken from the dominant axis of the net displacement.
val distance = sqrt(totalDeltaX * totalDeltaX + totalDeltaY * totalDeltaY)
if (distance > SWIPE_DISTANCE_THRESHOLD) {
val direction = if (abs(totalDeltaX) > abs(totalDeltaY)) {
if (totalDeltaX > 0) "RIGHT" else "LEFT"
} else {
if (totalDeltaY > 0) "DOWN" else "UP"
}
Log.d("RnTrackerRootLayout", "Swipe detected: $direction")
Analytics.sendSwipe(SwipeDirection.valueOf(direction), ev.rawX, ev.rawY)
}
}
}
// Ensure normal event propagation: this layout never consumes events itself.
return super.dispatchTouchEvent(ev)
}
companion object {
// Minimum net displacement (px) for a gesture to count as a swipe.
private const val SWIPE_DISTANCE_THRESHOLD = 100f // Adjust as needed
}
// Depth-first hit test: returns the deepest child containing (x, y), searching
// topmost (last-drawn) children first; coordinates are relative to [parent].
private fun findViewAt(parent: ViewGroup, x: Int, y: Int): View? {
for (i in parent.childCount - 1 downTo 0) {
val child = parent.getChildAt(i)
if (isPointInsideView(x, y, child)) {
if (child is ViewGroup) {
// Translate into the child's coordinate space before recursing.
val childX = x - child.left
val childY = y - child.top
val result = findViewAt(child, childX, childY)
return result ?: child
} else {
return child
}
}
}
return null
}
// True when (x, y) falls within the view's bounds in its parent's coordinates.
// NOTE(review): ignores translationX/Y and scroll offsets -- verify that is
// acceptable for the views being tracked.
private fun isPointInsideView(x: Int, y: Int, view: View): Boolean {
return x >= view.left && x <= view.right && y >= view.top && y <= view.bottom
}
// Reports single taps, labelled with the tapped view's contentDescription
// (falling back to "Button"), and fires performClick for accessibility.
inner class GestureListener : GestureDetector.SimpleOnGestureListener() {
override fun onSingleTapUp(e: MotionEvent): Boolean {
Log.d("GestureListener", "Single tap detected at: (${e.rawX}, ${e.rawY})")
val label = currentTappedView?.contentDescription?.toString() ?: "Button"
Analytics.sendClick(e, label)
currentTappedView?.performClick()
return super.onSingleTapUp(e)
}
}
}

View file

@ -0,0 +1,22 @@
const { withMainApplication } = require('@expo/config-plugins');
/**
 * Inserts the OpenReplay native package registration into the generated
 * MainApplication source, immediately before its `return packages;` statement.
 *
 * Idempotent: if the registration is already present the source is returned
 * unchanged, so re-running `expo prebuild` does not insert duplicates.
 * (Bug fix: the previous guard checked for `new ReactNativePackage()` while
 * the inserted line uses the fully-qualified class name, so the guard never
 * matched and every run appended another registration. The guard now matches
 * the string actually inserted. The debug log that dumped the whole source
 * file has also been removed.)
 *
 * @param {string} src Contents of MainApplication.java.
 * @returns {string} Source with the OpenReplay package registered.
 */
function addPackageToMainApplication(src) {
  // Guard against double insertion; must match the exact text inserted below.
  if (src.includes('com.openreplay.reactnative.ReactNativePackage')) {
    return src;
  }
  // Insert `packages.add(...)` before `return packages;`. If the anchor is
  // missing, the source is returned unchanged (String.replace is a no-op).
  return src.replace(
    'return packages;',
    `packages.add(new com.openreplay.reactnative.ReactNativePackage());\n    return packages;`
  );
}
module.exports = function configPlugin(config) {
return withMainApplication(config, (config) => {
if (config.modResults.contents) {
config.modResults.contents = addPackageToMainApplication(config.modResults.contents);
}
return config;
});
};

View file

@ -1,6 +1,6 @@
{
"name": "@openreplay/react-native",
"version": "0.6.6",
"version": "0.6.10",
"description": "Openreplay React-native connector for iOS applications",
"main": "lib/commonjs/index",
"module": "lib/module/index",
@ -13,6 +13,7 @@
"android",
"ios",
"cpp",
"app.plugin.js",
"*.podspec",
"!ios/build",
"!android/build",
@ -71,7 +72,8 @@
"react-native-builder-bob": "^0.20.0",
"release-it": "^15.0.0",
"turbo": "^1.10.7",
"typescript": "^5.2.2"
"typescript": "^5.2.2",
"@expo/config-plugins": "^9.0.12"
},
"resolutions": {
"@types/react": "^18.2.44"