From 31ba4176aaf2bc9a61764b4d7be2e2afae43b778 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 23 Dec 2024 10:48:18 +0100 Subject: [PATCH 1/9] change(react-native): android version jump to use v1.1.4 --- tracker/tracker-reactnative/android/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tracker/tracker-reactnative/android/build.gradle b/tracker/tracker-reactnative/android/build.gradle index 2890718fd..b188ae009 100644 --- a/tracker/tracker-reactnative/android/build.gradle +++ b/tracker/tracker-reactnative/android/build.gradle @@ -91,7 +91,7 @@ dependencies { //noinspection GradleDynamicVersion implementation("com.facebook.react:react-native:0.20.1") implementation("org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version") - implementation("com.github.openreplay:android-tracker:v1.1.3") + implementation("com.github.openreplay:android-tracker:v1.1.4") } //allprojects { From 0658e3b3d9968f8f8f2a0418625ac4bd14413a6b Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Mon, 23 Dec 2024 10:49:08 +0100 Subject: [PATCH 2/9] tracker: update ios pkg ver --- tracker/tracker-reactnative/example/ios/Podfile.lock | 4 ++-- tracker/tracker-reactnative/openreplay-react-native.podspec | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tracker/tracker-reactnative/example/ios/Podfile.lock b/tracker/tracker-reactnative/example/ios/Podfile.lock index b846d42d7..3b00731e3 100644 --- a/tracker/tracker-reactnative/example/ios/Podfile.lock +++ b/tracker/tracker-reactnative/example/ios/Podfile.lock @@ -9,14 +9,14 @@ PODS: - hermes-engine (0.74.0): - hermes-engine/Pre-built (= 0.74.0) - hermes-engine/Pre-built (0.74.0) - - Openreplay (1.0.14): + - Openreplay (1.0.15): - DeviceKit - SWCompression - openreplay-react-native (0.6.6): - DoubleConversion - glog - hermes-engine - - Openreplay (= 1.0.14) + - Openreplay (= 1.0.15) - RCT-Folly (= 2024.01.01.00) - RCTRequired - RCTTypeSafety diff --git a/tracker/tracker-reactnative/openreplay-react-native.podspec b/tracker/tracker-reactnative/openreplay-react-native.podspec index ddb04f545..dc353690a 100644 --- a/tracker/tracker-reactnative/openreplay-react-native.podspec +++ b/tracker/tracker-reactnative/openreplay-react-native.podspec @@ -19,11 +19,11 @@ Pod::Spec.new do |s| # Use install_modules_dependencies helper to install the dependencies if React Native version >=0.71.0. # See https://github.com/facebook/react-native/blob/febf6b7f33fdb4904669f99d795eba4c0f95d7bf/scripts/cocoapods/new_architecture.rb#L79. if respond_to?(:install_modules_dependencies, true) - s.dependency "Openreplay", '1.0.14' + s.dependency "Openreplay", '1.0.15' install_modules_dependencies(s) else s.dependency "React-Core" - s.dependency "Openreplay", '1.0.14' + s.dependency "Openreplay", '1.0.15' # Don't install the dependencies when we run `pod install` in the old architecture. 
if ENV['RCT_NEW_ARCH_ENABLED'] == '1' then @@ -38,7 +38,7 @@ Pod::Spec.new do |s| s.dependency "RCTRequired" s.dependency "RCTTypeSafety" s.dependency "ReactCommon/turbomodule/core" - s.dependency "Openreplay", '1.0.14' + s.dependency "Openreplay", '1.0.15' end end end From 4395c9ee46db62ebc67d982908dc6a9170874dad Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Mon, 23 Dec 2024 10:54:06 +0100 Subject: [PATCH 3/9] tracker: react native 0.6.11 (background, ios crash, screen size) --- tracker/tracker-reactnative/example/ios/Podfile.lock | 6 +++--- tracker/tracker-reactnative/package.json | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tracker/tracker-reactnative/example/ios/Podfile.lock b/tracker/tracker-reactnative/example/ios/Podfile.lock index 3b00731e3..ea9b6ffbe 100644 --- a/tracker/tracker-reactnative/example/ios/Podfile.lock +++ b/tracker/tracker-reactnative/example/ios/Podfile.lock @@ -12,7 +12,7 @@ PODS: - Openreplay (1.0.15): - DeviceKit - SWCompression - - openreplay-react-native (0.6.6): + - openreplay-react-native (0.6.11): - DoubleConversion - glog - hermes-engine @@ -1423,8 +1423,8 @@ SPEC CHECKSUMS: fmt: 4c2741a687cc09f0634a2e2c72a838b99f1ff120 glog: c5d68082e772fa1c511173d6b30a9de2c05a69a2 hermes-engine: 6eae7edb2f563ee41d7c1f91f4f2e57c26d8a5c3 - Openreplay: e0b8de0203bbbd8fc161afab77ac370e2cef3d12 - openreplay-react-native: 3382fc4f597052f10ec79c37e872fc83890c1ce4 + Openreplay: ae72a7ca1a05d7da026b7ee8f4f84dcdfaa44021 + openreplay-react-native: 0cfa1842c5b2457f6e9d5fa564f746192317b20c RCT-Folly: 045d6ecaa59d826c5736dfba0b2f4083ff8d79df RCTDeprecation: 3ca8b6c36bfb302e1895b72cfe7db0de0c92cd47 RCTRequired: 9fc183af555fd0c89a366c34c1ae70b7e03b1dc5 diff --git a/tracker/tracker-reactnative/package.json b/tracker/tracker-reactnative/package.json index f8de5a324..b177b0070 100644 --- a/tracker/tracker-reactnative/package.json +++ b/tracker/tracker-reactnative/package.json @@ -1,6 +1,6 @@ { "name": "@openreplay/react-native", - "version": "0.6.10", + "version": "0.6.11", "description": "Openreplay React-native connector for iOS applications", "main": "lib/commonjs/index", "module": "lib/module/index", From 5fe204020f875ef446f0be45b4d55bee80cf8064 Mon Sep 17 00:00:00 2001 From: Kraiem Taha Yassine Date: Mon, 23 Dec 2024 11:54:28 +0100 Subject: [PATCH 4/9] Dev (#2903) * feat(chalice): autocomplete return top 10 with stats * fix(chalice): fixed autocomplete top 10 meta-filters * fix(chalice): fixed restricted sessions search --- api/chalicelib/core/metrics/custom_metrics.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/metrics/custom_metrics.py b/api/chalicelib/core/metrics/custom_metrics.py index bbba7b1c7..c6bf82b61 100644 --- a/api/chalicelib/core/metrics/custom_metrics.py +++ b/api/chalicelib/core/metrics/custom_metrics.py @@ -7,7 +7,7 @@ import schemas from chalicelib.core import issues from chalicelib.core.errors import errors from chalicelib.core.metrics import heatmaps, product_analytics, funnels, custom_metrics_predefined -from chalicelib.core.sessions import sessions +from chalicelib.core.sessions import sessions, sessions_search from chalicelib.utils import helper, pg_client from chalicelib.utils.TimeUTC import TimeUTC @@ -52,7 +52,7 @@ def __get_sessions_list(project: schemas.ProjectContext, user_id, data: schemas. 
"total": 0, "sessions": [] } - return sessions.search_sessions(data=data.series[0].filter, project_id=project.project_id, user_id=user_id) + return sessions_search.search_sessions(data=data.series[0].filter, project_id=project.project_id, user_id=user_id) def __get_heat_map_chart(project: schemas.ProjectContext, user_id, data: schemas.CardHeatMap, @@ -174,7 +174,7 @@ def get_sessions_by_card_id(project_id, user_id, metric_id, data: schemas.CardSe results = [] for s in data.series: results.append({"seriesId": s.series_id, "seriesName": s.name, - **sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)}) + **sessions_search.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)}) return results @@ -189,7 +189,7 @@ def get_sessions(project_id, user_id, data: schemas.CardSessionsSchema): s.filter = schemas.SessionsSearchPayloadSchema(**s.filter.model_dump(by_alias=True)) results.append({"seriesId": None, "seriesName": s.name, - **sessions.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)}) + **sessions_search.search_sessions(data=s.filter, project_id=project_id, user_id=user_id)}) return results From f05e84777b531f326c7e4b7addd36a0801407709 Mon Sep 17 00:00:00 2001 From: Alexander Date: Mon, 23 Dec 2024 14:24:06 +0100 Subject: [PATCH 5/9] feat(analytics): small refactoring of the service's architecture --- backend/cmd/analytics/main.go | 2 +- backend/pkg/analytics/api/card.go | 92 ------------------- backend/pkg/analytics/api/handlers.go | 64 ------------- backend/pkg/analytics/builder.go | 38 ++++---- .../card-service.go => cards/cards.go} | 88 +++++++++++------- .../card-handlers.go => cards/handlers.go} | 82 +++++++++++++---- .../{api/models/card.go => cards/model.go} | 2 +- .../dashboards.go} | 67 +++++++++----- .../handlers.go} | 83 +++++++++++++---- .../{api/models => dashboards}/model.go | 24 ++--- backend/pkg/analytics/service/analytics.go | 50 ---------- 11 files changed, 270 insertions(+), 322 deletions(-) delete mode 100644 backend/pkg/analytics/api/card.go delete mode 100644 backend/pkg/analytics/api/handlers.go rename backend/pkg/analytics/{service/card-service.go => cards/cards.go} (82%) rename backend/pkg/analytics/{api/card-handlers.go => cards/handlers.go} (80%) rename backend/pkg/analytics/{api/models/card.go => cards/model.go} (99%) rename backend/pkg/analytics/{service/dashboard-service.go => dashboards/dashboards.go} (78%) rename backend/pkg/analytics/{api/dashboard-handlers.go => dashboards/handlers.go} (81%) rename backend/pkg/analytics/{api/models => dashboards}/model.go (71%) delete mode 100644 backend/pkg/analytics/service/analytics.go diff --git a/backend/cmd/analytics/main.go b/backend/cmd/analytics/main.go index bd8c3a99c..55923fe70 100644 --- a/backend/cmd/analytics/main.go +++ b/backend/cmd/analytics/main.go @@ -37,7 +37,7 @@ func main() { if err != nil { log.Fatal(ctx, "failed while creating router: %s", err) } - router.AddHandlers(api.NoPrefix, builder.AnalyticsAPI) + router.AddHandlers(api.NoPrefix, builder.CardsAPI, builder.DashboardsAPI) router.AddMiddlewares(builder.Auth.Middleware, builder.RateLimiter.Middleware, builder.AuditTrail.Middleware) server.Run(ctx, log, &cfg.HTTP, router) diff --git a/backend/pkg/analytics/api/card.go b/backend/pkg/analytics/api/card.go deleted file mode 100644 index bd03ad115..000000000 --- a/backend/pkg/analytics/api/card.go +++ /dev/null @@ -1,92 +0,0 @@ -package api - -import ( - "time" -) - -// CardBase Common fields for the Card entity -type CardBase struct { - 
Name string `json:"name" validate:"required"` - IsPublic bool `json:"isPublic" validate:"omitempty"` - DefaultConfig map[string]any `json:"defaultConfig"` - Thumbnail *string `json:"thumbnail" validate:"omitempty,url"` - MetricType string `json:"metricType" validate:"required,oneof=timeseries table funnel"` - MetricOf string `json:"metricOf" validate:"required,oneof=session_count user_count"` - MetricFormat string `json:"metricFormat" validate:"required,oneof=default percentage"` - ViewType string `json:"viewType" validate:"required,oneof=line_chart table_view"` - MetricValue []string `json:"metricValue" validate:"omitempty"` - SessionID *int64 `json:"sessionId" validate:"omitempty"` - Series []CardSeries `json:"series" validate:"required,dive"` -} - -// Card Fields specific to database operations -type Card struct { - CardBase - ProjectID int64 `json:"projectId" validate:"required"` - UserID int64 `json:"userId" validate:"required"` - CardID int64 `json:"cardId"` - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` - DeletedAt *time.Time `json:"deleted_at,omitempty"` - EditedAt *time.Time `json:"edited_at,omitempty"` -} - -type CardSeries struct { - SeriesID int64 `json:"seriesId" validate:"omitempty"` - MetricID int64 `json:"metricId" validate:"omitempty"` - Name string `json:"name" validate:"required"` - CreatedAt time.Time `json:"createdAt" validate:"omitempty"` - DeletedAt *time.Time `json:"deletedAt" validate:"omitempty"` - Index int64 `json:"index" validate:"required"` - Filter SeriesFilter `json:"filter"` -} - -type SeriesFilter struct { - EventOrder string `json:"eventOrder" validate:"required,oneof=then or and"` - Filters []FilterItem `json:"filters"` -} - -type FilterItem struct { - Type string `json:"type" validate:"required"` - Operator string `json:"operator" validate:"required"` - Source string `json:"source" validate:"required"` - SourceOperator string `json:"sourceOperator" validate:"required"` - Value []string `json:"value" validate:"required,dive,required"` - IsEvent bool `json:"isEvent"` -} - -// CardCreateRequest Fields required for creating a card (from the frontend) -type CardCreateRequest struct { - CardBase -} - -type CardGetResponse struct { - Card -} - -type CardUpdateRequest struct { - CardBase -} - -type GetCardsResponse struct { - Cards []Card `json:"cards"` - Total int64 `json:"total"` -} - -type DataPoint struct { - Timestamp int64 `json:"timestamp"` - Series map[string]int64 `json:"series"` -} - -type GetCardChartDataRequest struct { - ProjectID int64 `json:"projectId" validate:"required"` - MetricType string `json:"metricType" validate:"required,oneof=timeseries table funnel"` - MetricOf string `json:"metricOf" validate:"required,oneof=session_count user_count"` - MetricFormat string `json:"metricFormat" validate:"required,oneof=default percentage"` - SessionID int64 `json:"sessionId" validate:"required"` - Series []CardSeries `json:"series"` -} - -type GetCardChartDataResponse struct { - Data []DataPoint `json:"data"` -} diff --git a/backend/pkg/analytics/api/handlers.go b/backend/pkg/analytics/api/handlers.go deleted file mode 100644 index 758e098b2..000000000 --- a/backend/pkg/analytics/api/handlers.go +++ /dev/null @@ -1,64 +0,0 @@ -package api - -import ( - "fmt" - "net/http" - "strconv" - - "github.com/gorilla/mux" - - config "openreplay/backend/internal/config/analytics" - "openreplay/backend/pkg/analytics/service" - "openreplay/backend/pkg/logger" - "openreplay/backend/pkg/server/api" -) - -type handlersImpl struct { 
- log logger.Logger - responser *api.Responser - jsonSizeLimit int64 - service service.Service -} - -func (e *handlersImpl) GetAll() []*api.Description { - return []*api.Description{ - {"/v1/analytics/{projectId}/dashboards", e.createDashboard, "POST"}, - {"/v1/analytics/{projectId}/dashboards", e.getDashboards, "GET"}, - {"/v1/analytics/{projectId}/dashboards/{id}", e.getDashboard, "GET"}, - {"/v1/analytics/{projectId}/dashboards/{id}", e.updateDashboard, "PUT"}, - {"/v1/analytics/{projectId}/dashboards/{id}", e.deleteDashboard, "DELETE"}, - {"/v1/analytics/{projectId}/dashboards/{id}/cards", e.addCardToDashboard, "POST"}, - {"/v1/analytics/{projectId}/dashboards/{id}/cards/{cardId}", e.removeCardFromDashboard, "DELETE"}, - {"/v1/analytics/{projectId}/cards", e.createCard, "POST"}, - {"/v1/analytics/{projectId}/cards", e.getCardsPaginated, "GET"}, - {"/v1/analytics/{projectId}/cards/{id}", e.getCard, "GET"}, - {"/v1/analytics/{projectId}/cards/{id}", e.updateCard, "PUT"}, - {"/v1/analytics/{projectId}/cards/{id}", e.deleteCard, "DELETE"}, - {"/v1/analytics/{projectId}/cards/{id}/chart", e.getCardChartData, "POST"}, - {"/v1/analytics/{projectId}/cards/{id}/try", e.getCardChartData, "POST"}, - } -} - -func NewHandlers(log logger.Logger, cfg *config.Config, responser *api.Responser, service service.Service) (api.Handlers, error) { - return &handlersImpl{ - log: log, - responser: responser, - jsonSizeLimit: cfg.JsonSizeLimit, - service: service, - }, nil -} - -func getIDFromRequest(r *http.Request, key string) (int, error) { - vars := mux.Vars(r) - idStr := vars[key] - if idStr == "" { - return 0, fmt.Errorf("missing %s in request", key) - } - - id, err := strconv.Atoi(idStr) - if err != nil { - return 0, fmt.Errorf("invalid %s format", key) - } - - return id, nil -} diff --git a/backend/pkg/analytics/builder.go b/backend/pkg/analytics/builder.go index 5303ef283..18250790b 100644 --- a/backend/pkg/analytics/builder.go +++ b/backend/pkg/analytics/builder.go @@ -4,8 +4,8 @@ import ( "time" "openreplay/backend/internal/config/analytics" - analyticsAPI "openreplay/backend/pkg/analytics/api" - "openreplay/backend/pkg/analytics/service" + "openreplay/backend/pkg/analytics/cards" + "openreplay/backend/pkg/analytics/dashboards" "openreplay/backend/pkg/db/postgres/pool" "openreplay/backend/pkg/logger" "openreplay/backend/pkg/metrics/web" @@ -16,34 +16,40 @@ import ( ) type ServicesBuilder struct { - Auth auth.Auth - RateLimiter *limiter.UserRateLimiter - AuditTrail tracer.Tracer - AnalyticsAPI api.Handlers + Auth auth.Auth + RateLimiter *limiter.UserRateLimiter + AuditTrail tracer.Tracer + CardsAPI api.Handlers + DashboardsAPI api.Handlers } func NewServiceBuilder(log logger.Logger, cfg *analytics.Config, webMetrics web.Web, pgconn pool.Pool) (*ServicesBuilder, error) { responser := api.NewResponser(webMetrics) - audiTrail, err := tracer.NewTracer(log, pgconn) if err != nil { return nil, err } - - analyticsService, err := service.NewService(log, pgconn) + cardsService, err := cards.New(log, pgconn) if err != nil { return nil, err } - - handlers, err := analyticsAPI.NewHandlers(log, cfg, responser, analyticsService) + cardsHandlers, err := cards.NewHandlers(log, cfg, responser, cardsService) + if err != nil { + return nil, err + } + dashboardsService, err := dashboards.New(log, pgconn) + if err != nil { + return nil, err + } + dashboardsHandlers, err := dashboards.NewHandlers(log, cfg, responser, dashboardsService) if err != nil { return nil, err } - return &ServicesBuilder{ - Auth: auth.NewAuth(log, 
cfg.JWTSecret, cfg.JWTSpotSecret, pgconn, nil), - RateLimiter: limiter.NewUserRateLimiter(10, 30, 1*time.Minute, 5*time.Minute), - AuditTrail: audiTrail, - AnalyticsAPI: handlers, + Auth: auth.NewAuth(log, cfg.JWTSecret, cfg.JWTSpotSecret, pgconn, nil), + RateLimiter: limiter.NewUserRateLimiter(10, 30, 1*time.Minute, 5*time.Minute), + AuditTrail: audiTrail, + CardsAPI: cardsHandlers, + DashboardsAPI: dashboardsHandlers, }, nil } diff --git a/backend/pkg/analytics/service/card-service.go b/backend/pkg/analytics/cards/cards.go similarity index 82% rename from backend/pkg/analytics/service/card-service.go rename to backend/pkg/analytics/cards/cards.go index a478187b6..3e0033ce8 100644 --- a/backend/pkg/analytics/service/card-service.go +++ b/backend/pkg/analytics/cards/cards.go @@ -1,16 +1,42 @@ -package service +package cards import ( "context" "encoding/json" "fmt" + "strings" + "github.com/jackc/pgx/v4" "github.com/lib/pq" - "openreplay/backend/pkg/analytics/api/models" - "strings" + + "openreplay/backend/pkg/db/postgres/pool" + "openreplay/backend/pkg/logger" ) -func (s *serviceImpl) CreateCard(projectId int, userID uint64, req *models.CardCreateRequest) (*models.CardGetResponse, error) { +type Cards interface { + GetCard(projectId int, cardId int) (*CardGetResponse, error) + GetCardWithSeries(projectId int, cardId int) (*CardGetResponse, error) + GetCards(projectId int) (*GetCardsResponse, error) + GetCardsPaginated(projectId int, filters CardListFilter, sort CardListSort, limit int, offset int) (*GetCardsResponsePaginated, error) + CreateCard(projectId int, userId uint64, req *CardCreateRequest) (*CardGetResponse, error) + UpdateCard(projectId int, cardId int64, userId uint64, req *CardUpdateRequest) (*CardGetResponse, error) + DeleteCard(projectId int, cardId int64, userId uint64) error + GetCardChartData(projectId int, userId uint64, req *GetCardChartDataRequest) ([]DataPoint, error) +} + +type cardsImpl struct { + log logger.Logger + pgconn pool.Pool +} + +func New(log logger.Logger, conn pool.Pool) (Cards, error) { + return &cardsImpl{ + log: log, + pgconn: conn, + }, nil +} + +func (s *cardsImpl) CreateCard(projectId int, userID uint64, req *CardCreateRequest) (*CardGetResponse, error) { if req.MetricValue == nil { req.MetricValue = []string{} } @@ -41,7 +67,7 @@ func (s *serviceImpl) CreateCard(projectId int, userID uint64, req *models.CardC VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING metric_id, project_id, user_id, name, metric_type, view_type, metric_of, metric_value, metric_format, is_public, created_at, edited_at` - card := &models.CardGetResponse{} + card := &CardGetResponse{} err = tx.QueryRow( ctx, sql, projectId, userID, req.Name, req.MetricType, req.ViewType, req.MetricOf, req.MetricValue, req.MetricFormat, req.IsPublic, @@ -73,7 +99,7 @@ func (s *serviceImpl) CreateCard(projectId int, userID uint64, req *models.CardC return card, nil } -func (s *serviceImpl) CreateSeries(ctx context.Context, tx pgx.Tx, metricId int64, series []models.CardSeriesBase) []models.CardSeries { +func (s *cardsImpl) CreateSeries(ctx context.Context, tx pgx.Tx, metricId int64, series []CardSeriesBase) []CardSeries { if len(series) == 0 { return nil // No series to create } @@ -114,9 +140,9 @@ func (s *serviceImpl) CreateSeries(ctx context.Context, tx pgx.Tx, metricId int6 } // Collect inserted series - var seriesList []models.CardSeries + var seriesList []CardSeries for rows.Next() { - cardSeries := models.CardSeries{} + cardSeries := CardSeries{} if err := 
rows.Scan(&cardSeries.SeriesID, &cardSeries.MetricID, &cardSeries.Name, &cardSeries.Index, &cardSeries.Filter); err != nil { s.log.Error(ctx, "failed to scan series: %v", err) continue @@ -127,13 +153,13 @@ func (s *serviceImpl) CreateSeries(ctx context.Context, tx pgx.Tx, metricId int6 return seriesList } -func (s *serviceImpl) GetCard(projectId int, cardID int) (*models.CardGetResponse, error) { +func (s *cardsImpl) GetCard(projectId int, cardID int) (*CardGetResponse, error) { sql := `SELECT metric_id, project_id, user_id, name, metric_type, view_type, metric_of, metric_value, metric_format, is_public, created_at, edited_at FROM public.metrics WHERE metric_id = $1 AND project_id = $2 AND deleted_at IS NULL` - card := &models.CardGetResponse{} + card := &CardGetResponse{} err := s.pgconn.QueryRow(sql, cardID, projectId).Scan( &card.CardID, &card.ProjectID, &card.UserID, &card.Name, &card.MetricType, &card.ViewType, &card.MetricOf, &card.MetricValue, &card.MetricFormat, &card.IsPublic, &card.CreatedAt, &card.EditedAt, ) @@ -145,7 +171,7 @@ func (s *serviceImpl) GetCard(projectId int, cardID int) (*models.CardGetRespons return card, nil } -func (s *serviceImpl) GetCardWithSeries(projectId int, cardID int) (*models.CardGetResponse, error) { +func (s *cardsImpl) GetCardWithSeries(projectId int, cardID int) (*CardGetResponse, error) { sql := ` SELECT m.metric_id, m.project_id, m.user_id, m.name, m.metric_type, m.view_type, m.metric_of, m.metric_value, m.metric_format, m.is_public, m.created_at, m.edited_at, @@ -166,7 +192,7 @@ func (s *serviceImpl) GetCardWithSeries(projectId int, cardID int) (*models.Card m.metric_of, m.metric_value, m.metric_format, m.is_public, m.created_at, m.edited_at ` - card := &models.CardGetResponse{} + card := &CardGetResponse{} var seriesJSON []byte err := s.pgconn.QueryRow(sql, cardID, projectId).Scan( &card.CardID, &card.ProjectID, &card.UserID, &card.Name, &card.MetricType, &card.ViewType, &card.MetricOf, @@ -184,7 +210,7 @@ func (s *serviceImpl) GetCardWithSeries(projectId int, cardID int) (*models.Card return card, nil } -func (s *serviceImpl) GetCards(projectId int) (*models.GetCardsResponse, error) { +func (s *cardsImpl) GetCards(projectId int) (*GetCardsResponse, error) { sql := ` SELECT metric_id, project_id, user_id, name, metric_type, view_type, metric_of, metric_value, metric_format, is_public, created_at, edited_at FROM public.metrics @@ -196,9 +222,9 @@ func (s *serviceImpl) GetCards(projectId int) (*models.GetCardsResponse, error) } defer rows.Close() - cards := make([]models.Card, 0) + cards := make([]Card, 0) for rows.Next() { - card := models.Card{} + card := Card{} if err := rows.Scan( &card.CardID, &card.ProjectID, &card.UserID, &card.Name, &card.MetricType, &card.ViewType, &card.MetricOf, &card.MetricValue, &card.MetricFormat, &card.IsPublic, &card.CreatedAt, &card.EditedAt, @@ -208,21 +234,21 @@ func (s *serviceImpl) GetCards(projectId int) (*models.GetCardsResponse, error) cards = append(cards, card) } - return &models.GetCardsResponse{Cards: cards}, nil + return &GetCardsResponse{Cards: cards}, nil } -func (s *serviceImpl) GetCardsPaginated( +func (s *cardsImpl) GetCardsPaginated( projectId int, - filters models.CardListFilter, - sort models.CardListSort, + filters CardListFilter, + sort CardListSort, limit, offset int, -) (*models.GetCardsResponsePaginated, error) { +) (*GetCardsResponsePaginated, error) { // Validate inputs - if err := models.ValidateStruct(filters); err != nil { + if err := ValidateStruct(filters); err != nil { return 
nil, fmt.Errorf("invalid filters: %w", err) } - if err := models.ValidateStruct(sort); err != nil { + if err := ValidateStruct(sort); err != nil { return nil, fmt.Errorf("invalid sort: %w", err) } @@ -289,9 +315,9 @@ func (s *serviceImpl) GetCardsPaginated( } defer rows.Close() - var cards []models.Card + var cards []Card for rows.Next() { - var card models.Card + var card Card if err := rows.Scan( &card.CardID, &card.ProjectID, &card.UserID, &card.Name, &card.MetricType, &card.ViewType, &card.MetricOf, &card.MetricValue, &card.MetricFormat, &card.IsPublic, &card.CreatedAt, &card.EditedAt, @@ -315,13 +341,13 @@ func (s *serviceImpl) GetCardsPaginated( return nil, fmt.Errorf("failed to get total count: %w", err) } - return &models.GetCardsResponsePaginated{ + return &GetCardsResponsePaginated{ Cards: cards, Total: total, }, nil } -func (s *serviceImpl) UpdateCard(projectId int, cardID int64, userID uint64, req *models.CardUpdateRequest) (*models.CardGetResponse, error) { +func (s *cardsImpl) UpdateCard(projectId int, cardID int64, userID uint64, req *CardUpdateRequest) (*CardGetResponse, error) { if req.MetricValue == nil { req.MetricValue = []string{} } @@ -353,7 +379,7 @@ func (s *serviceImpl) UpdateCard(projectId int, cardID int64, userID uint64, req WHERE metric_id = $8 AND project_id = $9 AND deleted_at IS NULL RETURNING metric_id, project_id, user_id, name, metric_type, view_type, metric_of, metric_value, metric_format, is_public, created_at, edited_at` - card := &models.CardGetResponse{} + card := &CardGetResponse{} err = tx.QueryRow(ctx, sql, req.Name, req.MetricType, req.ViewType, req.MetricOf, req.MetricValue, req.MetricFormat, req.IsPublic, cardID, projectId, ).Scan( @@ -380,7 +406,7 @@ func (s *serviceImpl) UpdateCard(projectId int, cardID int64, userID uint64, req return card, nil } -func (s *serviceImpl) DeleteCardSeries(cardId int64) error { +func (s *cardsImpl) DeleteCardSeries(cardId int64) error { sql := `DELETE FROM public.metric_series WHERE metric_id = $1` err := s.pgconn.Exec(sql, cardId) if err != nil { @@ -389,7 +415,7 @@ func (s *serviceImpl) DeleteCardSeries(cardId int64) error { return nil } -func (s *serviceImpl) DeleteCard(projectId int, cardID int64, userID uint64) error { +func (s *cardsImpl) DeleteCard(projectId int, cardID int64, userID uint64) error { sql := ` UPDATE public.metrics SET deleted_at = now() @@ -402,7 +428,7 @@ func (s *serviceImpl) DeleteCard(projectId int, cardID int64, userID uint64) err return nil } -func (s *serviceImpl) GetCardChartData(projectId int, userID uint64, req *models.GetCardChartDataRequest) ([]models.DataPoint, error) { +func (s *cardsImpl) GetCardChartData(projectId int, userID uint64, req *GetCardChartDataRequest) ([]DataPoint, error) { jsonInput := ` { "data": [ @@ -419,7 +445,7 @@ func (s *serviceImpl) GetCardChartData(projectId int, userID uint64, req *models ] }` - var resp models.GetCardChartDataResponse + var resp GetCardChartDataResponse if err := json.Unmarshal([]byte(jsonInput), &resp); err != nil { return nil, fmt.Errorf("failed to unmarshal response: %w", err) } diff --git a/backend/pkg/analytics/api/card-handlers.go b/backend/pkg/analytics/cards/handlers.go similarity index 80% rename from backend/pkg/analytics/api/card-handlers.go rename to backend/pkg/analytics/cards/handlers.go index 7fe880e20..cb9c389b8 100644 --- a/backend/pkg/analytics/api/card-handlers.go +++ b/backend/pkg/analytics/cards/handlers.go @@ -1,18 +1,64 @@ -package api +package cards import ( "encoding/json" "fmt" "net/http" - 
"openreplay/backend/pkg/analytics/api/models" - "openreplay/backend/pkg/server/api" - "openreplay/backend/pkg/server/user" "strconv" "time" "github.com/go-playground/validator/v10" + "github.com/gorilla/mux" + + config "openreplay/backend/internal/config/analytics" + "openreplay/backend/pkg/logger" + "openreplay/backend/pkg/server/api" + "openreplay/backend/pkg/server/user" ) +func getIDFromRequest(r *http.Request, key string) (int, error) { + vars := mux.Vars(r) + idStr := vars[key] + if idStr == "" { + return 0, fmt.Errorf("missing %s in request", key) + } + + id, err := strconv.Atoi(idStr) + if err != nil { + return 0, fmt.Errorf("invalid %s format", key) + } + + return id, nil +} + +type handlersImpl struct { + log logger.Logger + responser *api.Responser + jsonSizeLimit int64 + cards Cards +} + +func (e *handlersImpl) GetAll() []*api.Description { + return []*api.Description{ + {"/v1/analytics/{projectId}/cards", e.createCard, "POST"}, + {"/v1/analytics/{projectId}/cards", e.getCardsPaginated, "GET"}, + {"/v1/analytics/{projectId}/cards/{id}", e.getCard, "GET"}, + {"/v1/analytics/{projectId}/cards/{id}", e.updateCard, "PUT"}, + {"/v1/analytics/{projectId}/cards/{id}", e.deleteCard, "DELETE"}, + {"/v1/analytics/{projectId}/cards/{id}/chart", e.getCardChartData, "POST"}, + {"/v1/analytics/{projectId}/cards/{id}/try", e.getCardChartData, "POST"}, + } +} + +func NewHandlers(log logger.Logger, cfg *config.Config, responser *api.Responser, cards Cards) (api.Handlers, error) { + return &handlersImpl{ + log: log, + responser: responser, + jsonSizeLimit: cfg.JsonSizeLimit, + cards: cards, + }, nil +} + func (e *handlersImpl) createCard(w http.ResponseWriter, r *http.Request) { startTime := time.Now() bodySize := 0 @@ -24,7 +70,7 @@ func (e *handlersImpl) createCard(w http.ResponseWriter, r *http.Request) { } bodySize = len(bodyBytes) - req := &models.CardCreateRequest{} + req := &CardCreateRequest{} if err := json.Unmarshal(bodyBytes, req); err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) return @@ -44,7 +90,7 @@ func (e *handlersImpl) createCard(w http.ResponseWriter, r *http.Request) { } currentUser := r.Context().Value("userData").(*user.User) - resp, err := e.service.CreateCard(projectID, currentUser.ID, req) + resp, err := e.cards.CreateCard(projectID, currentUser.ID, req) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return @@ -70,7 +116,7 @@ func (e *handlersImpl) getCard(w http.ResponseWriter, r *http.Request) { return } - resp, err := e.service.GetCardWithSeries(projectID, id) + resp, err := e.cards.GetCardWithSeries(projectID, id) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return @@ -90,7 +136,7 @@ func (e *handlersImpl) getCards(w http.ResponseWriter, r *http.Request) { } //currentUser := r.Context().Value("userData").(*user.User) - resp, err := e.service.GetCards(projectID) + resp, err := e.cards.GetCards(projectID) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return @@ -114,7 +160,7 @@ func (e *handlersImpl) getCardsPaginated(w http.ResponseWriter, r *http.Request) query := r.URL.Query() // Filters - filters := models.CardListFilter{ + filters := CardListFilter{ Filters: make(map[string]interface{}), } @@ -136,7 +182,7 @@ func 
(e *handlersImpl) getCardsPaginated(w http.ResponseWriter, r *http.Request) } // Sorting - sort := models.CardListSort{ + sort := CardListSort{ Field: query.Get("sort_field"), Order: query.Get("sort_order"), } @@ -163,17 +209,17 @@ func (e *handlersImpl) getCardsPaginated(w http.ResponseWriter, r *http.Request) offset := (page - 1) * limit // Validate inputs - if err := models.ValidateStruct(filters); err != nil { + if err := ValidateStruct(filters); err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, fmt.Errorf("invalid filters: %w", err), startTime, r.URL.Path, bodySize) return } - if err := models.ValidateStruct(sort); err != nil { + if err := ValidateStruct(sort); err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, fmt.Errorf("invalid sort: %w", err), startTime, r.URL.Path, bodySize) return } // Call the service - resp, err := e.service.GetCardsPaginated(projectID, filters, sort, limit, offset) + resp, err := e.cards.GetCardsPaginated(projectID, filters, sort, limit, offset) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return @@ -206,7 +252,7 @@ func (e *handlersImpl) updateCard(w http.ResponseWriter, r *http.Request) { } bodySize = len(bodyBytes) - req := &models.CardUpdateRequest{} + req := &CardUpdateRequest{} if err := json.Unmarshal(bodyBytes, req); err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) return @@ -220,7 +266,7 @@ func (e *handlersImpl) updateCard(w http.ResponseWriter, r *http.Request) { } currentUser := r.Context().Value("userData").(*user.User) - resp, err := e.service.UpdateCard(projectID, int64(cardId), currentUser.ID, req) + resp, err := e.cards.UpdateCard(projectID, int64(cardId), currentUser.ID, req) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return @@ -246,7 +292,7 @@ func (e *handlersImpl) deleteCard(w http.ResponseWriter, r *http.Request) { } currentUser := r.Context().Value("userData").(*user.User) - err = e.service.DeleteCard(projectID, int64(cardId), currentUser.ID) + err = e.cards.DeleteCard(projectID, int64(cardId), currentUser.ID) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return @@ -272,7 +318,7 @@ func (e *handlersImpl) getCardChartData(w http.ResponseWriter, r *http.Request) } bodySize = len(bodyBytes) - req := &models.GetCardChartDataRequest{} + req := &GetCardChartDataRequest{} if err := json.Unmarshal(bodyBytes, req); err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) return @@ -286,7 +332,7 @@ func (e *handlersImpl) getCardChartData(w http.ResponseWriter, r *http.Request) } currentUser := r.Context().Value("userData").(*user.User) - resp, err := e.service.GetCardChartData(projectID, currentUser.ID, req) + resp, err := e.cards.GetCardChartData(projectID, currentUser.ID, req) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return diff --git a/backend/pkg/analytics/api/models/card.go b/backend/pkg/analytics/cards/model.go similarity index 99% rename from backend/pkg/analytics/api/models/card.go rename to backend/pkg/analytics/cards/model.go index 
3d2be511a..21692a355 100644 --- a/backend/pkg/analytics/api/models/card.go +++ b/backend/pkg/analytics/cards/model.go @@ -1,4 +1,4 @@ -package models +package cards import ( "github.com/go-playground/validator/v10" diff --git a/backend/pkg/analytics/service/dashboard-service.go b/backend/pkg/analytics/dashboards/dashboards.go similarity index 78% rename from backend/pkg/analytics/service/dashboard-service.go rename to backend/pkg/analytics/dashboards/dashboards.go index bbbf81b3f..95ba863ed 100644 --- a/backend/pkg/analytics/service/dashboard-service.go +++ b/backend/pkg/analytics/dashboards/dashboards.go @@ -1,21 +1,46 @@ -package service +package dashboards import ( "context" "encoding/json" "errors" "fmt" - "openreplay/backend/pkg/analytics/api/models" + + "openreplay/backend/pkg/db/postgres/pool" + "openreplay/backend/pkg/logger" ) +type Dashboards interface { + GetDashboard(projectId int, dashboardId int, userId uint64) (*GetDashboardResponse, error) + GetDashboardsPaginated(projectId int, userId uint64, req *GetDashboardsRequest) (*GetDashboardsResponsePaginated, error) + GetDashboards(projectId int, userId uint64) (*GetDashboardsResponse, error) + CreateDashboard(projectId int, userId uint64, req *CreateDashboardRequest) (*GetDashboardResponse, error) + UpdateDashboard(projectId int, dashboardId int, userId uint64, req *UpdateDashboardRequest) (*GetDashboardResponse, error) + DeleteDashboard(projectId int, dashboardId int, userId uint64) error + AddCardsToDashboard(projectId int, dashboardId int, userId uint64, req *AddCardToDashboardRequest) error + DeleteCardFromDashboard(dashboardId int, cardId int) error +} + +type dashboardsImpl struct { + log logger.Logger + pgconn pool.Pool +} + +func New(log logger.Logger, conn pool.Pool) (Dashboards, error) { + return &dashboardsImpl{ + log: log, + pgconn: conn, + }, nil +} + // CreateDashboard Create a new dashboard -func (s *serviceImpl) CreateDashboard(projectId int, userID uint64, req *models.CreateDashboardRequest) (*models.GetDashboardResponse, error) { +func (s *dashboardsImpl) CreateDashboard(projectId int, userID uint64, req *CreateDashboardRequest) (*GetDashboardResponse, error) { sql := ` INSERT INTO dashboards (project_id, user_id, name, description, is_public, is_pinned) VALUES ($1, $2, $3, $4, $5, $6) RETURNING dashboard_id, project_id, user_id, name, description, is_public, is_pinned` - dashboard := &models.GetDashboardResponse{} + dashboard := &GetDashboardResponse{} err := s.pgconn.QueryRow(sql, projectId, userID, req.Name, req.Description, req.IsPublic, req.IsPinned).Scan( &dashboard.DashboardID, &dashboard.ProjectID, @@ -32,7 +57,7 @@ func (s *serviceImpl) CreateDashboard(projectId int, userID uint64, req *models. 
} // GetDashboard Fetch a specific dashboard by ID -func (s *serviceImpl) GetDashboard(projectId int, dashboardID int, userID uint64) (*models.GetDashboardResponse, error) { +func (s *dashboardsImpl) GetDashboard(projectId int, dashboardID int, userID uint64) (*GetDashboardResponse, error) { sql := ` WITH series_agg AS ( SELECT @@ -75,7 +100,7 @@ func (s *serviceImpl) GetDashboard(projectId int, dashboardID int, userID uint64 WHERE d.dashboard_id = $1 AND d.project_id = $2 AND d.deleted_at IS NULL GROUP BY d.dashboard_id, d.project_id, d.name, d.description, d.is_public, d.is_pinned, d.user_id` - dashboard := &models.GetDashboardResponse{} + dashboard := &GetDashboardResponse{} var ownerID int var metricsJSON []byte @@ -108,7 +133,7 @@ func (s *serviceImpl) GetDashboard(projectId int, dashboardID int, userID uint64 return dashboard, nil } -func (s *serviceImpl) GetDashboards(projectId int, userID uint64) (*models.GetDashboardsResponse, error) { +func (s *dashboardsImpl) GetDashboards(projectId int, userID uint64) (*GetDashboardsResponse, error) { sql := ` SELECT d.dashboard_id, d.user_id, d.project_id, d.name, d.description, d.is_public, d.is_pinned, u.email AS owner_email, u.name AS owner_name FROM dashboards d @@ -121,9 +146,9 @@ func (s *serviceImpl) GetDashboards(projectId int, userID uint64) (*models.GetDa } defer rows.Close() - var dashboards []models.Dashboard + var dashboards []Dashboard for rows.Next() { - var dashboard models.Dashboard + var dashboard Dashboard err := rows.Scan(&dashboard.DashboardID, &dashboard.UserID, &dashboard.ProjectID, &dashboard.Name, &dashboard.Description, &dashboard.IsPublic, &dashboard.IsPinned, &dashboard.OwnerEmail, &dashboard.OwnerName) if err != nil { @@ -137,13 +162,13 @@ func (s *serviceImpl) GetDashboards(projectId int, userID uint64) (*models.GetDa return nil, err } - return &models.GetDashboardsResponse{ + return &GetDashboardsResponse{ Dashboards: dashboards, }, nil } // GetDashboardsPaginated Fetch dashboards with pagination -func (s *serviceImpl) GetDashboardsPaginated(projectId int, userID uint64, req *models.GetDashboardsRequest) (*models.GetDashboardsResponsePaginated, error) { +func (s *dashboardsImpl) GetDashboardsPaginated(projectId int, userID uint64, req *GetDashboardsRequest) (*GetDashboardsResponsePaginated, error) { baseSQL, args := buildBaseQuery(projectId, userID, req) // Count total dashboards @@ -165,9 +190,9 @@ func (s *serviceImpl) GetDashboardsPaginated(projectId int, userID uint64, req * } defer rows.Close() - var dashboards []models.Dashboard + var dashboards []Dashboard for rows.Next() { - var dashboard models.Dashboard + var dashboard Dashboard err := rows.Scan( &dashboard.DashboardID, &dashboard.UserID, @@ -185,21 +210,21 @@ func (s *serviceImpl) GetDashboardsPaginated(projectId int, userID uint64, req * dashboards = append(dashboards, dashboard) } - return &models.GetDashboardsResponsePaginated{ + return &GetDashboardsResponsePaginated{ Dashboards: dashboards, Total: total, }, nil } // UpdateDashboard Update a dashboard -func (s *serviceImpl) UpdateDashboard(projectId int, dashboardID int, userID uint64, req *models.UpdateDashboardRequest) (*models.GetDashboardResponse, error) { +func (s *dashboardsImpl) UpdateDashboard(projectId int, dashboardID int, userID uint64, req *UpdateDashboardRequest) (*GetDashboardResponse, error) { sql := ` UPDATE dashboards SET name = $1, description = $2, is_public = $3, is_pinned = $4 WHERE dashboard_id = $5 AND project_id = $6 AND user_id = $7 AND deleted_at IS NULL RETURNING 
dashboard_id, project_id, user_id, name, description, is_public, is_pinned` - dashboard := &models.GetDashboardResponse{} + dashboard := &GetDashboardResponse{} err := s.pgconn.QueryRow(sql, req.Name, req.Description, req.IsPublic, req.IsPinned, dashboardID, projectId, userID).Scan( &dashboard.DashboardID, &dashboard.ProjectID, @@ -216,7 +241,7 @@ func (s *serviceImpl) UpdateDashboard(projectId int, dashboardID int, userID uin } // DeleteDashboard Soft-delete a dashboard -func (s *serviceImpl) DeleteDashboard(projectId int, dashboardID int, userID uint64) error { +func (s *dashboardsImpl) DeleteDashboard(projectId int, dashboardID int, userID uint64) error { sql := ` UPDATE dashboards SET deleted_at = now() @@ -231,7 +256,7 @@ func (s *serviceImpl) DeleteDashboard(projectId int, dashboardID int, userID uin } // Helper to build the base query for dashboards -func buildBaseQuery(projectId int, userID uint64, req *models.GetDashboardsRequest) (string, []interface{}) { +func buildBaseQuery(projectId int, userID uint64, req *GetDashboardsRequest) (string, []interface{}) { var conditions []string args := []interface{}{projectId} @@ -282,7 +307,7 @@ func getOrder(order string) string { return "ASC" } -func (s *serviceImpl) CardsExist(projectId int, cardIDs []int) (bool, error) { +func (s *dashboardsImpl) CardsExist(projectId int, cardIDs []int) (bool, error) { sql := ` SELECT COUNT(*) FROM public.metrics WHERE project_id = $1 AND metric_id = ANY($2) @@ -295,7 +320,7 @@ func (s *serviceImpl) CardsExist(projectId int, cardIDs []int) (bool, error) { return count == len(cardIDs), nil } -func (s *serviceImpl) AddCardsToDashboard(projectId int, dashboardId int, userId uint64, req *models.AddCardToDashboardRequest) error { +func (s *dashboardsImpl) AddCardsToDashboard(projectId int, dashboardId int, userId uint64, req *AddCardToDashboardRequest) error { _, err := s.GetDashboard(projectId, dashboardId, userId) if err != nil { return fmt.Errorf("failed to get dashboard: %w", err) @@ -370,7 +395,7 @@ func (s *serviceImpl) AddCardsToDashboard(projectId int, dashboardId int, userId return nil } -func (s *serviceImpl) DeleteCardFromDashboard(dashboardId int, cardId int) error { +func (s *dashboardsImpl) DeleteCardFromDashboard(dashboardId int, cardId int) error { sql := `DELETE FROM public.dashboard_widgets WHERE dashboard_id = $1 AND metric_id = $2` err := s.pgconn.Exec(sql, dashboardId, cardId) if err != nil { diff --git a/backend/pkg/analytics/api/dashboard-handlers.go b/backend/pkg/analytics/dashboards/handlers.go similarity index 81% rename from backend/pkg/analytics/api/dashboard-handlers.go rename to backend/pkg/analytics/dashboards/handlers.go index 48d3e3c5b..f4c3ef573 100644 --- a/backend/pkg/analytics/api/dashboard-handlers.go +++ b/backend/pkg/analytics/dashboards/handlers.go @@ -1,15 +1,64 @@ -package api +package dashboards import ( "encoding/json" - "github.com/go-playground/validator/v10" + "fmt" "net/http" - "openreplay/backend/pkg/analytics/api/models" + "strconv" + "time" + + "github.com/go-playground/validator/v10" + "github.com/gorilla/mux" + + config "openreplay/backend/internal/config/analytics" + "openreplay/backend/pkg/logger" "openreplay/backend/pkg/server/api" "openreplay/backend/pkg/server/user" - "time" ) +func getIDFromRequest(r *http.Request, key string) (int, error) { + vars := mux.Vars(r) + idStr := vars[key] + if idStr == "" { + return 0, fmt.Errorf("missing %s in request", key) + } + + id, err := strconv.Atoi(idStr) + if err != nil { + return 0, fmt.Errorf("invalid %s 
format", key) + } + + return id, nil +} + +type handlersImpl struct { + log logger.Logger + responser *api.Responser + jsonSizeLimit int64 + dashboards Dashboards +} + +func (e *handlersImpl) GetAll() []*api.Description { + return []*api.Description{ + {"/v1/analytics/{projectId}/dashboards", e.createDashboard, "POST"}, + {"/v1/analytics/{projectId}/dashboards", e.getDashboards, "GET"}, + {"/v1/analytics/{projectId}/dashboards/{id}", e.getDashboard, "GET"}, + {"/v1/analytics/{projectId}/dashboards/{id}", e.updateDashboard, "PUT"}, + {"/v1/analytics/{projectId}/dashboards/{id}", e.deleteDashboard, "DELETE"}, + {"/v1/analytics/{projectId}/dashboards/{id}/cards", e.addCardToDashboard, "POST"}, + {"/v1/analytics/{projectId}/dashboards/{id}/cards/{cardId}", e.removeCardFromDashboard, "DELETE"}, + } +} + +func NewHandlers(log logger.Logger, cfg *config.Config, responser *api.Responser, dashboards Dashboards) (api.Handlers, error) { + return &handlersImpl{ + log: log, + responser: responser, + jsonSizeLimit: cfg.JsonSizeLimit, + dashboards: dashboards, + }, nil +} + func (e *handlersImpl) createDashboard(w http.ResponseWriter, r *http.Request) { startTime := time.Now() bodySize := 0 @@ -21,7 +70,7 @@ func (e *handlersImpl) createDashboard(w http.ResponseWriter, r *http.Request) { } bodySize = len(bodyBytes) - req := &models.CreateDashboardRequest{} + req := &CreateDashboardRequest{} if err := json.Unmarshal(bodyBytes, req); err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) return @@ -41,7 +90,7 @@ func (e *handlersImpl) createDashboard(w http.ResponseWriter, r *http.Request) { } currentUser := r.Context().Value("userData").(*user.User) - resp, err := e.service.CreateDashboard(projectID, currentUser.ID, req) + resp, err := e.dashboards.CreateDashboard(projectID, currentUser.ID, req) e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize) } @@ -58,7 +107,7 @@ func (e *handlersImpl) getDashboards(w http.ResponseWriter, r *http.Request) { } u := r.Context().Value("userData").(*user.User) - resp, err := e.service.GetDashboards(projectID, u.ID) + resp, err := e.dashboards.GetDashboards(projectID, u.ID) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return @@ -84,7 +133,7 @@ func (e *handlersImpl) getDashboard(w http.ResponseWriter, r *http.Request) { } u := r.Context().Value("userData").(*user.User) - res, err := e.service.GetDashboard(projectID, dashboardID, u.ID) + res, err := e.dashboards.GetDashboard(projectID, dashboardID, u.ID) if err != nil { // Map errors to appropriate HTTP status codes if err.Error() == "not_found: dashboard not found" { @@ -124,7 +173,7 @@ func (e *handlersImpl) updateDashboard(w http.ResponseWriter, r *http.Request) { bodySize = len(bodyBytes) u := r.Context().Value("userData").(*user.User) - _, err = e.service.GetDashboard(projectID, dashboardID, u.ID) + _, err = e.dashboards.GetDashboard(projectID, dashboardID, u.ID) if err != nil { // Map errors to appropriate HTTP status codes if err.Error() == "not_found: dashboard not found" { @@ -137,14 +186,14 @@ func (e *handlersImpl) updateDashboard(w http.ResponseWriter, r *http.Request) { return } - req := &models.UpdateDashboardRequest{} + req := &UpdateDashboardRequest{} if err := json.Unmarshal(bodyBytes, req); err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, 
bodySize) return } currentUser := r.Context().Value("userData").(*user.User) - resp, err := e.service.UpdateDashboard(projectID, dashboardID, currentUser.ID, req) + resp, err := e.dashboards.UpdateDashboard(projectID, dashboardID, currentUser.ID, req) e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize) } @@ -166,7 +215,7 @@ func (e *handlersImpl) deleteDashboard(w http.ResponseWriter, r *http.Request) { } u := r.Context().Value("userData").(*user.User) - _, err = e.service.GetDashboard(projectID, dashboardID, u.ID) + _, err = e.dashboards.GetDashboard(projectID, dashboardID, u.ID) if err != nil { // Map errors to appropriate HTTP status codes if err.Error() == "not_found: dashboard not found" { @@ -179,7 +228,7 @@ func (e *handlersImpl) deleteDashboard(w http.ResponseWriter, r *http.Request) { return } - err = e.service.DeleteDashboard(projectID, dashboardID, u.ID) + err = e.dashboards.DeleteDashboard(projectID, dashboardID, u.ID) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return @@ -224,7 +273,7 @@ func (e *handlersImpl) addCardToDashboard(w http.ResponseWriter, r *http.Request bodySize = len(bodyBytes) - req := &models.AddCardToDashboardRequest{} + req := &AddCardToDashboardRequest{} if err := json.Unmarshal(bodyBytes, req); err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) return @@ -237,7 +286,7 @@ func (e *handlersImpl) addCardToDashboard(w http.ResponseWriter, r *http.Request return } - err = e.service.AddCardsToDashboard(projectID, dashboardID, u.ID, req) + err = e.dashboards.AddCardsToDashboard(projectID, dashboardID, u.ID, req) if err != nil { if err.Error() == "not_found: dashboard not found" { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusNotFound, err, startTime, r.URL.Path, bodySize) @@ -276,7 +325,7 @@ func (e *handlersImpl) removeCardFromDashboard(w http.ResponseWriter, r *http.Re } u := r.Context().Value("userData").(*user.User) - _, err = e.service.GetDashboard(projectID, dashboardID, u.ID) + _, err = e.dashboards.GetDashboard(projectID, dashboardID, u.ID) if err != nil { if err.Error() == "not_found: dashboard not found" { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusNotFound, err, startTime, r.URL.Path, bodySize) @@ -287,7 +336,7 @@ func (e *handlersImpl) removeCardFromDashboard(w http.ResponseWriter, r *http.Re } } - err = e.service.DeleteCardFromDashboard(dashboardID, cardID) + err = e.dashboards.DeleteCardFromDashboard(dashboardID, cardID) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return diff --git a/backend/pkg/analytics/api/models/model.go b/backend/pkg/analytics/dashboards/model.go similarity index 71% rename from backend/pkg/analytics/api/models/model.go rename to backend/pkg/analytics/dashboards/model.go index 377f67abd..6085b1f1a 100644 --- a/backend/pkg/analytics/api/models/model.go +++ b/backend/pkg/analytics/dashboards/model.go @@ -1,16 +1,18 @@ -package models +package dashboards + +import "openreplay/backend/pkg/analytics/cards" type Dashboard struct { - DashboardID int `json:"dashboardId"` - ProjectID int `json:"projectId"` - UserID int `json:"userId"` - Name string `json:"name"` - Description string `json:"description"` - IsPublic bool `json:"isPublic"` - IsPinned bool `json:"isPinned"` - OwnerEmail string 
`json:"ownerEmail"` - OwnerName string `json:"ownerName"` - Metrics []CardBase `json:"cards"` + DashboardID int `json:"dashboardId"` + ProjectID int `json:"projectId"` + UserID int `json:"userId"` + Name string `json:"name"` + Description string `json:"description"` + IsPublic bool `json:"isPublic"` + IsPinned bool `json:"isPinned"` + OwnerEmail string `json:"ownerEmail"` + OwnerName string `json:"ownerName"` + Metrics []cards.CardBase `json:"cards"` } type CreateDashboardResponse struct { diff --git a/backend/pkg/analytics/service/analytics.go b/backend/pkg/analytics/service/analytics.go deleted file mode 100644 index 6b844a4f6..000000000 --- a/backend/pkg/analytics/service/analytics.go +++ /dev/null @@ -1,50 +0,0 @@ -package service - -import ( - "context" - "errors" - - "openreplay/backend/pkg/analytics/api/models" - "openreplay/backend/pkg/db/postgres/pool" - "openreplay/backend/pkg/logger" -) - -type Service interface { - GetDashboard(projectId int, dashboardId int, userId uint64) (*models.GetDashboardResponse, error) - GetDashboardsPaginated(projectId int, userId uint64, req *models.GetDashboardsRequest) (*models.GetDashboardsResponsePaginated, error) - GetDashboards(projectId int, userId uint64) (*models.GetDashboardsResponse, error) - CreateDashboard(projectId int, userId uint64, req *models.CreateDashboardRequest) (*models.GetDashboardResponse, error) - UpdateDashboard(projectId int, dashboardId int, userId uint64, req *models.UpdateDashboardRequest) (*models.GetDashboardResponse, error) - DeleteDashboard(projectId int, dashboardId int, userId uint64) error - AddCardsToDashboard(projectId int, dashboardId int, userId uint64, req *models.AddCardToDashboardRequest) error - DeleteCardFromDashboard(dashboardId int, cardId int) error - GetCard(projectId int, cardId int) (*models.CardGetResponse, error) - GetCardWithSeries(projectId int, cardId int) (*models.CardGetResponse, error) - GetCards(projectId int) (*models.GetCardsResponse, error) - GetCardsPaginated(projectId int, filters models.CardListFilter, sort models.CardListSort, limit int, offset int) (*models.GetCardsResponsePaginated, error) - CreateCard(projectId int, userId uint64, req *models.CardCreateRequest) (*models.CardGetResponse, error) - UpdateCard(projectId int, cardId int64, userId uint64, req *models.CardUpdateRequest) (*models.CardGetResponse, error) - DeleteCard(projectId int, cardId int64, userId uint64) error - GetCardChartData(projectId int, userId uint64, req *models.GetCardChartDataRequest) ([]models.DataPoint, error) -} - -type serviceImpl struct { - log logger.Logger - pgconn pool.Pool - ctx context.Context -} - -func NewService(log logger.Logger, conn pool.Pool) (Service, error) { - switch { - case log == nil: - return nil, errors.New("logger is empty") - case conn == nil: - return nil, errors.New("connection pool is empty") - } - - return &serviceImpl{ - log: log, - pgconn: conn, - ctx: context.Background(), - }, nil -} From 880f4f1a949a9bb4e33f8cfa23a45ce61662b6eb Mon Sep 17 00:00:00 2001 From: Alexander Date: Mon, 23 Dec 2024 14:43:12 +0100 Subject: [PATCH 6/9] feat(analytics): better naming for cards and dashboards modules --- backend/pkg/analytics/cards/cards.go | 54 +++++-------------- backend/pkg/analytics/cards/handlers.go | 54 +++---------------- backend/pkg/analytics/cards/model.go | 18 ------- .../pkg/analytics/dashboards/dashboards.go | 34 ++++++------ backend/pkg/analytics/dashboards/handlers.go | 20 +++---- 5 files changed, 47 insertions(+), 133 deletions(-) diff --git 
a/backend/pkg/analytics/cards/cards.go b/backend/pkg/analytics/cards/cards.go index 3e0033ce8..1425a61f0 100644 --- a/backend/pkg/analytics/cards/cards.go +++ b/backend/pkg/analytics/cards/cards.go @@ -14,14 +14,13 @@ import ( ) type Cards interface { - GetCard(projectId int, cardId int) (*CardGetResponse, error) - GetCardWithSeries(projectId int, cardId int) (*CardGetResponse, error) - GetCards(projectId int) (*GetCardsResponse, error) - GetCardsPaginated(projectId int, filters CardListFilter, sort CardListSort, limit int, offset int) (*GetCardsResponsePaginated, error) - CreateCard(projectId int, userId uint64, req *CardCreateRequest) (*CardGetResponse, error) - UpdateCard(projectId int, cardId int64, userId uint64, req *CardUpdateRequest) (*CardGetResponse, error) - DeleteCard(projectId int, cardId int64, userId uint64) error - GetCardChartData(projectId int, userId uint64, req *GetCardChartDataRequest) ([]DataPoint, error) + Create(projectId int, userId uint64, req *CardCreateRequest) (*CardGetResponse, error) + Get(projectId int, cardId int) (*CardGetResponse, error) + GetWithSeries(projectId int, cardId int) (*CardGetResponse, error) + GetAll(projectId int) (*GetCardsResponse, error) + GetAllPaginated(projectId int, filters CardListFilter, sort CardListSort, limit int, offset int) (*GetCardsResponsePaginated, error) + Update(projectId int, cardId int64, userId uint64, req *CardUpdateRequest) (*CardGetResponse, error) + Delete(projectId int, cardId int64, userId uint64) error } type cardsImpl struct { @@ -36,7 +35,7 @@ func New(log logger.Logger, conn pool.Pool) (Cards, error) { }, nil } -func (s *cardsImpl) CreateCard(projectId int, userID uint64, req *CardCreateRequest) (*CardGetResponse, error) { +func (s *cardsImpl) Create(projectId int, userID uint64, req *CardCreateRequest) (*CardGetResponse, error) { if req.MetricValue == nil { req.MetricValue = []string{} } @@ -153,7 +152,7 @@ func (s *cardsImpl) CreateSeries(ctx context.Context, tx pgx.Tx, metricId int64, return seriesList } -func (s *cardsImpl) GetCard(projectId int, cardID int) (*CardGetResponse, error) { +func (s *cardsImpl) Get(projectId int, cardID int) (*CardGetResponse, error) { sql := `SELECT metric_id, project_id, user_id, name, metric_type, view_type, metric_of, metric_value, metric_format, is_public, created_at, edited_at FROM public.metrics @@ -171,7 +170,7 @@ func (s *cardsImpl) GetCard(projectId int, cardID int) (*CardGetResponse, error) return card, nil } -func (s *cardsImpl) GetCardWithSeries(projectId int, cardID int) (*CardGetResponse, error) { +func (s *cardsImpl) GetWithSeries(projectId int, cardID int) (*CardGetResponse, error) { sql := ` SELECT m.metric_id, m.project_id, m.user_id, m.name, m.metric_type, m.view_type, m.metric_of, m.metric_value, m.metric_format, m.is_public, m.created_at, m.edited_at, @@ -210,7 +209,7 @@ func (s *cardsImpl) GetCardWithSeries(projectId int, cardID int) (*CardGetRespon return card, nil } -func (s *cardsImpl) GetCards(projectId int) (*GetCardsResponse, error) { +func (s *cardsImpl) GetAll(projectId int) (*GetCardsResponse, error) { sql := ` SELECT metric_id, project_id, user_id, name, metric_type, view_type, metric_of, metric_value, metric_format, is_public, created_at, edited_at FROM public.metrics @@ -237,7 +236,7 @@ func (s *cardsImpl) GetCards(projectId int) (*GetCardsResponse, error) { return &GetCardsResponse{Cards: cards}, nil } -func (s *cardsImpl) GetCardsPaginated( +func (s *cardsImpl) GetAllPaginated( projectId int, filters CardListFilter, sort CardListSort, @@ 
-347,7 +346,7 @@ func (s *cardsImpl) GetCardsPaginated( }, nil } -func (s *cardsImpl) UpdateCard(projectId int, cardID int64, userID uint64, req *CardUpdateRequest) (*CardGetResponse, error) { +func (s *cardsImpl) Update(projectId int, cardID int64, userID uint64, req *CardUpdateRequest) (*CardGetResponse, error) { if req.MetricValue == nil { req.MetricValue = []string{} } @@ -415,7 +414,7 @@ func (s *cardsImpl) DeleteCardSeries(cardId int64) error { return nil } -func (s *cardsImpl) DeleteCard(projectId int, cardID int64, userID uint64) error { +func (s *cardsImpl) Delete(projectId int, cardID int64, userID uint64) error { sql := ` UPDATE public.metrics SET deleted_at = now() @@ -427,28 +426,3 @@ func (s *cardsImpl) DeleteCard(projectId int, cardID int64, userID uint64) error } return nil } - -func (s *cardsImpl) GetCardChartData(projectId int, userID uint64, req *GetCardChartDataRequest) ([]DataPoint, error) { - jsonInput := ` - { - "data": [ - { - "timestamp": 1733934939000, - "Series A": 100, - "Series B": 200 - }, - { - "timestamp": 1733935939000, - "Series A": 150, - "Series B": 250 - } - ] - }` - - var resp GetCardChartDataResponse - if err := json.Unmarshal([]byte(jsonInput), &resp); err != nil { - return nil, fmt.Errorf("failed to unmarshal response: %w", err) - } - - return resp.Data, nil -} diff --git a/backend/pkg/analytics/cards/handlers.go b/backend/pkg/analytics/cards/handlers.go index cb9c389b8..13161b8b5 100644 --- a/backend/pkg/analytics/cards/handlers.go +++ b/backend/pkg/analytics/cards/handlers.go @@ -45,8 +45,6 @@ func (e *handlersImpl) GetAll() []*api.Description { {"/v1/analytics/{projectId}/cards/{id}", e.getCard, "GET"}, {"/v1/analytics/{projectId}/cards/{id}", e.updateCard, "PUT"}, {"/v1/analytics/{projectId}/cards/{id}", e.deleteCard, "DELETE"}, - {"/v1/analytics/{projectId}/cards/{id}/chart", e.getCardChartData, "POST"}, - {"/v1/analytics/{projectId}/cards/{id}/try", e.getCardChartData, "POST"}, } } @@ -90,7 +88,7 @@ func (e *handlersImpl) createCard(w http.ResponseWriter, r *http.Request) { } currentUser := r.Context().Value("userData").(*user.User) - resp, err := e.cards.CreateCard(projectID, currentUser.ID, req) + resp, err := e.cards.Create(projectID, currentUser.ID, req) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return @@ -116,7 +114,7 @@ func (e *handlersImpl) getCard(w http.ResponseWriter, r *http.Request) { return } - resp, err := e.cards.GetCardWithSeries(projectID, id) + resp, err := e.cards.GetWithSeries(projectID, id) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return @@ -136,7 +134,7 @@ func (e *handlersImpl) getCards(w http.ResponseWriter, r *http.Request) { } //currentUser := r.Context().Value("userData").(*user.User) - resp, err := e.cards.GetCards(projectID) + resp, err := e.cards.GetAll(projectID) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return @@ -219,7 +217,7 @@ func (e *handlersImpl) getCardsPaginated(w http.ResponseWriter, r *http.Request) } // Call the service - resp, err := e.cards.GetCardsPaginated(projectID, filters, sort, limit, offset) + resp, err := e.cards.GetAllPaginated(projectID, filters, sort, limit, offset) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, 
bodySize) return @@ -266,7 +264,7 @@ func (e *handlersImpl) updateCard(w http.ResponseWriter, r *http.Request) { } currentUser := r.Context().Value("userData").(*user.User) - resp, err := e.cards.UpdateCard(projectID, int64(cardId), currentUser.ID, req) + resp, err := e.cards.Update(projectID, int64(cardId), currentUser.ID, req) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return @@ -292,7 +290,7 @@ func (e *handlersImpl) deleteCard(w http.ResponseWriter, r *http.Request) { } currentUser := r.Context().Value("userData").(*user.User) - err = e.cards.DeleteCard(projectID, int64(cardId), currentUser.ID) + err = e.cards.Delete(projectID, int64(cardId), currentUser.ID) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return @@ -300,43 +298,3 @@ func (e *handlersImpl) deleteCard(w http.ResponseWriter, r *http.Request) { e.responser.ResponseWithJSON(e.log, r.Context(), w, nil, startTime, r.URL.Path, bodySize) } - -func (e *handlersImpl) getCardChartData(w http.ResponseWriter, r *http.Request) { - startTime := time.Now() - bodySize := 0 - - projectID, err := getIDFromRequest(r, "projectId") - if err != nil { - e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) - return - } - - bodyBytes, err := api.ReadBody(e.log, w, r, e.jsonSizeLimit) - if err != nil { - e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize) - return - } - bodySize = len(bodyBytes) - - req := &GetCardChartDataRequest{} - if err := json.Unmarshal(bodyBytes, req); err != nil { - e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) - return - } - - validate := validator.New() - err = validate.Struct(req) - if err != nil { - e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) - return - } - - currentUser := r.Context().Value("userData").(*user.User) - resp, err := e.cards.GetCardChartData(projectID, currentUser.ID, req) - if err != nil { - e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) - return - } - - e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize) -} diff --git a/backend/pkg/analytics/cards/model.go b/backend/pkg/analytics/cards/model.go index 21692a355..5ab4144f0 100644 --- a/backend/pkg/analytics/cards/model.go +++ b/backend/pkg/analytics/cards/model.go @@ -85,24 +85,6 @@ type GetCardsResponsePaginated struct { Total int `json:"total"` } -type DataPoint struct { - Timestamp int64 `json:"timestamp"` - Series map[string]int64 `json:"series"` -} - -type GetCardChartDataRequest struct { - MetricType string `json:"metricType" validate:"required,oneof=timeseries table funnel"` - MetricOf string `json:"metricOf" validate:"required,oneof=session_count user_count"` - ViewType string `json:"viewType" validate:"required,oneof=line_chart table_view"` - MetricFormat string `json:"metricFormat" validate:"required,oneof=default percentage"` - SessionID int64 `json:"sessionId"` - Series []CardSeries `json:"series" validate:"required,dive"` -} - -type GetCardChartDataResponse struct { - Data []DataPoint `json:"data"` -} - /************************************************************ * 
CardListFilter and Sorter */ diff --git a/backend/pkg/analytics/dashboards/dashboards.go b/backend/pkg/analytics/dashboards/dashboards.go index 95ba863ed..7a4147654 100644 --- a/backend/pkg/analytics/dashboards/dashboards.go +++ b/backend/pkg/analytics/dashboards/dashboards.go @@ -11,14 +11,14 @@ import ( ) type Dashboards interface { - GetDashboard(projectId int, dashboardId int, userId uint64) (*GetDashboardResponse, error) - GetDashboardsPaginated(projectId int, userId uint64, req *GetDashboardsRequest) (*GetDashboardsResponsePaginated, error) - GetDashboards(projectId int, userId uint64) (*GetDashboardsResponse, error) - CreateDashboard(projectId int, userId uint64, req *CreateDashboardRequest) (*GetDashboardResponse, error) - UpdateDashboard(projectId int, dashboardId int, userId uint64, req *UpdateDashboardRequest) (*GetDashboardResponse, error) - DeleteDashboard(projectId int, dashboardId int, userId uint64) error - AddCardsToDashboard(projectId int, dashboardId int, userId uint64, req *AddCardToDashboardRequest) error - DeleteCardFromDashboard(dashboardId int, cardId int) error + Create(projectId int, userId uint64, req *CreateDashboardRequest) (*GetDashboardResponse, error) + Get(projectId int, dashboardId int, userId uint64) (*GetDashboardResponse, error) + GetAll(projectId int, userId uint64) (*GetDashboardsResponse, error) + GetAllPaginated(projectId int, userId uint64, req *GetDashboardsRequest) (*GetDashboardsResponsePaginated, error) + Update(projectId int, dashboardId int, userId uint64, req *UpdateDashboardRequest) (*GetDashboardResponse, error) + Delete(projectId int, dashboardId int, userId uint64) error + AddCards(projectId int, dashboardId int, userId uint64, req *AddCardToDashboardRequest) error + DeleteCard(dashboardId int, cardId int) error } type dashboardsImpl struct { @@ -34,7 +34,7 @@ func New(log logger.Logger, conn pool.Pool) (Dashboards, error) { } // CreateDashboard Create a new dashboard -func (s *dashboardsImpl) CreateDashboard(projectId int, userID uint64, req *CreateDashboardRequest) (*GetDashboardResponse, error) { +func (s *dashboardsImpl) Create(projectId int, userID uint64, req *CreateDashboardRequest) (*GetDashboardResponse, error) { sql := ` INSERT INTO dashboards (project_id, user_id, name, description, is_public, is_pinned) VALUES ($1, $2, $3, $4, $5, $6) @@ -57,7 +57,7 @@ func (s *dashboardsImpl) CreateDashboard(projectId int, userID uint64, req *Crea } // GetDashboard Fetch a specific dashboard by ID -func (s *dashboardsImpl) GetDashboard(projectId int, dashboardID int, userID uint64) (*GetDashboardResponse, error) { +func (s *dashboardsImpl) Get(projectId int, dashboardID int, userID uint64) (*GetDashboardResponse, error) { sql := ` WITH series_agg AS ( SELECT @@ -133,7 +133,7 @@ func (s *dashboardsImpl) GetDashboard(projectId int, dashboardID int, userID uin return dashboard, nil } -func (s *dashboardsImpl) GetDashboards(projectId int, userID uint64) (*GetDashboardsResponse, error) { +func (s *dashboardsImpl) GetAll(projectId int, userID uint64) (*GetDashboardsResponse, error) { sql := ` SELECT d.dashboard_id, d.user_id, d.project_id, d.name, d.description, d.is_public, d.is_pinned, u.email AS owner_email, u.name AS owner_name FROM dashboards d @@ -168,7 +168,7 @@ func (s *dashboardsImpl) GetDashboards(projectId int, userID uint64) (*GetDashbo } // GetDashboardsPaginated Fetch dashboards with pagination -func (s *dashboardsImpl) GetDashboardsPaginated(projectId int, userID uint64, req *GetDashboardsRequest) (*GetDashboardsResponsePaginated, 
error) { +func (s *dashboardsImpl) GetAllPaginated(projectId int, userID uint64, req *GetDashboardsRequest) (*GetDashboardsResponsePaginated, error) { baseSQL, args := buildBaseQuery(projectId, userID, req) // Count total dashboards @@ -217,7 +217,7 @@ func (s *dashboardsImpl) GetDashboardsPaginated(projectId int, userID uint64, re } // UpdateDashboard Update a dashboard -func (s *dashboardsImpl) UpdateDashboard(projectId int, dashboardID int, userID uint64, req *UpdateDashboardRequest) (*GetDashboardResponse, error) { +func (s *dashboardsImpl) Update(projectId int, dashboardID int, userID uint64, req *UpdateDashboardRequest) (*GetDashboardResponse, error) { sql := ` UPDATE dashboards SET name = $1, description = $2, is_public = $3, is_pinned = $4 @@ -241,7 +241,7 @@ func (s *dashboardsImpl) UpdateDashboard(projectId int, dashboardID int, userID } // DeleteDashboard Soft-delete a dashboard -func (s *dashboardsImpl) DeleteDashboard(projectId int, dashboardID int, userID uint64) error { +func (s *dashboardsImpl) Delete(projectId int, dashboardID int, userID uint64) error { sql := ` UPDATE dashboards SET deleted_at = now() @@ -320,8 +320,8 @@ func (s *dashboardsImpl) CardsExist(projectId int, cardIDs []int) (bool, error) return count == len(cardIDs), nil } -func (s *dashboardsImpl) AddCardsToDashboard(projectId int, dashboardId int, userId uint64, req *AddCardToDashboardRequest) error { - _, err := s.GetDashboard(projectId, dashboardId, userId) +func (s *dashboardsImpl) AddCards(projectId int, dashboardId int, userId uint64, req *AddCardToDashboardRequest) error { + _, err := s.Get(projectId, dashboardId, userId) if err != nil { return fmt.Errorf("failed to get dashboard: %w", err) } @@ -395,7 +395,7 @@ func (s *dashboardsImpl) AddCardsToDashboard(projectId int, dashboardId int, use return nil } -func (s *dashboardsImpl) DeleteCardFromDashboard(dashboardId int, cardId int) error { +func (s *dashboardsImpl) DeleteCard(dashboardId int, cardId int) error { sql := `DELETE FROM public.dashboard_widgets WHERE dashboard_id = $1 AND metric_id = $2` err := s.pgconn.Exec(sql, dashboardId, cardId) if err != nil { diff --git a/backend/pkg/analytics/dashboards/handlers.go b/backend/pkg/analytics/dashboards/handlers.go index f4c3ef573..a37005d00 100644 --- a/backend/pkg/analytics/dashboards/handlers.go +++ b/backend/pkg/analytics/dashboards/handlers.go @@ -90,7 +90,7 @@ func (e *handlersImpl) createDashboard(w http.ResponseWriter, r *http.Request) { } currentUser := r.Context().Value("userData").(*user.User) - resp, err := e.dashboards.CreateDashboard(projectID, currentUser.ID, req) + resp, err := e.dashboards.Create(projectID, currentUser.ID, req) e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize) } @@ -107,7 +107,7 @@ func (e *handlersImpl) getDashboards(w http.ResponseWriter, r *http.Request) { } u := r.Context().Value("userData").(*user.User) - resp, err := e.dashboards.GetDashboards(projectID, u.ID) + resp, err := e.dashboards.GetAll(projectID, u.ID) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return @@ -133,7 +133,7 @@ func (e *handlersImpl) getDashboard(w http.ResponseWriter, r *http.Request) { } u := r.Context().Value("userData").(*user.User) - res, err := e.dashboards.GetDashboard(projectID, dashboardID, u.ID) + res, err := e.dashboards.Get(projectID, dashboardID, u.ID) if err != nil { // Map errors to appropriate HTTP status codes if err.Error() == "not_found: 
dashboard not found" { @@ -173,7 +173,7 @@ func (e *handlersImpl) updateDashboard(w http.ResponseWriter, r *http.Request) { bodySize = len(bodyBytes) u := r.Context().Value("userData").(*user.User) - _, err = e.dashboards.GetDashboard(projectID, dashboardID, u.ID) + _, err = e.dashboards.Get(projectID, dashboardID, u.ID) if err != nil { // Map errors to appropriate HTTP status codes if err.Error() == "not_found: dashboard not found" { @@ -193,7 +193,7 @@ func (e *handlersImpl) updateDashboard(w http.ResponseWriter, r *http.Request) { } currentUser := r.Context().Value("userData").(*user.User) - resp, err := e.dashboards.UpdateDashboard(projectID, dashboardID, currentUser.ID, req) + resp, err := e.dashboards.Update(projectID, dashboardID, currentUser.ID, req) e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize) } @@ -215,7 +215,7 @@ func (e *handlersImpl) deleteDashboard(w http.ResponseWriter, r *http.Request) { } u := r.Context().Value("userData").(*user.User) - _, err = e.dashboards.GetDashboard(projectID, dashboardID, u.ID) + _, err = e.dashboards.Get(projectID, dashboardID, u.ID) if err != nil { // Map errors to appropriate HTTP status codes if err.Error() == "not_found: dashboard not found" { @@ -228,7 +228,7 @@ func (e *handlersImpl) deleteDashboard(w http.ResponseWriter, r *http.Request) { return } - err = e.dashboards.DeleteDashboard(projectID, dashboardID, u.ID) + err = e.dashboards.Delete(projectID, dashboardID, u.ID) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return @@ -286,7 +286,7 @@ func (e *handlersImpl) addCardToDashboard(w http.ResponseWriter, r *http.Request return } - err = e.dashboards.AddCardsToDashboard(projectID, dashboardID, u.ID, req) + err = e.dashboards.AddCards(projectID, dashboardID, u.ID, req) if err != nil { if err.Error() == "not_found: dashboard not found" { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusNotFound, err, startTime, r.URL.Path, bodySize) @@ -325,7 +325,7 @@ func (e *handlersImpl) removeCardFromDashboard(w http.ResponseWriter, r *http.Re } u := r.Context().Value("userData").(*user.User) - _, err = e.dashboards.GetDashboard(projectID, dashboardID, u.ID) + _, err = e.dashboards.Get(projectID, dashboardID, u.ID) if err != nil { if err.Error() == "not_found: dashboard not found" { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusNotFound, err, startTime, r.URL.Path, bodySize) @@ -336,7 +336,7 @@ func (e *handlersImpl) removeCardFromDashboard(w http.ResponseWriter, r *http.Re } } - err = e.dashboards.DeleteCardFromDashboard(dashboardID, cardID) + err = e.dashboards.DeleteCard(dashboardID, cardID) if err != nil { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) return From 230924c4b8ab210b482ca5da0466a2865354537f Mon Sep 17 00:00:00 2001 From: Alexander Date: Mon, 23 Dec 2024 14:50:58 +0100 Subject: [PATCH 7/9] feat(analytics): removed unnecessary comments --- backend/pkg/analytics/dashboards/dashboards.go | 6 ------ 1 file changed, 6 deletions(-) diff --git a/backend/pkg/analytics/dashboards/dashboards.go b/backend/pkg/analytics/dashboards/dashboards.go index 7a4147654..cb260f572 100644 --- a/backend/pkg/analytics/dashboards/dashboards.go +++ b/backend/pkg/analytics/dashboards/dashboards.go @@ -33,7 +33,6 @@ func New(log logger.Logger, conn pool.Pool) (Dashboards, error) { }, nil } -// CreateDashboard Create a new 
dashboard func (s *dashboardsImpl) Create(projectId int, userID uint64, req *CreateDashboardRequest) (*GetDashboardResponse, error) { sql := ` INSERT INTO dashboards (project_id, user_id, name, description, is_public, is_pinned) @@ -56,7 +55,6 @@ func (s *dashboardsImpl) Create(projectId int, userID uint64, req *CreateDashboa return dashboard, nil } -// GetDashboard Fetch a specific dashboard by ID func (s *dashboardsImpl) Get(projectId int, dashboardID int, userID uint64) (*GetDashboardResponse, error) { sql := ` WITH series_agg AS ( @@ -167,7 +165,6 @@ func (s *dashboardsImpl) GetAll(projectId int, userID uint64) (*GetDashboardsRes }, nil } -// GetDashboardsPaginated Fetch dashboards with pagination func (s *dashboardsImpl) GetAllPaginated(projectId int, userID uint64, req *GetDashboardsRequest) (*GetDashboardsResponsePaginated, error) { baseSQL, args := buildBaseQuery(projectId, userID, req) @@ -216,7 +213,6 @@ func (s *dashboardsImpl) GetAllPaginated(projectId int, userID uint64, req *GetD }, nil } -// UpdateDashboard Update a dashboard func (s *dashboardsImpl) Update(projectId int, dashboardID int, userID uint64, req *UpdateDashboardRequest) (*GetDashboardResponse, error) { sql := ` UPDATE dashboards @@ -240,7 +236,6 @@ func (s *dashboardsImpl) Update(projectId int, dashboardID int, userID uint64, r return dashboard, nil } -// DeleteDashboard Soft-delete a dashboard func (s *dashboardsImpl) Delete(projectId int, dashboardID int, userID uint64) error { sql := ` UPDATE dashboards @@ -255,7 +250,6 @@ func (s *dashboardsImpl) Delete(projectId int, dashboardID int, userID uint64) e return nil } -// Helper to build the base query for dashboards func buildBaseQuery(projectId int, userID uint64, req *GetDashboardsRequest) (string, []interface{}) { var conditions []string args := []interface{}{projectId} From 763aed14a1b12e200dd00c61a42845e829f7e5aa Mon Sep 17 00:00:00 2001 From: Alexander Date: Mon, 23 Dec 2024 15:32:30 +0100 Subject: [PATCH 8/9] feat(analytics): moved charts data to the separate module --- backend/cmd/analytics/main.go | 2 +- backend/pkg/analytics/builder.go | 11 +++ backend/pkg/analytics/charts/charts.go | 50 +++++++++++++ backend/pkg/analytics/charts/handlers.go | 95 ++++++++++++++++++++++++ backend/pkg/analytics/charts/model.go | 21 ++++++ 5 files changed, 178 insertions(+), 1 deletion(-) create mode 100644 backend/pkg/analytics/charts/charts.go create mode 100644 backend/pkg/analytics/charts/handlers.go create mode 100644 backend/pkg/analytics/charts/model.go diff --git a/backend/cmd/analytics/main.go b/backend/cmd/analytics/main.go index 55923fe70..778e8a787 100644 --- a/backend/cmd/analytics/main.go +++ b/backend/cmd/analytics/main.go @@ -37,7 +37,7 @@ func main() { if err != nil { log.Fatal(ctx, "failed while creating router: %s", err) } - router.AddHandlers(api.NoPrefix, builder.CardsAPI, builder.DashboardsAPI) + router.AddHandlers(api.NoPrefix, builder.CardsAPI, builder.DashboardsAPI, builder.ChartsAPI) router.AddMiddlewares(builder.Auth.Middleware, builder.RateLimiter.Middleware, builder.AuditTrail.Middleware) server.Run(ctx, log, &cfg.HTTP, router) diff --git a/backend/pkg/analytics/builder.go b/backend/pkg/analytics/builder.go index 18250790b..e2a5c833b 100644 --- a/backend/pkg/analytics/builder.go +++ b/backend/pkg/analytics/builder.go @@ -1,6 +1,7 @@ package analytics import ( + "openreplay/backend/pkg/analytics/charts" "time" "openreplay/backend/internal/config/analytics" @@ -21,6 +22,7 @@ type ServicesBuilder struct { AuditTrail tracer.Tracer CardsAPI 
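Patch 8's shape is the reusable part: a module exposes a service constructor plus an api.Handlers implementation, the builder threads them together, and main.go registers everything in the single AddHandlers call above. As a hedged illustration of the same recipe, here is how a hypothetical further module would be wired; the funnels names are placeholders only, and the real instance of the recipe is the charts wiring just below:

// Hypothetical sketch: no funnels package exists in this patch series.
// Assumes the builder file's imports (logger, pool, api, config) and a
// funnels package shaped like charts.
func wireFunnels(log logger.Logger, cfg *analytics.Config, responser *api.Responser,
	pgconn pool.Pool, builder *ServicesBuilder) error {
	svc, err := funnels.New(log, pgconn) // service layer over the pg pool
	if err != nil {
		return err
	}
	handlers, err := funnels.NewHandlers(log, cfg, responser, svc) // api.Handlers value
	if err != nil {
		return err
	}
	builder.FunnelsAPI = handlers // hypothetical extra field on ServicesBuilder
	// main.go then extends the one registration call:
	//   router.AddHandlers(api.NoPrefix, builder.CardsAPI,
	//       builder.DashboardsAPI, builder.ChartsAPI, builder.FunnelsAPI)
	return nil
}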
api.Handlers DashboardsAPI api.Handlers + ChartsAPI api.Handlers } func NewServiceBuilder(log logger.Logger, cfg *analytics.Config, webMetrics web.Web, pgconn pool.Pool) (*ServicesBuilder, error) { @@ -45,11 +47,20 @@ func NewServiceBuilder(log logger.Logger, cfg *analytics.Config, webMetrics web. if err != nil { return nil, err } + chartsService, err := charts.New(log, pgconn) + if err != nil { + return nil, err + } + chartsHandlers, err := charts.NewHandlers(log, cfg, responser, chartsService) + if err != nil { + return nil, err + } return &ServicesBuilder{ Auth: auth.NewAuth(log, cfg.JWTSecret, cfg.JWTSpotSecret, pgconn, nil), RateLimiter: limiter.NewUserRateLimiter(10, 30, 1*time.Minute, 5*time.Minute), AuditTrail: audiTrail, CardsAPI: cardsHandlers, DashboardsAPI: dashboardsHandlers, + ChartsAPI: chartsHandlers, }, nil } diff --git a/backend/pkg/analytics/charts/charts.go b/backend/pkg/analytics/charts/charts.go new file mode 100644 index 000000000..1916695fa --- /dev/null +++ b/backend/pkg/analytics/charts/charts.go @@ -0,0 +1,50 @@ +package charts + +import ( + "encoding/json" + "fmt" + + "openreplay/backend/pkg/db/postgres/pool" + "openreplay/backend/pkg/logger" +) + +type Charts interface { + GetData(projectId int, userId uint64, req *GetCardChartDataRequest) ([]DataPoint, error) +} + +type chartsImpl struct { + log logger.Logger + pgconn pool.Pool +} + +func New(log logger.Logger, conn pool.Pool) (Charts, error) { + return &chartsImpl{ + log: log, + pgconn: conn, + }, nil +} + +func (s *chartsImpl) GetData(projectId int, userID uint64, req *GetCardChartDataRequest) ([]DataPoint, error) { + jsonInput := ` + { + "data": [ + { + "timestamp": 1733934939000, + "Series A": 100, + "Series B": 200 + }, + { + "timestamp": 1733935939000, + "Series A": 150, + "Series B": 250 + } + ] + }` + + var resp GetCardChartDataResponse + if err := json.Unmarshal([]byte(jsonInput), &resp); err != nil { + return nil, fmt.Errorf("failed to unmarshal response: %w", err) + } + + return resp.Data, nil +} diff --git a/backend/pkg/analytics/charts/handlers.go b/backend/pkg/analytics/charts/handlers.go new file mode 100644 index 000000000..d0d71dd7c --- /dev/null +++ b/backend/pkg/analytics/charts/handlers.go @@ -0,0 +1,95 @@ +package charts + +import ( + "encoding/json" + "fmt" + "net/http" + "strconv" + "time" + + "github.com/go-playground/validator/v10" + "github.com/gorilla/mux" + + config "openreplay/backend/internal/config/analytics" + "openreplay/backend/pkg/logger" + "openreplay/backend/pkg/server/api" + "openreplay/backend/pkg/server/user" +) + +func getIDFromRequest(r *http.Request, key string) (int, error) { + vars := mux.Vars(r) + idStr := vars[key] + if idStr == "" { + return 0, fmt.Errorf("missing %s in request", key) + } + + id, err := strconv.Atoi(idStr) + if err != nil { + return 0, fmt.Errorf("invalid %s format", key) + } + + return id, nil +} + +type handlersImpl struct { + log logger.Logger + responser *api.Responser + jsonSizeLimit int64 + charts Charts +} + +func (e *handlersImpl) GetAll() []*api.Description { + return []*api.Description{ + {"/v1/analytics/{projectId}/cards/{id}/chart", e.getCardChartData, "POST"}, + {"/v1/analytics/{projectId}/cards/{id}/try", e.getCardChartData, "POST"}, + } +} + +func NewHandlers(log logger.Logger, cfg *config.Config, responser *api.Responser, charts Charts) (api.Handlers, error) { + return &handlersImpl{ + log: log, + responser: responser, + jsonSizeLimit: cfg.JsonSizeLimit, + charts: charts, + }, nil +} + +func (e *handlersImpl) getCardChartData(w 
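GetData above is still a stub: it decodes a canned JSON document rather than querying Postgres. One detail worth flagging: the stub's payload keeps each series under its own name ("Series A", "Series B"), while DataPoint (removed from cards/model.go earlier and re-declared in charts/model.go below) tags its map as json:"series". Unless DataPoint gains a custom decoder, encoding/json will silently drop those named keys. A hedged sketch of such a decoder, assuming every key other than "timestamp" is a series name (no such method exists in this patch):

// Sketch only: folds the stub's named series keys into Series instead of
// losing them to the non-matching `json:"series"` tag. Requires the
// encoding/json import.
func (d *DataPoint) UnmarshalJSON(b []byte) error {
	raw := map[string]json.Number{}
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	d.Series = make(map[string]int64, len(raw))
	for key, num := range raw {
		n, err := num.Int64()
		if err != nil {
			return err
		}
		if key == "timestamp" {
			d.Timestamp = n
		} else {
			d.Series[key] = n // every other key is a series name
		}
	}
	return nil
}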
http.ResponseWriter, r *http.Request) { + startTime := time.Now() + bodySize := 0 + + projectID, err := getIDFromRequest(r, "projectId") + if err != nil { + e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) + return + } + + bodyBytes, err := api.ReadBody(e.log, w, r, e.jsonSizeLimit) + if err != nil { + e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusRequestEntityTooLarge, err, startTime, r.URL.Path, bodySize) + return + } + bodySize = len(bodyBytes) + + req := &GetCardChartDataRequest{} + if err := json.Unmarshal(bodyBytes, req); err != nil { + e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) + return + } + + validate := validator.New() + err = validate.Struct(req) + if err != nil { + e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusBadRequest, err, startTime, r.URL.Path, bodySize) + return + } + + currentUser := r.Context().Value("userData").(*user.User) + resp, err := e.charts.GetData(projectID, currentUser.ID, req) + if err != nil { + e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusInternalServerError, err, startTime, r.URL.Path, bodySize) + return + } + + e.responser.ResponseWithJSON(e.log, r.Context(), w, resp, startTime, r.URL.Path, bodySize) +} diff --git a/backend/pkg/analytics/charts/model.go b/backend/pkg/analytics/charts/model.go new file mode 100644 index 000000000..81016c7e6 --- /dev/null +++ b/backend/pkg/analytics/charts/model.go @@ -0,0 +1,21 @@ +package charts + +import "openreplay/backend/pkg/analytics/cards" + +type DataPoint struct { + Timestamp int64 `json:"timestamp"` + Series map[string]int64 `json:"series"` +} + +type GetCardChartDataRequest struct { + MetricType string `json:"metricType" validate:"required,oneof=timeseries table funnel"` + MetricOf string `json:"metricOf" validate:"required,oneof=session_count user_count"` + ViewType string `json:"viewType" validate:"required,oneof=line_chart table_view"` + MetricFormat string `json:"metricFormat" validate:"required,oneof=default percentage"` + SessionID int64 `json:"sessionId"` + Series []cards.CardSeries `json:"series" validate:"required,dive"` +} + +type GetCardChartDataResponse struct { + Data []DataPoint `json:"data"` +} From 4c4e1b6580bc012c1b732ae81cd34af75705d8ab Mon Sep 17 00:00:00 2001 From: Kraiem Taha Yassine Date: Mon, 23 Dec 2024 17:19:43 +0100 Subject: [PATCH 9/9] Dev (#2905) * feat(chalice): autocomplete return top 10 with stats * fix(chalice): fixed autocomplete top 10 meta-filters * refactor(chalice): pg client helper handles closed connexion * refactor(chalice): upgraded dependencies refactor(chalice): restricted get error's details --- api/Pipfile | 13 +- api/chalicelib/core/errors/errors.py | 249 ----------------- api/chalicelib/core/errors/errors_ch.py | 262 ------------------ api/chalicelib/core/errors/errors_details.py | 253 +++++++++++++++++ api/chalicelib/utils/errors_helper.py | 2 +- api/chalicelib/utils/pg_client.py | 7 +- api/requirements-alerts.txt | 10 +- api/requirements.txt | 10 +- api/routers/core_dynamic.py | 8 +- ee/api/.gitignore | 1 + ee/api/Pipfile | 14 +- ee/api/chalicelib/core/errors/__init__.py | 1 + .../core/errors/errors_details_exp.py | 261 +++++++++++++++++ ee/api/clean-dev.sh | 1 + ee/api/requirements.txt | 15 +- 15 files changed, 558 insertions(+), 549 deletions(-) create mode 100644 api/chalicelib/core/errors/errors_details.py create mode 100644 
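Looking back at patch 8's charts/model.go just above this patch 9 header: the request struct leans on go-playground/validator tags (required, oneof whitelists, and dive, which validates each element of the Series slice), and the handler enforces them with validator.New().Struct(req). A self-contained sketch of how those tags behave, mirroring the tag style of GetCardChartDataRequest:

package main

import (
	"fmt"

	"github.com/go-playground/validator/v10"
)

// Two of the real request's fields, with the same tag style; the full
// struct in charts/model.go also validates MetricOf, MetricFormat and
// the Series slice (via "dive").
type chartRequest struct {
	MetricType string `validate:"required,oneof=timeseries table funnel"`
	ViewType   string `validate:"required,oneof=line_chart table_view"`
}

func main() {
	v := validator.New()

	ok := chartRequest{MetricType: "timeseries", ViewType: "line_chart"}
	fmt.Println(v.Struct(ok)) // <nil>: both fields pass their oneof lists

	bad := chartRequest{MetricType: "histogram", ViewType: "line_chart"}
	fmt.Println(v.Struct(bad)) // validation error on MetricType's oneof
}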
ee/api/chalicelib/core/errors/errors_details_exp.py diff --git a/api/Pipfile b/api/Pipfile index ffd5906f5..65a080eda 100644 --- a/api/Pipfile +++ b/api/Pipfile @@ -4,22 +4,21 @@ verify_ssl = true name = "pypi" [packages] -sqlparse = "==0.5.2" urllib3 = "==2.2.3" requests = "==2.32.3" -boto3 = "==1.35.76" +boto3 = "==1.35.86" pyjwt = "==2.10.1" psycopg2-binary = "==2.9.10" -psycopg = {extras = ["binary", "pool"], version = "==3.2.3"} +psycopg = {extras = ["pool", "binary"], version = "==3.2.3"} clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"} -clickhouse-connect = "==0.8.9" -elasticsearch = "==8.16.0" +clickhouse-connect = "==0.8.11" +elasticsearch = "==8.17.0" jira = "==3.8.0" cachetools = "==5.5.0" fastapi = "==0.115.6" -uvicorn = {extras = ["standard"], version = "==0.32.1"} +uvicorn = {extras = ["standard"], version = "==0.34.0"} python-decouple = "==3.8" -pydantic = {extras = ["email"], version = "==2.10.3"} +pydantic = {extras = ["email"], version = "==2.10.4"} apscheduler = "==3.11.0" redis = "==5.2.1" diff --git a/api/chalicelib/core/errors/errors.py b/api/chalicelib/core/errors/errors.py index 4ea1e1f89..0e597832a 100644 --- a/api/chalicelib/core/errors/errors.py +++ b/api/chalicelib/core/errors/errors.py @@ -3,7 +3,6 @@ import json import schemas from chalicelib.core import sourcemaps from chalicelib.core.errors.modules import sessions -from chalicelib.utils import errors_helper from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.metrics_helper import __get_step_size @@ -13,9 +12,7 @@ def get(error_id, family=False): if family: return get_batch([error_id]) with pg_client.PostgresClient() as cur: - # trying: return only 1 error, without event details query = cur.mogrify( - # "SELECT * FROM events.errors AS e INNER JOIN public.errors AS re USING(error_id) WHERE error_id = %(error_id)s;", "SELECT * FROM public.errors WHERE error_id = %(error_id)s LIMIT 1;", {"error_id": error_id}) cur.execute(query=query) @@ -50,252 +47,6 @@ def get_batch(error_ids): return helper.list_to_camel_case(errors) -def __flatten_sort_key_count_version(data, merge_nested=False): - if data is None: - return [] - return sorted( - [ - { - "name": f'{o["name"]}@{v["version"]}', - "count": v["count"] - } for o in data for v in o["partition"] - ], - key=lambda o: o["count"], reverse=True) if merge_nested else \ - [ - { - "name": o["name"], - "count": o["count"], - } for o in data - ] - - -def __process_tags(row): - return [ - {"name": "browser", "partitions": __flatten_sort_key_count_version(data=row.get("browsers_partition"))}, - {"name": "browser.ver", - "partitions": __flatten_sort_key_count_version(data=row.pop("browsers_partition"), merge_nested=True)}, - {"name": "OS", "partitions": __flatten_sort_key_count_version(data=row.get("os_partition"))}, - {"name": "OS.ver", - "partitions": __flatten_sort_key_count_version(data=row.pop("os_partition"), merge_nested=True)}, - {"name": "device.family", "partitions": __flatten_sort_key_count_version(data=row.get("device_partition"))}, - {"name": "device", - "partitions": __flatten_sort_key_count_version(data=row.pop("device_partition"), merge_nested=True)}, - {"name": "country", "partitions": row.pop("country_partition")} - ] - - -def get_details(project_id, error_id, user_id, **data): - pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24") - pg_sub_query24.append("error_id = %(error_id)s") - pg_sub_query30_session = 
__get_basic_constraints(time_constraint=True, chart=False, - startTime_arg_name="startDate30", - endTime_arg_name="endDate30", project_key="sessions.project_id") - pg_sub_query30_session.append("sessions.start_ts >= %(startDate30)s") - pg_sub_query30_session.append("sessions.start_ts <= %(endDate30)s") - pg_sub_query30_session.append("error_id = %(error_id)s") - pg_sub_query30_err = __get_basic_constraints(time_constraint=True, chart=False, startTime_arg_name="startDate30", - endTime_arg_name="endDate30", project_key="errors.project_id") - pg_sub_query30_err.append("sessions.project_id = %(project_id)s") - pg_sub_query30_err.append("sessions.start_ts >= %(startDate30)s") - pg_sub_query30_err.append("sessions.start_ts <= %(endDate30)s") - pg_sub_query30_err.append("error_id = %(error_id)s") - pg_sub_query30_err.append("source ='js_exception'") - pg_sub_query30 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size30") - pg_sub_query30.append("error_id = %(error_id)s") - pg_basic_query = __get_basic_constraints(time_constraint=False) - pg_basic_query.append("error_id = %(error_id)s") - with pg_client.PostgresClient() as cur: - data["startDate24"] = TimeUTC.now(-1) - data["endDate24"] = TimeUTC.now() - data["startDate30"] = TimeUTC.now(-30) - data["endDate30"] = TimeUTC.now() - density24 = int(data.get("density24", 24)) - step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24, factor=1) - density30 = int(data.get("density30", 30)) - step_size30 = __get_step_size(data["startDate30"], data["endDate30"], density30, factor=1) - params = { - "startDate24": data['startDate24'], - "endDate24": data['endDate24'], - "startDate30": data['startDate30'], - "endDate30": data['endDate30'], - "project_id": project_id, - "userId": user_id, - "step_size24": step_size24, - "step_size30": step_size30, - "error_id": error_id} - - main_pg_query = f"""\ - SELECT error_id, - name, - message, - users, - sessions, - last_occurrence, - first_occurrence, - last_session_id, - browsers_partition, - os_partition, - device_partition, - country_partition, - chart24, - chart30, - custom_tags - FROM (SELECT error_id, - name, - message, - COUNT(DISTINCT user_id) AS users, - COUNT(DISTINCT session_id) AS sessions - FROM public.errors - INNER JOIN events.errors AS s_errors USING (error_id) - INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30_err)} - GROUP BY error_id, name, message) AS details - INNER JOIN (SELECT MAX(timestamp) AS last_occurrence, - MIN(timestamp) AS first_occurrence - FROM events.errors - WHERE error_id = %(error_id)s) AS time_details ON (TRUE) - INNER JOIN (SELECT session_id AS last_session_id, - coalesce(custom_tags, '[]')::jsonb AS custom_tags - FROM events.errors - LEFT JOIN LATERAL ( - SELECT jsonb_agg(jsonb_build_object(errors_tags.key, errors_tags.value)) AS custom_tags - FROM errors_tags - WHERE errors_tags.error_id = %(error_id)s - AND errors_tags.session_id = errors.session_id - AND errors_tags.message_id = errors.message_id) AS errors_tags ON (TRUE) - WHERE error_id = %(error_id)s - ORDER BY errors.timestamp DESC - LIMIT 1) AS last_session_details ON (TRUE) - INNER JOIN (SELECT jsonb_agg(browser_details) AS browsers_partition - FROM (SELECT * - FROM (SELECT user_browser AS name, - COUNT(session_id) AS count - FROM events.errors - INNER JOIN sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30_session)} - GROUP BY user_browser - ORDER BY count DESC) AS count_per_browser_query - INNER JOIN LATERAL 
(SELECT JSONB_AGG(version_details) AS partition - FROM (SELECT user_browser_version AS version, - COUNT(session_id) AS count - FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30_session)} - AND sessions.user_browser = count_per_browser_query.name - GROUP BY user_browser_version - ORDER BY count DESC) AS version_details - ) AS browser_version_details ON (TRUE)) AS browser_details) AS browser_details ON (TRUE) - INNER JOIN (SELECT jsonb_agg(os_details) AS os_partition - FROM (SELECT * - FROM (SELECT user_os AS name, - COUNT(session_id) AS count - FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30_session)} - GROUP BY user_os - ORDER BY count DESC) AS count_per_os_details - INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition - FROM (SELECT COALESCE(user_os_version,'unknown') AS version, COUNT(session_id) AS count - FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30_session)} - AND sessions.user_os = count_per_os_details.name - GROUP BY user_os_version - ORDER BY count DESC) AS count_per_version_details - GROUP BY count_per_os_details.name ) AS os_version_details - ON (TRUE)) AS os_details) AS os_details ON (TRUE) - INNER JOIN (SELECT jsonb_agg(device_details) AS device_partition - FROM (SELECT * - FROM (SELECT user_device_type AS name, - COUNT(session_id) AS count - FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30_session)} - GROUP BY user_device_type - ORDER BY count DESC) AS count_per_device_details - INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_v_details) AS partition - FROM (SELECT CASE - WHEN user_device = '' OR user_device ISNULL - THEN 'unknown' - ELSE user_device END AS version, - COUNT(session_id) AS count - FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30_session)} - AND sessions.user_device_type = count_per_device_details.name - GROUP BY user_device - ORDER BY count DESC) AS count_per_device_v_details - GROUP BY count_per_device_details.name ) AS device_version_details - ON (TRUE)) AS device_details) AS device_details ON (TRUE) - INNER JOIN (SELECT jsonb_agg(count_per_country_details) AS country_partition - FROM (SELECT user_country AS name, - COUNT(session_id) AS count - FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30_session)} - GROUP BY user_country - ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE) - INNER JOIN (SELECT jsonb_agg(chart_details) AS chart24 - FROM (SELECT generated_timestamp AS timestamp, - COUNT(session_id) AS count - FROM generate_series(%(startDate24)s, %(endDate24)s, %(step_size24)s) AS generated_timestamp - LEFT JOIN LATERAL (SELECT DISTINCT session_id - FROM events.errors - INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query24)} - ) AS chart_details ON (TRUE) - GROUP BY generated_timestamp - ORDER BY generated_timestamp) AS chart_details) AS chart_details24 ON (TRUE) - INNER JOIN (SELECT jsonb_agg(chart_details) AS chart30 - FROM (SELECT generated_timestamp AS timestamp, - COUNT(session_id) AS count - FROM generate_series(%(startDate30)s, %(endDate30)s, %(step_size30)s) AS generated_timestamp - LEFT JOIN LATERAL (SELECT DISTINCT session_id - FROM events.errors INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query30)}) AS 
chart_details - ON (TRUE) - GROUP BY timestamp - ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE); - """ - - # print("--------------------") - # print(cur.mogrify(main_pg_query, params)) - # print("--------------------") - cur.execute(cur.mogrify(main_pg_query, params)) - row = cur.fetchone() - if row is None: - return {"errors": ["error not found"]} - row["tags"] = __process_tags(row) - - query = cur.mogrify( - f"""SELECT error_id, status, session_id, start_ts, - parent_error_id,session_id, user_anonymous_id, - user_id, user_uuid, user_browser, user_browser_version, - user_os, user_os_version, user_device, payload, - FALSE AS favorite, - True AS viewed - FROM public.errors AS pe - INNER JOIN events.errors AS ee USING (error_id) - INNER JOIN public.sessions USING (session_id) - WHERE pe.project_id = %(project_id)s - AND error_id = %(error_id)s - ORDER BY start_ts DESC - LIMIT 1;""", - {"project_id": project_id, "error_id": error_id, "user_id": user_id}) - cur.execute(query=query) - status = cur.fetchone() - - if status is not None: - row["stack"] = errors_helper.format_first_stack_frame(status).pop("stack") - row["status"] = status.pop("status") - row["parent_error_id"] = status.pop("parent_error_id") - row["favorite"] = status.pop("favorite") - row["viewed"] = status.pop("viewed") - row["last_hydrated_session"] = status - else: - row["stack"] = [] - row["last_hydrated_session"] = None - row["status"] = "untracked" - row["parent_error_id"] = None - row["favorite"] = False - row["viewed"] = False - return {"data": helper.dict_to_camel_case(row)} - - def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate", endTime_arg_name="endDate", chart=False, step_size_name="step_size", project_key="project_id"): diff --git a/api/chalicelib/core/errors/errors_ch.py b/api/chalicelib/core/errors/errors_ch.py index 8ba0fe45f..f12d135d0 100644 --- a/api/chalicelib/core/errors/errors_ch.py +++ b/api/chalicelib/core/errors/errors_ch.py @@ -62,268 +62,6 @@ def get_batch(error_ids): return errors_legacy.get_batch(error_ids=error_ids) -def __flatten_sort_key_count_version(data, merge_nested=False): - if data is None: - return [] - return sorted( - [ - { - "name": f"{o[0][0][0]}@{v[0]}", - "count": v[1] - } for o in data for v in o[2] - ], - key=lambda o: o["count"], reverse=True) if merge_nested else \ - [ - { - "name": o[0][0][0], - "count": o[1][0][0], - # "versions": [{"version": v[0], "count": v[1]} for v in o[2]] - } for o in data - ] - - -def __transform_map_to_tag(data, key1, key2, requested_key): - result = [] - for i in data: - if requested_key == 0 and i.get(key1) is None and i.get(key2) is None: - result.append({"name": "all", "count": int(i.get("count"))}) - elif requested_key == 1 and i.get(key1) is not None and i.get(key2) is None: - result.append({"name": i.get(key1), "count": int(i.get("count"))}) - elif requested_key == 2 and i.get(key1) is not None and i.get(key2) is not None: - result.append({"name": i.get(key2), "count": int(i.get("count"))}) - return result - - -def __flatten_sort_key_count(data): - if data is None: - return [] - return [ - { - "name": o[0][0][0], - "count": o[1][0][0] - } for o in data - ] - - -def __process_tags_map(row): - browsers_partition = row.pop("browsers_partition") - os_partition = row.pop("os_partition") - device_partition = row.pop("device_partition") - country_partition = row.pop("country_partition") - return [ - {"name": "browser", - "partitions": __transform_map_to_tag(data=browsers_partition, - 
key1="browser", - key2="browser_version", - requested_key=1)}, - {"name": "browser.ver", - "partitions": __transform_map_to_tag(data=browsers_partition, - key1="browser", - key2="browser_version", - requested_key=2)}, - {"name": "OS", - "partitions": __transform_map_to_tag(data=os_partition, - key1="os", - key2="os_version", - requested_key=1) - }, - {"name": "OS.ver", - "partitions": __transform_map_to_tag(data=os_partition, - key1="os", - key2="os_version", - requested_key=2)}, - {"name": "device.family", - "partitions": __transform_map_to_tag(data=device_partition, - key1="device_type", - key2="device", - requested_key=1)}, - {"name": "device", - "partitions": __transform_map_to_tag(data=device_partition, - key1="device_type", - key2="device", - requested_key=2)}, - {"name": "country", "partitions": __transform_map_to_tag(data=country_partition, - key1="country", - key2="", - requested_key=1)} - ] - - -def get_details(project_id, error_id, user_id, **data): - MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(0) - MAIN_ERR_SESS_TABLE = exp_ch_helper.get_main_js_errors_sessions_table(0) - MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0) - - ch_sub_query24 = __get_basic_constraints(startTime_arg_name="startDate24", endTime_arg_name="endDate24") - ch_sub_query24.append("error_id = %(error_id)s") - - ch_sub_query30 = __get_basic_constraints(startTime_arg_name="startDate30", endTime_arg_name="endDate30", - project_key="errors.project_id") - ch_sub_query30.append("error_id = %(error_id)s") - ch_basic_query = __get_basic_constraints(time_constraint=False) - ch_basic_query.append("error_id = %(error_id)s") - - with ch_client.ClickHouseClient() as ch: - data["startDate24"] = TimeUTC.now(-1) - data["endDate24"] = TimeUTC.now() - data["startDate30"] = TimeUTC.now(-30) - data["endDate30"] = TimeUTC.now() - - density24 = int(data.get("density24", 24)) - step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24) - density30 = int(data.get("density30", 30)) - step_size30 = __get_step_size(data["startDate30"], data["endDate30"], density30) - params = { - "startDate24": data['startDate24'], - "endDate24": data['endDate24'], - "startDate30": data['startDate30'], - "endDate30": data['endDate30'], - "project_id": project_id, - "userId": user_id, - "step_size24": step_size24, - "step_size30": step_size30, - "error_id": error_id} - - main_ch_query = f"""\ - WITH pre_processed AS (SELECT error_id, - name, - message, - session_id, - datetime, - user_id, - user_browser, - user_browser_version, - user_os, - user_os_version, - user_device_type, - user_device, - user_country, - error_tags_keys, - error_tags_values - FROM {MAIN_ERR_SESS_TABLE} AS errors - WHERE {" AND ".join(ch_basic_query)} - ) - SELECT %(error_id)s AS error_id, name, message,users, - first_occurrence,last_occurrence,last_session_id, - sessions,browsers_partition,os_partition,device_partition, - country_partition,chart24,chart30,custom_tags - FROM (SELECT error_id, - name, - message - FROM pre_processed - LIMIT 1) AS details - INNER JOIN (SELECT COUNT(DISTINCT user_id) AS users, - COUNT(DISTINCT session_id) AS sessions - FROM pre_processed - WHERE datetime >= toDateTime(%(startDate30)s / 1000) - AND datetime <= toDateTime(%(endDate30)s / 1000) - ) AS last_month_stats ON TRUE - INNER JOIN (SELECT toUnixTimestamp(max(datetime)) * 1000 AS last_occurrence, - toUnixTimestamp(min(datetime)) * 1000 AS first_occurrence - FROM pre_processed) AS time_details ON TRUE - INNER JOIN (SELECT session_id AS last_session_id, 
- arrayMap((key, value)->(map(key, value)), error_tags_keys, error_tags_values) AS custom_tags - FROM pre_processed - ORDER BY datetime DESC - LIMIT 1) AS last_session_details ON TRUE - INNER JOIN (SELECT groupArray(details) AS browsers_partition - FROM (SELECT COUNT(1) AS count, - coalesce(nullIf(user_browser,''),toNullable('unknown')) AS browser, - coalesce(nullIf(user_browser_version,''),toNullable('unknown')) AS browser_version, - map('browser', browser, - 'browser_version', browser_version, - 'count', toString(count)) AS details - FROM pre_processed - GROUP BY ROLLUP(browser, browser_version) - ORDER BY browser nulls first, browser_version nulls first, count DESC) AS mapped_browser_details - ) AS browser_details ON TRUE - INNER JOIN (SELECT groupArray(details) AS os_partition - FROM (SELECT COUNT(1) AS count, - coalesce(nullIf(user_os,''),toNullable('unknown')) AS os, - coalesce(nullIf(user_os_version,''),toNullable('unknown')) AS os_version, - map('os', os, - 'os_version', os_version, - 'count', toString(count)) AS details - FROM pre_processed - GROUP BY ROLLUP(os, os_version) - ORDER BY os nulls first, os_version nulls first, count DESC) AS mapped_os_details - ) AS os_details ON TRUE - INNER JOIN (SELECT groupArray(details) AS device_partition - FROM (SELECT COUNT(1) AS count, - coalesce(nullIf(user_device,''),toNullable('unknown')) AS user_device, - map('device_type', toString(user_device_type), - 'device', user_device, - 'count', toString(count)) AS details - FROM pre_processed - GROUP BY ROLLUP(user_device_type, user_device) - ORDER BY user_device_type nulls first, user_device nulls first, count DESC - ) AS count_per_device_details - ) AS mapped_device_details ON TRUE - INNER JOIN (SELECT groupArray(details) AS country_partition - FROM (SELECT COUNT(1) AS count, - map('country', toString(user_country), - 'count', toString(count)) AS details - FROM pre_processed - GROUP BY user_country - ORDER BY count DESC) AS count_per_country_details - ) AS mapped_country_details ON TRUE - INNER JOIN (SELECT groupArray(map('timestamp', timestamp, 'count', count)) AS chart24 - FROM (SELECT toUnixTimestamp(toStartOfInterval(datetime, INTERVAL 3756 second)) * - 1000 AS timestamp, - COUNT(DISTINCT session_id) AS count - FROM {MAIN_EVENTS_TABLE} AS errors - WHERE {" AND ".join(ch_sub_query24)} - GROUP BY timestamp - ORDER BY timestamp) AS chart_details - ) AS chart_details24 ON TRUE - INNER JOIN (SELECT groupArray(map('timestamp', timestamp, 'count', count)) AS chart30 - FROM (SELECT toUnixTimestamp(toStartOfInterval(datetime, INTERVAL 3724 second)) * - 1000 AS timestamp, - COUNT(DISTINCT session_id) AS count - FROM {MAIN_EVENTS_TABLE} AS errors - WHERE {" AND ".join(ch_sub_query30)} - GROUP BY timestamp - ORDER BY timestamp) AS chart_details - ) AS chart_details30 ON TRUE;""" - - # print("--------------------") - # print(ch.format(main_ch_query, params)) - # print("--------------------") - row = ch.execute(query=main_ch_query, parameters=params) - if len(row) == 0: - return {"errors": ["error not found"]} - row = row[0] - - row["tags"] = __process_tags_map(row) - - query = f"""SELECT session_id, toUnixTimestamp(datetime) * 1000 AS start_ts, - user_anonymous_id,user_id, user_uuid, user_browser, user_browser_version, - user_os, user_os_version, user_device, FALSE AS favorite, True AS viewed - FROM {MAIN_SESSIONS_TABLE} AS sessions - WHERE project_id = toUInt16(%(project_id)s) - AND session_id = %(session_id)s - ORDER BY datetime DESC - LIMIT 1;""" - params = {"project_id": project_id, "session_id": 
row["last_session_id"], "userId": user_id} - # print("--------------------") - # print(ch.format(query, params)) - # print("--------------------") - status = ch.execute(query=query, parameters=params) - - if status is not None: - status = status[0] - row["favorite"] = status.pop("favorite") - row["viewed"] = status.pop("viewed") - row["last_hydrated_session"] = status - else: - row["last_hydrated_session"] = None - row["favorite"] = False - row["viewed"] = False - row["chart24"] = metrics.__complete_missing_steps(start_time=data["startDate24"], end_time=data["endDate24"], - density=density24, rows=row["chart24"], neutral={"count": 0}) - row["chart30"] = metrics.__complete_missing_steps(start_time=data["startDate30"], end_time=data["endDate30"], - density=density30, rows=row["chart30"], neutral={"count": 0}) - return {"data": helper.dict_to_camel_case(row)} def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate", diff --git a/api/chalicelib/core/errors/errors_details.py b/api/chalicelib/core/errors/errors_details.py new file mode 100644 index 000000000..6dffaeed5 --- /dev/null +++ b/api/chalicelib/core/errors/errors_details.py @@ -0,0 +1,253 @@ +from chalicelib.core.errors import errors_legacy as errors +from chalicelib.utils import errors_helper +from chalicelib.utils import pg_client, helper +from chalicelib.utils.TimeUTC import TimeUTC +from chalicelib.utils.metrics_helper import __get_step_size + + +def __flatten_sort_key_count_version(data, merge_nested=False): + if data is None: + return [] + return sorted( + [ + { + "name": f'{o["name"]}@{v["version"]}', + "count": v["count"] + } for o in data for v in o["partition"] + ], + key=lambda o: o["count"], reverse=True) if merge_nested else \ + [ + { + "name": o["name"], + "count": o["count"], + } for o in data + ] + + +def __process_tags(row): + return [ + {"name": "browser", "partitions": __flatten_sort_key_count_version(data=row.get("browsers_partition"))}, + {"name": "browser.ver", + "partitions": __flatten_sort_key_count_version(data=row.pop("browsers_partition"), merge_nested=True)}, + {"name": "OS", "partitions": __flatten_sort_key_count_version(data=row.get("os_partition"))}, + {"name": "OS.ver", + "partitions": __flatten_sort_key_count_version(data=row.pop("os_partition"), merge_nested=True)}, + {"name": "device.family", "partitions": __flatten_sort_key_count_version(data=row.get("device_partition"))}, + {"name": "device", + "partitions": __flatten_sort_key_count_version(data=row.pop("device_partition"), merge_nested=True)}, + {"name": "country", "partitions": row.pop("country_partition")} + ] + + +def get_details(project_id, error_id, user_id, **data): + pg_sub_query24 = errors.__get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24") + pg_sub_query24.append("error_id = %(error_id)s") + pg_sub_query30_session = errors.__get_basic_constraints(time_constraint=True, chart=False, + startTime_arg_name="startDate30", + endTime_arg_name="endDate30", + project_key="sessions.project_id") + pg_sub_query30_session.append("sessions.start_ts >= %(startDate30)s") + pg_sub_query30_session.append("sessions.start_ts <= %(endDate30)s") + pg_sub_query30_session.append("error_id = %(error_id)s") + pg_sub_query30_err = errors.__get_basic_constraints(time_constraint=True, chart=False, + startTime_arg_name="startDate30", + endTime_arg_name="endDate30", project_key="errors.project_id") + pg_sub_query30_err.append("sessions.project_id = %(project_id)s") + 
pg_sub_query30_err.append("sessions.start_ts >= %(startDate30)s") + pg_sub_query30_err.append("sessions.start_ts <= %(endDate30)s") + pg_sub_query30_err.append("error_id = %(error_id)s") + pg_sub_query30_err.append("source ='js_exception'") + pg_sub_query30 = errors.__get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size30") + pg_sub_query30.append("error_id = %(error_id)s") + pg_basic_query = errors.__get_basic_constraints(time_constraint=False) + pg_basic_query.append("error_id = %(error_id)s") + with pg_client.PostgresClient() as cur: + data["startDate24"] = TimeUTC.now(-1) + data["endDate24"] = TimeUTC.now() + data["startDate30"] = TimeUTC.now(-30) + data["endDate30"] = TimeUTC.now() + density24 = int(data.get("density24", 24)) + step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24, factor=1) + density30 = int(data.get("density30", 30)) + step_size30 = __get_step_size(data["startDate30"], data["endDate30"], density30, factor=1) + params = { + "startDate24": data['startDate24'], + "endDate24": data['endDate24'], + "startDate30": data['startDate30'], + "endDate30": data['endDate30'], + "project_id": project_id, + "userId": user_id, + "step_size24": step_size24, + "step_size30": step_size30, + "error_id": error_id} + + main_pg_query = f"""\ + SELECT error_id, + name, + message, + users, + sessions, + last_occurrence, + first_occurrence, + last_session_id, + browsers_partition, + os_partition, + device_partition, + country_partition, + chart24, + chart30, + custom_tags + FROM (SELECT error_id, + name, + message, + COUNT(DISTINCT user_id) AS users, + COUNT(DISTINCT session_id) AS sessions + FROM public.errors + INNER JOIN events.errors AS s_errors USING (error_id) + INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30_err)} + GROUP BY error_id, name, message) AS details + INNER JOIN (SELECT MAX(timestamp) AS last_occurrence, + MIN(timestamp) AS first_occurrence + FROM events.errors + WHERE error_id = %(error_id)s) AS time_details ON (TRUE) + INNER JOIN (SELECT session_id AS last_session_id, + coalesce(custom_tags, '[]')::jsonb AS custom_tags + FROM events.errors + LEFT JOIN LATERAL ( + SELECT jsonb_agg(jsonb_build_object(errors_tags.key, errors_tags.value)) AS custom_tags + FROM errors_tags + WHERE errors_tags.error_id = %(error_id)s + AND errors_tags.session_id = errors.session_id + AND errors_tags.message_id = errors.message_id) AS errors_tags ON (TRUE) + WHERE error_id = %(error_id)s + ORDER BY errors.timestamp DESC + LIMIT 1) AS last_session_details ON (TRUE) + INNER JOIN (SELECT jsonb_agg(browser_details) AS browsers_partition + FROM (SELECT * + FROM (SELECT user_browser AS name, + COUNT(session_id) AS count + FROM events.errors + INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30_session)} + GROUP BY user_browser + ORDER BY count DESC) AS count_per_browser_query + INNER JOIN LATERAL (SELECT JSONB_AGG(version_details) AS partition + FROM (SELECT user_browser_version AS version, + COUNT(session_id) AS count + FROM events.errors INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30_session)} + AND sessions.user_browser = count_per_browser_query.name + GROUP BY user_browser_version + ORDER BY count DESC) AS version_details + ) AS browser_version_details ON (TRUE)) AS browser_details) AS browser_details ON (TRUE) + INNER JOIN (SELECT jsonb_agg(os_details) AS os_partition + FROM (SELECT * + FROM (SELECT user_os AS name, + COUNT(session_id) AS count + 
FROM events.errors INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30_session)} + GROUP BY user_os + ORDER BY count DESC) AS count_per_os_details + INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition + FROM (SELECT COALESCE(user_os_version,'unknown') AS version, COUNT(session_id) AS count + FROM events.errors INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30_session)} + AND sessions.user_os = count_per_os_details.name + GROUP BY user_os_version + ORDER BY count DESC) AS count_per_version_details + GROUP BY count_per_os_details.name ) AS os_version_details + ON (TRUE)) AS os_details) AS os_details ON (TRUE) + INNER JOIN (SELECT jsonb_agg(device_details) AS device_partition + FROM (SELECT * + FROM (SELECT user_device_type AS name, + COUNT(session_id) AS count + FROM events.errors INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30_session)} + GROUP BY user_device_type + ORDER BY count DESC) AS count_per_device_details + INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_v_details) AS partition + FROM (SELECT CASE + WHEN user_device = '' OR user_device ISNULL + THEN 'unknown' + ELSE user_device END AS version, + COUNT(session_id) AS count + FROM events.errors INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30_session)} + AND sessions.user_device_type = count_per_device_details.name + GROUP BY user_device + ORDER BY count DESC) AS count_per_device_v_details + GROUP BY count_per_device_details.name ) AS device_version_details + ON (TRUE)) AS device_details) AS device_details ON (TRUE) + INNER JOIN (SELECT jsonb_agg(count_per_country_details) AS country_partition + FROM (SELECT user_country AS name, + COUNT(session_id) AS count + FROM events.errors INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30_session)} + GROUP BY user_country + ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE) + INNER JOIN (SELECT jsonb_agg(chart_details) AS chart24 + FROM (SELECT generated_timestamp AS timestamp, + COUNT(session_id) AS count + FROM generate_series(%(startDate24)s, %(endDate24)s, %(step_size24)s) AS generated_timestamp + LEFT JOIN LATERAL (SELECT DISTINCT session_id + FROM events.errors + INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query24)} + ) AS chart_details ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp) AS chart_details) AS chart_details24 ON (TRUE) + INNER JOIN (SELECT jsonb_agg(chart_details) AS chart30 + FROM (SELECT generated_timestamp AS timestamp, + COUNT(session_id) AS count + FROM generate_series(%(startDate30)s, %(endDate30)s, %(step_size30)s) AS generated_timestamp + LEFT JOIN LATERAL (SELECT DISTINCT session_id + FROM events.errors INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query30)}) AS chart_details + ON (TRUE) + GROUP BY timestamp + ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE); + """ + + # print("--------------------") + # print(cur.mogrify(main_pg_query, params)) + # print("--------------------") + cur.execute(cur.mogrify(main_pg_query, params)) + row = cur.fetchone() + if row is None: + return {"errors": ["error not found"]} + row["tags"] = __process_tags(row) + + query = cur.mogrify( + f"""SELECT error_id, status, session_id, start_ts, + parent_error_id,session_id, user_anonymous_id, + user_id, user_uuid, user_browser, user_browser_version, + user_os, 
user_os_version, user_device, payload,
+                               FALSE AS favorite,
+                               TRUE AS viewed
+                        FROM public.errors AS pe
+                                 INNER JOIN events.errors AS ee USING (error_id)
+                                 INNER JOIN public.sessions USING (session_id)
+                        WHERE pe.project_id = %(project_id)s
+                          AND error_id = %(error_id)s
+                        ORDER BY start_ts DESC
+                        LIMIT 1;""",
+            {"project_id": project_id, "error_id": error_id, "user_id": user_id})
+        cur.execute(query=query)
+        status = cur.fetchone()
+
+        if status is not None:
+            row["stack"] = errors_helper.format_first_stack_frame(status).pop("stack")
+            row["status"] = status.pop("status")
+            row["parent_error_id"] = status.pop("parent_error_id")
+            row["favorite"] = status.pop("favorite")
+            row["viewed"] = status.pop("viewed")
+            row["last_hydrated_session"] = status
+        else:
+            row["stack"] = []
+            row["last_hydrated_session"] = None
+            row["status"] = "untracked"
+            row["parent_error_id"] = None
+            row["favorite"] = False
+            row["viewed"] = False
+    return {"data": helper.dict_to_camel_case(row)}
diff --git a/api/chalicelib/utils/errors_helper.py b/api/chalicelib/utils/errors_helper.py
index 73a430110..6c0d697d6 100644
--- a/api/chalicelib/utils/errors_helper.py
+++ b/api/chalicelib/utils/errors_helper.py
@@ -1,4 +1,4 @@
-from chalicelib.core import sourcemaps
+from chalicelib.core.sourcemaps import sourcemaps
 
 
 def format_first_stack_frame(error):
diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py
index 29ea84873..fd47e5389 100644
--- a/api/chalicelib/utils/pg_client.py
+++ b/api/chalicelib/utils/pg_client.py
@@ -147,7 +147,12 @@ class PostgresClient:
             logger.error(f"!!! Error of type:{type(error)} while executing query:")
             logger.error(query)
             logger.info("starting rollback to allow future execution")
-            self.connection.rollback()
+            try:
+                self.connection.rollback()
+            except psycopg2.InterfaceError as e:
+                logger.error("!!! Error while rolling back the connection: %s", e)
+                logger.error("!!! 
Trying to recreate the cursor") + self.recreate_cursor() raise error return result diff --git a/api/requirements-alerts.txt b/api/requirements-alerts.txt index ee8bbc950..c085444dc 100644 --- a/api/requirements-alerts.txt +++ b/api/requirements-alerts.txt @@ -1,19 +1,19 @@ urllib3==2.2.3 requests==2.32.3 -boto3==1.35.76 +boto3==1.35.86 pyjwt==2.10.1 psycopg2-binary==2.9.10 psycopg[pool,binary]==3.2.3 clickhouse-driver[lz4]==0.2.9 -clickhouse-connect==0.8.9 -elasticsearch==8.16.0 +clickhouse-connect==0.8.11 +elasticsearch==8.17.0 jira==3.8.0 cachetools==5.5.0 fastapi==0.115.6 -uvicorn[standard]==0.32.1 +uvicorn[standard]==0.34.0 python-decouple==3.8 -pydantic[email]==2.10.3 +pydantic[email]==2.10.4 apscheduler==3.11.0 diff --git a/api/requirements.txt b/api/requirements.txt index d643061f1..e9fd3b025 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -1,21 +1,21 @@ urllib3==2.2.3 requests==2.32.3 -boto3==1.35.76 +boto3==1.35.86 pyjwt==2.10.1 psycopg2-binary==2.9.10 psycopg[pool,binary]==3.2.3 clickhouse-driver[lz4]==0.2.9 -clickhouse-connect==0.8.9 -elasticsearch==8.16.0 +clickhouse-connect==0.8.11 +elasticsearch==8.17.0 jira==3.8.0 cachetools==5.5.0 fastapi==0.115.6 -uvicorn[standard]==0.32.1 +uvicorn[standard]==0.34.0 python-decouple==3.8 -pydantic[email]==2.10.3 +pydantic[email]==2.10.4 apscheduler==3.11.0 redis==5.2.1 diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index ce31c2efd..44f1ba5c2 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -8,9 +8,9 @@ from starlette.responses import RedirectResponse, FileResponse, JSONResponse, Re import schemas from chalicelib.core import scope -from chalicelib.core import errors, assist, signup, feature_flags +from chalicelib.core import assist, signup, feature_flags from chalicelib.core.metrics import heatmaps -from chalicelib.core.errors import errors_favorite, errors_viewed +from chalicelib.core.errors import errors_favorite, errors_viewed, errors, errors_details from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_viewed, \ sessions_assignments, unprocessed_sessions, sessions_search from chalicelib.core import tenants, users, projects, license @@ -331,8 +331,8 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str, @app.get('/{projectId}/errors/{errorId}', tags=['errors']) def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): - data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId, - **{"density24": density24, "density30": density30}) + data = errors_details.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId, + **{"density24": density24, "density30": density30}) if data.get("data") is not None: background_tasks.add_task(errors_viewed.viewed_error, project_id=projectId, user_id=context.user_id, error_id=errorId) diff --git a/ee/api/.gitignore b/ee/api/.gitignore index f79a41945..6cea5aefd 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -293,3 +293,4 @@ Pipfile.lock /chalicelib/core/errors/errors_ch.py /chalicelib/core/errors/errors_favorite.py /chalicelib/core/errors/errors_viewed.py +/chalicelib/core/errors/errors_details.py diff --git a/ee/api/Pipfile b/ee/api/Pipfile index a32b99a8e..9b162eb18 100644 --- a/ee/api/Pipfile +++ b/ee/api/Pipfile @@ -6,24 +6,24 @@ name = "pypi" [packages] urllib3 = "==2.2.3" requests = 
"==2.32.3" -boto3 = "==1.35.76" +boto3 = "==1.35.86" pyjwt = "==2.10.1" psycopg2-binary = "==2.9.10" -psycopg = {extras = ["pool", "binary"], version = "==3.2.3"} +psycopg = {extras = ["binary", "pool"], version = "==3.2.3"} clickhouse-driver = {extras = ["lz4"], version = "==0.2.9"} -clickhouse-connect = "==0.8.9" -elasticsearch = "==8.16.0" +clickhouse-connect = "==0.8.11" +elasticsearch = "==8.17.0" jira = "==3.8.0" cachetools = "==5.5.0" fastapi = "==0.115.6" -uvicorn = {extras = ["standard"], version = "==0.32.1"} +uvicorn = {extras = ["standard"], version = "==0.34.0"} gunicorn = "==23.0.0" python-decouple = "==3.8" -pydantic = {extras = ["email"], version = "==2.10.3"} +pydantic = {extras = ["email"], version = "==2.10.4"} apscheduler = "==3.11.0" redis = "==5.2.1" python3-saml = "==1.16.0" -python-multipart = "==0.0.17" +python-multipart = "==0.0.20" azure-storage-blob = "==12.24.0" [dev-packages] diff --git a/ee/api/chalicelib/core/errors/__init__.py b/ee/api/chalicelib/core/errors/__init__.py index 621bf311f..dfa66e2bd 100644 --- a/ee/api/chalicelib/core/errors/__init__.py +++ b/ee/api/chalicelib/core/errors/__init__.py @@ -8,6 +8,7 @@ if config("EXP_ERRORS_SEARCH", cast=bool, default=False): logger.info(">>> Using experimental error search") from . import errors as errors_legacy from . import errors_ch as errors + from . import errors_details_exp as errors_details else: from . import errors diff --git a/ee/api/chalicelib/core/errors/errors_details_exp.py b/ee/api/chalicelib/core/errors/errors_details_exp.py new file mode 100644 index 000000000..51d4c1e75 --- /dev/null +++ b/ee/api/chalicelib/core/errors/errors_details_exp.py @@ -0,0 +1,261 @@ +from decouple import config + +import schemas +from . import errors +from chalicelib.core import metrics, metadata +from chalicelib.core import sessions +from chalicelib.utils import ch_client, exp_ch_helper +from chalicelib.utils import pg_client, helper +from chalicelib.utils.TimeUTC import TimeUTC + + +def __flatten_sort_key_count_version(data, merge_nested=False): + if data is None: + return [] + return sorted( + [ + { + "name": f"{o[0][0][0]}@{v[0]}", + "count": v[1] + } for o in data for v in o[2] + ], + key=lambda o: o["count"], reverse=True) if merge_nested else \ + [ + { + "name": o[0][0][0], + "count": o[1][0][0], + } for o in data + ] + + +def __transform_map_to_tag(data, key1, key2, requested_key): + result = [] + for i in data: + if requested_key == 0 and i.get(key1) is None and i.get(key2) is None: + result.append({"name": "all", "count": int(i.get("count"))}) + elif requested_key == 1 and i.get(key1) is not None and i.get(key2) is None: + result.append({"name": i.get(key1), "count": int(i.get("count"))}) + elif requested_key == 2 and i.get(key1) is not None and i.get(key2) is not None: + result.append({"name": i.get(key2), "count": int(i.get("count"))}) + return result + + +def __process_tags_map(row): + browsers_partition = row.pop("browsers_partition") + os_partition = row.pop("os_partition") + device_partition = row.pop("device_partition") + country_partition = row.pop("country_partition") + return [ + {"name": "browser", + "partitions": __transform_map_to_tag(data=browsers_partition, + key1="browser", + key2="browser_version", + requested_key=1)}, + {"name": "browser.ver", + "partitions": __transform_map_to_tag(data=browsers_partition, + key1="browser", + key2="browser_version", + requested_key=2)}, + {"name": "OS", + "partitions": __transform_map_to_tag(data=os_partition, + key1="os", + key2="os_version", + 
requested_key=1) + }, + {"name": "OS.ver", + "partitions": __transform_map_to_tag(data=os_partition, + key1="os", + key2="os_version", + requested_key=2)}, + {"name": "device.family", + "partitions": __transform_map_to_tag(data=device_partition, + key1="device_type", + key2="device", + requested_key=1)}, + {"name": "device", + "partitions": __transform_map_to_tag(data=device_partition, + key1="device_type", + key2="device", + requested_key=2)}, + {"name": "country", "partitions": __transform_map_to_tag(data=country_partition, + key1="country", + key2="", + requested_key=1)} + ] + + +def get_details(project_id, error_id, user_id, **data): + MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(0) + MAIN_ERR_SESS_TABLE = exp_ch_helper.get_main_js_errors_sessions_table(0) + MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(0) + + ch_sub_query24 = errors.__get_basic_constraints(startTime_arg_name="startDate24", endTime_arg_name="endDate24") + ch_sub_query24.append("error_id = %(error_id)s") + + ch_sub_query30 = errors.__get_basic_constraints(startTime_arg_name="startDate30", endTime_arg_name="endDate30", + project_key="errors.project_id") + ch_sub_query30.append("error_id = %(error_id)s") + ch_basic_query = errors.__get_basic_constraints(time_constraint=False) + ch_basic_query.append("error_id = %(error_id)s") + + with ch_client.ClickHouseClient() as ch: + data["startDate24"] = TimeUTC.now(-1) + data["endDate24"] = TimeUTC.now() + data["startDate30"] = TimeUTC.now(-30) + data["endDate30"] = TimeUTC.now() + + density24 = int(data.get("density24", 24)) + step_size24 = errors.__get_step_size(data["startDate24"], data["endDate24"], density24) + density30 = int(data.get("density30", 30)) + step_size30 = errors.__get_step_size(data["startDate30"], data["endDate30"], density30) + params = { + "startDate24": data['startDate24'], + "endDate24": data['endDate24'], + "startDate30": data['startDate30'], + "endDate30": data['endDate30'], + "project_id": project_id, + "userId": user_id, + "step_size24": step_size24, + "step_size30": step_size30, + "error_id": error_id} + + main_ch_query = f"""\ + WITH pre_processed AS (SELECT error_id, + name, + message, + session_id, + datetime, + user_id, + user_browser, + user_browser_version, + user_os, + user_os_version, + user_device_type, + user_device, + user_country, + error_tags_keys, + error_tags_values + FROM {MAIN_ERR_SESS_TABLE} AS errors + WHERE {" AND ".join(ch_basic_query)} + ) + SELECT %(error_id)s AS error_id, name, message,users, + first_occurrence,last_occurrence,last_session_id, + sessions,browsers_partition,os_partition,device_partition, + country_partition,chart24,chart30,custom_tags + FROM (SELECT error_id, + name, + message + FROM pre_processed + LIMIT 1) AS details + INNER JOIN (SELECT COUNT(DISTINCT user_id) AS users, + COUNT(DISTINCT session_id) AS sessions + FROM pre_processed + WHERE datetime >= toDateTime(%(startDate30)s / 1000) + AND datetime <= toDateTime(%(endDate30)s / 1000) + ) AS last_month_stats ON TRUE + INNER JOIN (SELECT toUnixTimestamp(max(datetime)) * 1000 AS last_occurrence, + toUnixTimestamp(min(datetime)) * 1000 AS first_occurrence + FROM pre_processed) AS time_details ON TRUE + INNER JOIN (SELECT session_id AS last_session_id, + arrayMap((key, value)->(map(key, value)), error_tags_keys, error_tags_values) AS custom_tags + FROM pre_processed + ORDER BY datetime DESC + LIMIT 1) AS last_session_details ON TRUE + INNER JOIN (SELECT groupArray(details) AS browsers_partition + FROM (SELECT COUNT(1) AS count, + 
coalesce(nullIf(user_browser,''),toNullable('unknown')) AS browser,
+                                    coalesce(nullIf(user_browser_version,''),toNullable('unknown')) AS browser_version,
+                                    map('browser', browser,
+                                        'browser_version', browser_version,
+                                        'count', toString(count)) AS details
+                             FROM pre_processed
+                             GROUP BY ROLLUP(browser, browser_version)
+                             ORDER BY browser nulls first, browser_version nulls first, count DESC) AS mapped_browser_details
+                    ) AS browser_details ON TRUE
+           INNER JOIN (SELECT groupArray(details) AS os_partition
+                       FROM (SELECT COUNT(1) AS count,
+                                    coalesce(nullIf(user_os,''),toNullable('unknown')) AS os,
+                                    coalesce(nullIf(user_os_version,''),toNullable('unknown')) AS os_version,
+                                    map('os', os,
+                                        'os_version', os_version,
+                                        'count', toString(count)) AS details
+                             FROM pre_processed
+                             GROUP BY ROLLUP(os, os_version)
+                             ORDER BY os nulls first, os_version nulls first, count DESC) AS mapped_os_details
+                    ) AS os_details ON TRUE
+           INNER JOIN (SELECT groupArray(details) AS device_partition
+                       FROM (SELECT COUNT(1) AS count,
+                                    coalesce(nullIf(user_device,''),toNullable('unknown')) AS user_device,
+                                    map('device_type', toString(user_device_type),
+                                        'device', user_device,
+                                        'count', toString(count)) AS details
+                             FROM pre_processed
+                             GROUP BY ROLLUP(user_device_type, user_device)
+                             ORDER BY user_device_type nulls first, user_device nulls first, count DESC
+                            ) AS count_per_device_details
+                    ) AS mapped_device_details ON TRUE
+           INNER JOIN (SELECT groupArray(details) AS country_partition
+                       FROM (SELECT COUNT(1) AS count,
+                                    map('country', toString(user_country),
+                                        'count', toString(count)) AS details
+                             FROM pre_processed
+                             GROUP BY user_country
+                             ORDER BY count DESC) AS count_per_country_details
+                    ) AS mapped_country_details ON TRUE
+           INNER JOIN (SELECT groupArray(map('timestamp', timestamp, 'count', count)) AS chart24
+                       FROM (SELECT toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size24)s second)) *
+                                    1000 AS timestamp,
+                                    COUNT(DISTINCT session_id) AS count
+                             FROM {MAIN_EVENTS_TABLE} AS errors
+                             WHERE {" AND ".join(ch_sub_query24)}
+                             GROUP BY timestamp
+                             ORDER BY timestamp) AS chart_details
+                    ) AS chart_details24 ON TRUE
+           INNER JOIN (SELECT groupArray(map('timestamp', timestamp, 'count', count)) AS chart30
+                       FROM (SELECT toUnixTimestamp(toStartOfInterval(datetime, INTERVAL %(step_size30)s second)) *
+                                    1000 AS timestamp,
+                                    COUNT(DISTINCT session_id) AS count
+                             FROM {MAIN_EVENTS_TABLE} AS errors
+                             WHERE {" AND ".join(ch_sub_query30)}
+                             GROUP BY timestamp
+                             ORDER BY timestamp) AS chart_details
+                    ) AS chart_details30 ON TRUE;"""
+
+        # print("--------------------")
+        # print(ch.format(main_ch_query, params))
+        # print("--------------------")
+        row = ch.execute(query=main_ch_query, parameters=params)
+        if len(row) == 0:
+            return {"errors": ["error not found"]}
+        row = row[0]
+
+        row["tags"] = __process_tags_map(row)
+
+        query = f"""SELECT session_id, toUnixTimestamp(datetime) * 1000 AS start_ts,
+                           user_anonymous_id, user_id, user_uuid, user_browser, user_browser_version,
+                           user_os, user_os_version, user_device, FALSE AS favorite, TRUE AS viewed
+                    FROM {MAIN_SESSIONS_TABLE} AS sessions
+                    WHERE project_id = toUInt16(%(project_id)s)
+                      AND session_id = %(session_id)s
+                    ORDER BY datetime DESC
+                    LIMIT 1;"""
+        params = {"project_id": project_id, "session_id": row["last_session_id"], "userId": user_id}
+        # print("--------------------")
+        # print(ch.format(query, params))
+        # print("--------------------")
+        status = ch.execute(query=query, parameters=params)
+
+        if status is not None and len(status) > 0:
+            status = status[0]
+            row["favorite"] = 
status.pop("favorite") + row["viewed"] = status.pop("viewed") + row["last_hydrated_session"] = status + else: + row["last_hydrated_session"] = None + row["favorite"] = False + row["viewed"] = False + row["chart24"] = metrics.__complete_missing_steps(start_time=data["startDate24"], end_time=data["endDate24"], + density=density24, rows=row["chart24"], neutral={"count": 0}) + row["chart30"] = metrics.__complete_missing_steps(start_time=data["startDate30"], end_time=data["endDate30"], + density=density30, rows=row["chart30"], neutral={"count": 0}) + return {"data": helper.dict_to_camel_case(row)} diff --git a/ee/api/clean-dev.sh b/ee/api/clean-dev.sh index 5fb3701b2..b94270c2f 100755 --- a/ee/api/clean-dev.sh +++ b/ee/api/clean-dev.sh @@ -113,3 +113,4 @@ rm -rf /chalicelib/core/errors/errors.py rm -rf /chalicelib/core/errors/errors_ch.py rm -rf /chalicelib/core/errors/errors_favorite.py rm -rf /chalicelib/core/errors/errors_viewed.py +rm -rf /chalicelib/core/errors/errors_details.py diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index 538364050..43b788f6a 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -1,31 +1,30 @@ urllib3==2.2.3 requests==2.32.3 -boto3==1.35.76 +boto3==1.35.86 pyjwt==2.10.1 psycopg2-binary==2.9.10 psycopg[pool,binary]==3.2.3 clickhouse-driver[lz4]==0.2.9 -clickhouse-connect==0.8.9 -elasticsearch==8.16.0 +clickhouse-connect==0.8.11 +elasticsearch==8.17.0 jira==3.8.0 cachetools==5.5.0 fastapi==0.115.6 -uvicorn[standard]==0.32.1 +uvicorn[standard]==0.34.0 gunicorn==23.0.0 python-decouple==3.8 -pydantic[email]==2.10.3 +pydantic[email]==2.10.4 apscheduler==3.11.0 redis==5.2.1 # TODO: enable after xmlsec fix https://github.com/xmlsec/python-xmlsec/issues/252 #--no-binary is used to avoid libxml2 library version incompatibilities between xmlsec and lxml -python3-saml==1.16.0 ---no-binary=lxml -python-multipart==0.0.18 +python3-saml==1.16.0 --no-binary=lxml +python-multipart==0.0.20 #confluent-kafka==2.1.0 azure-storage-blob==12.24.0