feat(analytics): filters
commit 4709918254 (parent 4eae2ef439)
5 changed files with 776 additions and 25 deletions
@@ -6,6 +6,24 @@ import (
     "time"
 )
 
+type MetricType string
+type MetricOfTimeseries string
+type MetricOfTable string
+
+const (
+    MetricTypeTimeseries MetricType = "TIMESERIES"
+    MetricTypeTable      MetricType = "TABLE"
+
+    MetricOfTimeseriesSessionCount MetricOfTimeseries = "SESSION_COUNT"
+    MetricOfTimeseriesUserCount    MetricOfTimeseries = "USER_COUNT"
+
+    MetricOfTableVisitedURL  MetricOfTable = "VISITED_URL"
+    MetricOfTableIssues      MetricOfTable = "ISSUES"
+    MetricOfTableUserCountry MetricOfTable = "USER_COUNTRY"
+    MetricOfTableUserDevice  MetricOfTable = "USER_DEVICE"
+    MetricOfTableUserBrowser MetricOfTable = "USER_BROWSER"
+)
+
 // CardBase Common fields for the Card entity
 type CardBase struct {
     Name string `json:"name" validate:"required"`
@@ -27,7 +27,7 @@ func New(log logger.Logger, conn pool.Pool) (Charts, error) {
     }, nil
 }
 
-// def get_chart()
+// GetData def get_chart()
 func (s *chartsImpl) GetData(projectId int, userID uint64, req *GetCardChartDataRequest) ([]DataPoint, error) {
     if req == nil {
         return nil, fmt.Errorf("request is empty")
@@ -109,14 +109,47 @@ func (s *chartsImpl) getMetric(projectID int, userID uint64, req *GetCardChartDa
     }
 
 func (s *chartsImpl) getTimeseriesCharts(projectID int, userID uint64, req *GetCardChartDataRequest) ([]DataPoint, error) {
-    charts := []interface{}{}
+    var dataPoints []DataPoint
+
+    var stepSize = getStepSize(req.StartTimestamp, req.EndTimestamp, req.Density, true, 1000)
+    var query string
+
+    switch req.MetricOf {
+    case "sessionCount":
+        query = fmt.Sprintf(`
+            SELECT
+                toUnixTimestamp(toStartOfInterval(processed_sessions.datetime, INTERVAL %d second)) * 1000 AS timestamp,
+                COUNT(processed_sessions.session_id) AS count
+            FROM (
+                SELECT
+                    s.session_id AS session_id,
+                    s.datetime AS datetime
+                %s
+            ) AS processed_sessions
+            GROUP BY timestamp
+            ORDER BY timestamp;
+        `, stepSize, "query_part") // Replace "query_part" with the actual query part
+    default:
+        return nil, fmt.Errorf("unsupported metric: %s", req.MetricOf)
+    }
+
+    fmt.Printf("stepSize: %v\n", stepSize)
+
     for _, series := range req.Series {
         res, err := s.searchSeries(projectID, series)
         if err != nil {
-            return nil, err
+            return nil, fmt.Errorf("failed to search series: %w", err)
+        }
+        if seriesData, ok := res.([]DataPoint); ok {
+            dataPoints = append(dataPoints, seriesData...)
+        } else {
+            return nil, fmt.Errorf("unexpected data format from searchSeries")
         }
-        charts = append(charts, res)
     }
-    results := []interface{}{}
-    return results, nil
+    return dataPoints, nil
 }
+
+func (s *chartsImpl) searchSeries(projectID int, series cards.CardSeries) (interface{}, error) {
+    // Placeholder implementation
+    return []DataPoint{}, nil
+}
@@ -38,14 +38,29 @@ func getMetadataFields() Fields {
     }
 }
 
-func getStepSize(startTimestamp, endTimestamp, density uint64, decimal bool, factor uint64) float64 {
-    stepSize := (endTimestamp / factor) - (startTimestamp / factor) // TODO: should I use float64 here?
-    if !decimal {
-        density--
-    }
-    return float64(stepSize) / float64(density)
+func getStepSize(startTimestamp, endTimestamp int64, density int, decimal bool, factor int) float64 {
+    factorInt64 := int64(factor)
+    stepSize := (endTimestamp / factorInt64) - (startTimestamp / factorInt64)
+
+    if density <= 1 {
+        return float64(stepSize)
+    }
+
+    if decimal {
+        return float64(stepSize) / float64(density)
+    }
+
+    return float64(stepSize / int64(density-1))
 }
 
+//func getStepSize(startTimestamp, endTimestamp, density uint64, decimal bool, factor uint64) float64 {
+//    stepSize := (endTimestamp / factor) - (startTimestamp / factor) // TODO: should I use float64 here?
+//    if !decimal {
+//        density--
+//    }
+//    return float64(stepSize) / float64(density)
+//}
+
 func getBasicConstraints(tableName string, timeConstraint, roundStart bool, data map[string]interface{}, identifier string) []string { // If tableName is not empty, append a dot
     if tableName != "" {
         tableName += "."
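(Editor's aside, not part of the diff.) To make the new step-size behaviour concrete, here is a minimal standalone sketch: the function body is copied from the hunk above, and the 24-hour range and density of 24 are made-up sample values.

package main

import "fmt"

// Copy of the new getStepSize from the hunk above, for illustration only.
func getStepSize(startTimestamp, endTimestamp int64, density int, decimal bool, factor int) float64 {
    factorInt64 := int64(factor)
    stepSize := (endTimestamp / factorInt64) - (startTimestamp / factorInt64)

    if density <= 1 {
        return float64(stepSize)
    }

    if decimal {
        return float64(stepSize) / float64(density)
    }

    return float64(stepSize / int64(density-1))
}

func main() {
    start := int64(1737043724664) // sample millisecond timestamp
    end := start + 24*60*60*1000  // 24 hours later
    fmt.Println(getStepSize(start, end, 24, true, 1000))  // 3600: one bucket per hour, in seconds
    fmt.Println(getStepSize(start, end, 24, false, 1000)) // 3756: integer division by density-1
}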
@@ -146,8 +161,8 @@ def get_main_sessions_table(timestamp=0):
     if config("EXP_7D_MV", cast=bool, default=True) \
         and timestamp and timestamp >= TimeUTC.now(delta_days=-7) else "experimental.sessions"
 */
-func getMainSessionsTable(timestamp uint64) string {
-    return "experimental.sessions"
+func getMainSessionsTable(timestamp int64) string {
+    return "product_analytics.sessions"
 }
 
 // Function to convert named parameters to positional parameters
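(Editor's aside, not part of the diff.) The named-to-positional helper referenced by the comment above (replaceNamedParams) is not shown in this changeset. Purely as an illustration of the idea, here is a rough standalone sketch; the name replaceNamedParamsSketch, the regexp approach, and the "?" placeholder style are assumptions, not the project's implementation.

package main

import (
    "fmt"
    "regexp"
)

// replaceNamedParamsSketch rewrites ":name" placeholders into "?" and returns
// the arguments in the order they appear in the query. Hypothetical helper.
func replaceNamedParamsSketch(query string, params map[string]interface{}) (string, []interface{}) {
    re := regexp.MustCompile(`:(\w+)`)
    var args []interface{}
    out := re.ReplaceAllStringFunc(query, func(m string) string {
        args = append(args, params[m[1:]])
        return "?"
    })
    return out, args
}

func main() {
    q, args := replaceNamedParamsSketch(
        "SELECT * FROM sessions WHERE project_id = :project_id AND datetime >= :start",
        map[string]interface{}{"project_id": 1, "start": 1737043724},
    )
    fmt.Println(q, args)
}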
@@ -186,7 +201,7 @@ func addMissingKeys(original, complete map[string]interface{}) map[string]interf
 
 // CompleteMissingSteps fills in missing steps in the data
 func CompleteMissingSteps(
-    startTime, endTime uint64,
+    startTime, endTime int64,
     density int,
     neutral map[string]interface{},
     rows []map[string]interface{},
@@ -198,7 +213,7 @@ func CompleteMissingSteps(
     }
 
     // Calculate the step size
-    step := getStepSize(startTime, endTime, uint64(density), true, 1000)
+    step := getStepSize(startTime, endTime, density, true, 1000)
     optimal := make([][2]uint64, 0)
     for _, i := range frange(float64(startTime)/float64(timeCoefficient), float64(endTime)/float64(timeCoefficient), step) {
         startInterval := uint64(i * float64(timeCoefficient))
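(Editor's aside, not part of the diff.) The loop above relies on a frange helper that does not appear in this changeset; a minimal sketch of such a float-range generator, with the name and semantics inferred from the call site, is:

package main

import "fmt"

// frange returns values from start (inclusive) to stop (exclusive) in
// increments of step. Hypothetical helper inferred from the call site above.
func frange(start, stop, step float64) []float64 {
    var out []float64
    for v := start; v < stop; v += step {
        out = append(out, v)
    }
    return out
}

func main() {
    fmt.Println(frange(0, 10, 2.5)) // [0 2.5 5 7.5]
}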
@@ -257,7 +272,7 @@ func progress(oldVal, newVal uint64) float64 {
 }
 
 // Trying to find a common part
-func parse(projectID uint64, startTs, endTs uint64, density uint64, args map[string]interface{}) ([]string, []string, map[string]interface{}) {
+func parse(projectID uint64, startTs, endTs int64, density int, args map[string]interface{}) ([]string, []string, map[string]interface{}) {
     stepSize := getStepSize(startTs, endTs, density, false, 1000)
     chSubQuery := getBasicConstraints("sessions", true, false, args, "project_id")
     chSubQueryChart := getBasicConstraints("sessions", true, true, args, "project_id")
@@ -278,7 +293,7 @@ func parse(projectID uint64, startTs, endTs uint64, density uint64, args map[str
 }
 
 // Sessions trend
-func (c *chartsImpl) getProcessedSessions(projectID uint64, startTs, endTs uint64, density uint64, args map[string]interface{}) {
+func (s *chartsImpl) getProcessedSessions(projectID uint64, startTs, endTs int64, density int, args map[string]interface{}) {
     chQuery := `
         SELECT toUnixTimestamp(toStartOfInterval(sessions.datetime, INTERVAL :step_size second)) * 1000 AS timestamp,
         COUNT(DISTINCT sessions.session_id) AS value
@@ -293,7 +308,7 @@ func (c *chartsImpl) getProcessedSessions(projectID uint64, startTs, endTs uint6
     chQuery = strings.Replace(chQuery, ":sub_query_chart", strings.Join(chSubQueryChart, " AND "), -1)
 
     preparedQuery, preparedArgs := replaceNamedParams(chQuery, params)
-    rows, err := c.chConn.Query(context.Background(), preparedQuery, preparedArgs)
+    rows, err := s.chConn.Query(context.Background(), preparedQuery, preparedArgs)
     if err != nil {
         log.Fatalf("Error executing query: %v", err)
     }
@@ -331,7 +346,7 @@ func (c *chartsImpl) getProcessedSessions(projectID uint64, startTs, endTs uint6
     var count uint64
 
     preparedQuery, preparedArgs = replaceNamedParams(chQuery, params)
-    if err := c.chConn.QueryRow(context.Background(), preparedQuery, preparedArgs).Scan(&count); err != nil {
+    if err := s.chConn.QueryRow(context.Background(), preparedQuery, preparedArgs).Scan(&count); err != nil {
         log.Fatalf("Error executing query: %v", err)
     }
@@ -8,14 +8,50 @@ type DataPoint struct {
 }
 
 type GetCardChartDataRequest struct {
+    StartTimestamp int64              `json:"startTimestamp" validate:"required"`
+    EndTimestamp   int64              `json:"endTimestamp" validate:"required"`
+    Density        int                `json:"density" validate:"required"`
     MetricType     string             `json:"metricType" validate:"required,oneof=timeseries table funnel errors performance resources webVitals pathAnalysis retention stickiness heatMap"`
     MetricOf       string             `json:"metricOf" validate:"required,oneof=sessionCount userCount"`
     ViewType       string             `json:"viewType" validate:"required,oneof=lineChart areaChart barChart pieChart progressChart table metric"`
     MetricFormat   string             `json:"metricFormat" validate:"required,oneof=default percentage"`
     SessionID      int64              `json:"sessionId"`
     Series         []cards.CardSeries `json:"series" validate:"required,dive"`
 }
 
 type GetCardChartDataResponse struct {
     Data []DataPoint `json:"data"`
 }
 
+type MetricType string
+type MetricOfTimeseries string
+type MetricOfTable string
+
+const (
+    MetricTypeTimeseries MetricType = "TIMESERIES"
+    MetricTypeTable      MetricType = "TABLE"
+
+    MetricOfTimeseriesSessionCount MetricOfTimeseries = "SESSION_COUNT"
+    MetricOfTimeseriesUserCount    MetricOfTimeseries = "USER_COUNT"
+
+    MetricOfTableVisitedURL  MetricOfTable = "VISITED_URL"
+    MetricOfTableIssues      MetricOfTable = "ISSUES"
+    MetricOfTableUserCountry MetricOfTable = "USER_COUNTRY"
+    MetricOfTableUserDevice  MetricOfTable = "USER_DEVICE"
+    MetricOfTableUserBrowser MetricOfTable = "USER_BROWSER"
+)
+
+type SessionsSearchPayload struct {
+    StartTimestamp int64
+    EndTimestamp   int64
+    Filters        []SessionSearchFilter
+}
+
+type SessionSearchFilter struct {
+    Type     FilterType
+    Value    interface{}
+    Operator SearchEventOperator
+}
+
+type SearchEventOperator string // Define constants as needed
+type FilterType string          // Define constants as needed
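(Editor's aside, not part of the diff.) To make the new required fields concrete, a tiny standalone sketch that unmarshals a sample payload into a trimmed copy of the request struct; Series and validation are omitted, and the density value of 24 is sample data while the timestamps are reused from the jsonInput fixture below.

package main

import (
    "encoding/json"
    "fmt"
)

// Trimmed copy of GetCardChartDataRequest, for illustration only; field names
// and JSON tags mirror the diff above.
type getCardChartDataRequestSketch struct {
    StartTimestamp int64  `json:"startTimestamp"`
    EndTimestamp   int64  `json:"endTimestamp"`
    Density        int    `json:"density"`
    MetricType     string `json:"metricType"`
    MetricOf       string `json:"metricOf"`
    ViewType       string `json:"viewType"`
    MetricFormat   string `json:"metricFormat"`
}

func main() {
    payload := `{
        "startTimestamp": 1737043724664,
        "endTimestamp": 1737130124664,
        "density": 24,
        "metricType": "timeseries",
        "metricOf": "sessionCount",
        "viewType": "lineChart",
        "metricFormat": "default"
    }`
    var req getCardChartDataRequestSketch
    if err := json.Unmarshal([]byte(payload), &req); err != nil {
        panic(err)
    }
    fmt.Printf("%+v\n", req)
}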
backend/pkg/analytics/query/chartQuery.go (new file, 649 lines)
@@ -0,0 +1,649 @@
package main

import (
    "encoding/json"
    "fmt"
    "strings"
)

func main() {
    var r Root
    err := json.Unmarshal([]byte(jsonInput), &r)
    if err != nil {
        panic(err)
    }

    //fmt.Println("ARGS:", r)
    fmt.Println(buildQuery(r))
    //fmt.Println("QUERY PART:", qp)
}
type Table string
type Column string
type FilterType string
type EventOrder string
type FetchFilterType string

const (
    UserOs             FilterType = "userOs"
    UserBrowser        FilterType = "userBrowser"
    UserDevice         FilterType = "userDevice"
    UserCountry        FilterType = "userCountry"
    UserCity           FilterType = "userCity"
    UserState          FilterType = "userState"
    UserId             FilterType = "userId"
    UserAnonymousId    FilterType = "userAnonymousId"
    Referrer           FilterType = "referrer"
    RevId              FilterType = "revId"
    UserOsIos          FilterType = "userOsIos"
    UserDeviceIos      FilterType = "userDeviceIos"
    UserCountryIos     FilterType = "userCountryIos"
    UserIdIos          FilterType = "userIdIos"
    UserAnonymousIdIos FilterType = "userAnonymousIdIos"
    RevIdIos           FilterType = "revIdIos"
    Duration           FilterType = "duration"
    Platform           FilterType = "platform"
    Metadata           FilterType = "metadata"
    Issue              FilterType = "issue"
    EventsCount        FilterType = "eventsCount"
    UtmSource          FilterType = "utmSource"
    UtmMedium          FilterType = "utmMedium"
    UtmCampaign        FilterType = "utmCampaign"
    ThermalState       FilterType = "thermalState"
    MainThreadCPU      FilterType = "mainThreadCPU"
    ViewComponent      FilterType = "viewComponent"
    LogEvent           FilterType = "logEvent"
    ClickEvent         FilterType = "clickEvent"
    MemoryUsage        FilterType = "memoryUsage"
)
const (
    Click         FilterType = "click"
    Input         FilterType = "input"
    Location      FilterType = "location"
    Custom        FilterType = "custom"
    Request       FilterType = "request"
    Fetch         FilterType = "fetch"
    GraphQL       FilterType = "graphql"
    StateAction   FilterType = "stateAction"
    Error         FilterType = "error"
    Tag           FilterType = "tag"
    ClickMobile   FilterType = "clickMobile"
    InputMobile   FilterType = "inputMobile"
    ViewMobile    FilterType = "viewMobile"
    CustomMobile  FilterType = "customMobile"
    RequestMobile FilterType = "requestMobile"
    ErrorMobile   FilterType = "errorMobile"
    SwipeMobile   FilterType = "swipeMobile"
)

const (
    EventOrderThen EventOrder = "then"
    EventOrderOr   EventOrder = "or"
    EventOrderAnd  EventOrder = "and"
)

const (
    FetchFilterTypeFetchUrl          FilterType = "fetchUrl"
    FetchFilterTypeFetchStatusCode   FilterType = "fetchStatusCode"
    FetchFilterTypeFetchMethod       FilterType = "fetchMethod"
    FetchFilterTypeFetchDuration     FilterType = "fetchDuration"
    FetchFilterTypeFetchRequestBody  FilterType = "fetchRequestBody"
    FetchFilterTypeFetchResponseBody FilterType = "fetchResponseBody"
)

const (
    OperatorStringIs          = "is"
    OperatorStringIsAny       = "isAny"
    OperatorStringOn          = "on"
    OperatorStringOnAny       = "onAny"
    OperatorStringIsNot       = "isNot"
    OperatorStringIsUndefined = "isUndefined"
    OperatorStringNotOn       = "notOn"
    OperatorStringContains    = "contains"
    OperatorStringNotContains = "notContains"
    OperatorStringStartsWith  = "startsWith"
    OperatorStringEndsWith    = "endsWith"
)

const (
    OperatorMathEq = "="
    OperatorMathLt = "<"
    OperatorMathGt = ">"
    OperatorMathLe = "<="
    OperatorMathGe = ">="
)
//--------------------------------------------------
// Constants for columns, tables, etc.
//--------------------------------------------------

const (
    TableEvents   Table = "product_analytics.events"
    TableSessions Table = "experimental.sessions"

    ColEventTime       Column = "main.created_at"
    ColEventName       Column = "main.`$event_name`"
    ColEventProjectID  Column = "main.project_id"
    ColEventProperties Column = "main.`$properties`"
    ColEventSessionID  Column = "main.session_id"
    ColEventURLPath    Column = "main.url_path"
    ColEventStatus     Column = "main.status"

    ColSessionID        Column = "s.session_id"
    ColDuration         Column = "s.duration"
    ColUserCountry      Column = "s.user_country"
    ColUserCity         Column = "s.user_city"
    ColUserState        Column = "s.user_state"
    ColUserID           Column = "s.user_id"
    ColUserAnonymousID  Column = "s.user_anonymous_id"
    ColUserOS           Column = "s.user_os"
    ColUserBrowser      Column = "s.user_browser"
    ColUserDevice       Column = "s.user_device"
    ColUserDeviceType   Column = "s.user_device_type"
    ColRevID            Column = "s.rev_id"
    ColBaseReferrer     Column = "s.base_referrer"
    ColUtmSource        Column = "s.utm_source"
    ColUtmMedium        Column = "s.utm_medium"
    ColUtmCampaign      Column = "s.utm_campaign"
    ColMetadata1        Column = "s.metadata_1"
    ColSessionProjectID Column = "s.project_id"
    ColSessionIsNotNull Column = "isNotNull(s.duration)"
)
type Root struct {
    StartTimestamp int64    `json:"startTimestamp"`
    EndTimestamp   int64    `json:"endTimestamp"`
    Series         []Series `json:"series"`
}

type Series struct {
    SeriesID int64        `json:"seriesId"`
    Name     string       `json:"name"`
    Filter   SeriesFilter `json:"filter"`
}

type SeriesFilter struct {
    Filters     []FilterObj `json:"filters"`
    EventsOrder EventOrder  `json:"eventsOrder"`
}

type FilterObj struct {
    Type     FilterType  `json:"type"`
    IsEvent  bool        `json:"isEvent"`
    Value    []string    `json:"value"`
    Operator string      `json:"operator"`
    Source   string      `json:"source"`
    Filters  []FilterObj `json:"filters"`
}

// --------------------------------------------------
func buildQuery(r Root) string {
    s := r.Series[0]

    // iterate over series and partition filters
    //for _, s := range r.Series {
    //    sessionFilters, eventFilters := partitionFilters(s.Filter.Filters)
    //    sessionWhere := buildSessionWhere(sessionFilters)
    //    eventWhere, seqHaving := buildEventsWhere(eventFilters, s.Filter.EventsOrder)
    //    fmt.Println("SESSION FILTERS:", sessionFilters)
    //    fmt.Println("EVENT FILTERS:", eventFilters)
    //    fmt.Println("SESSION WHERE:", sessionWhere)
    //    fmt.Println("EVENT WHERE:", eventWhere)
    //    fmt.Println("SEQ HAVING:", seqHaving)
    //}

    sessionFilters, eventFilters := partitionFilters(s.Filter.Filters)
    sessionWhere := buildSessionWhere(sessionFilters)
    eventWhere, seqHaving := buildEventsWhere(eventFilters, s.Filter.EventsOrder)

    subQuery := fmt.Sprintf(
        "SELECT %s,\n"+
            " MIN(%s) AS first_event_ts,\n"+
            " MAX(%s) AS last_event_ts\n"+
            "FROM %s AS main\n"+
            "WHERE main.project_id = %%(project_id)s\n"+
            " AND %s >= toDateTime(%%(start_time)s/1000)\n"+
            " AND %s <= toDateTime(%%(end_time)s/1000)\n"+
            " AND (%s)\n"+
            "GROUP BY %s\n"+
            "HAVING %s",
        ColEventSessionID,
        ColEventTime,
        ColEventTime,
        TableEvents,
        ColEventTime,
        ColEventTime,
        strings.Join(eventWhere, " OR "),
        ColEventSessionID,
        seqHaving,
    )

    joinQuery := fmt.Sprintf(
        "SELECT *\n"+
            "FROM %s AS s\n"+
            "INNER JOIN (\n"+
            " SELECT DISTINCT ev.session_id, ev.`$current_url` AS url_path\n"+
            " FROM %s AS ev\n"+
            " WHERE ev.created_at >= toDateTime(%%(start_time)s/1000)\n"+
            " AND ev.created_at <= toDateTime(%%(end_time)s/1000)\n"+
            " AND ev.project_id = %%(project_id)s\n"+
            " AND ev.`$event_name` = 'LOCATION'\n"+
            ") AS extra_event USING (session_id)\n"+
            "WHERE s.project_id = %%(project_id)s\n"+
            " AND %s\n"+
            " AND s.datetime >= toDateTime(%%(start_time)s/1000)\n"+
            " AND s.datetime <= toDateTime(%%(end_time)s/1000)\n",
        TableSessions,
        TableEvents,
        ColSessionIsNotNull,
    )

    if len(sessionWhere) > 0 {
        joinQuery += " AND " + strings.Join(sessionWhere, " AND ") + "\n"
    }

    main := fmt.Sprintf(
        "SELECT s.session_id AS session_id, s.url_path\n"+
            "FROM (\n%s\n) AS f\n"+
            "INNER JOIN (\n%s) AS s\n"+
            " ON (s.session_id = f.session_id)\n",
        subQuery,
        joinQuery,
    )

    final := fmt.Sprintf(
        "SELECT COUNT(DISTINCT url_path) OVER () AS main_count,\n"+
            " url_path AS name,\n"+
            " COUNT(DISTINCT session_id) AS total,\n"+
            " COALESCE(SUM(COUNT(DISTINCT session_id)) OVER (), 0) AS total_count\n"+
            "FROM (\n%s) AS filtered_sessions\n"+
            "GROUP BY url_path\n"+
            "ORDER BY total DESC\n"+
            "LIMIT 200 OFFSET 0;",
        main,
    )

    return final
}
func partitionFilters(filters []FilterObj) (sessionFilters, eventFilters []FilterObj) {
    for _, f := range filters {
        if f.IsEvent {
            eventFilters = append(eventFilters, f)
        } else {
            sessionFilters = append(sessionFilters, f)
        }
    }
    return
}
func buildSessionWhere(filters []FilterObj) []string {
    var conds []string
    for _, f := range filters {
        switch f.Type {
        case UserCountry:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserCountry, concatValues(f.Value)))
        case UserCity:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserCity, concatValues(f.Value)))
        case UserState:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserState, concatValues(f.Value)))
        case UserId:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserID, concatValues(f.Value)))
        case UserAnonymousId:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserAnonymousID, concatValues(f.Value)))
        case UserOs:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserOS, concatValues(f.Value)))
        case UserBrowser:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserBrowser, concatValues(f.Value)))
        case UserDevice:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserDevice, concatValues(f.Value)))
        case Platform:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUserDeviceType, concatValues(f.Value)))
        case RevId:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColRevID, concatValues(f.Value)))
        case Referrer:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColBaseReferrer, concatValues(f.Value)))
        case Duration:
            if len(f.Value) == 2 {
                conds = append(conds, fmt.Sprintf("%s >= '%s'", ColDuration, f.Value[0]))
                conds = append(conds, fmt.Sprintf("%s <= '%s'", ColDuration, f.Value[1]))
            }
        case UtmSource:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUtmSource, concatValues(f.Value)))
        case UtmMedium:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUtmMedium, concatValues(f.Value)))
        case UtmCampaign:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColUtmCampaign, concatValues(f.Value)))
        case Metadata:
            conds = append(conds, fmt.Sprintf("%s = toString('%s')", ColMetadata1, concatValues(f.Value)))
        }
    }
    // add \n to each condition
    for i := range conds {
        conds[i] += "\n"
    }
    return conds
}
func parseOperator(op string) string {
    switch strings.ToLower(op) {
    case OperatorStringContains:
        return OperatorMathEq // interpret as "LIKE" if needed
    case OperatorStringIs, OperatorStringOn, "=", OperatorStringOnAny:
        return OperatorMathEq
    case OperatorStringStartsWith:
        // might interpret differently in real impl
        return OperatorMathEq
    case OperatorStringEndsWith:
        // might interpret differently in real impl
        return OperatorMathEq
    default:
        return OperatorMathEq
    }
}
func buildEventsWhere(filters []FilterObj, order EventOrder) (eventConditions []string, having string) {
    basicEventTypes := "(" +
        strings.Join([]string{
            fmt.Sprintf("%s = 'CLICK'", ColEventName),
            fmt.Sprintf("%s = 'INPUT'", ColEventName),
            fmt.Sprintf("%s = 'LOCATION'", ColEventName),
            fmt.Sprintf("%s = 'CUSTOM'", ColEventName),
            fmt.Sprintf("%s = 'REQUEST'", ColEventName),
        }, " OR ") + ")"

    var seq []string
    for _, f := range filters {
        switch f.Type {
        case Click:
            seq = append(seq, seqCond("CLICK", "selector", f))
        case Input:
            seq = append(seq, seqCond("INPUT", "label", f))
        case Location:
            seq = append(seq, seqCond("LOCATION", "url_path", f))
        case Custom:
            seq = append(seq, seqCond("CUSTOM", "name", f))
        case Fetch:
            seq = append(seq, seqFetchCond("REQUEST", f))
        case FetchFilterTypeFetchStatusCode:
            seq = append(seq, seqCond("REQUEST", "status", f))
        default:
            seq = append(seq, fmt.Sprintf("(%s = '%s')", ColEventName, strings.ToUpper(string(f.Type))))
        }
    }
    eventConditions = []string{basicEventTypes}

    // then => sequenceMatch
    // or   => OR
    // and  => AND
    switch order {
    case EventOrderThen:
        var pattern []string
        for i := range seq {
            pattern = append(pattern, fmt.Sprintf("(?%d)", i+1))
        }
        having = fmt.Sprintf("sequenceMatch('%s')(\n%s,\n%s)",
            strings.Join(pattern, ""), fmt.Sprintf("toUnixTimestamp(%s)", ColEventTime), strings.Join(seq, ",\n"))
    case EventOrderAnd:
        // build AND
        having = strings.Join(seq, " AND ")
    default:
        // default => OR
        var orParts []string
        for _, p := range seq {
            orParts = append(orParts, "("+p+")")
        }
        having = strings.Join(orParts, " OR ")
    }
    return
}
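(Editor's aside, not part of chartQuery.go.) As a small illustration of the "then" branch above, this standalone snippet assembles the same sequenceMatch pattern string for two hard-coded event conditions; the conditions are simplified stand-ins for what seqCond produces.

package main

import (
    "fmt"
    "strings"
)

func main() {
    // Two simplified stand-ins for the per-event conditions built by seqCond.
    conds := []string{
        "(main.`$event_name` = 'CLICK')",
        "(main.`$event_name` = 'INPUT')",
    }
    // Same pattern assembly as the EventOrderThen branch: "(?1)(?2)...".
    var pattern []string
    for i := range conds {
        pattern = append(pattern, fmt.Sprintf("(?%d)", i+1))
    }
    having := fmt.Sprintf("sequenceMatch('%s')(\ntoUnixTimestamp(main.created_at),\n%s)",
        strings.Join(pattern, ""), strings.Join(conds, ",\n"))
    fmt.Println(having)
}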
func seqCond(eventName, key string, f FilterObj) string {
    op := parseOperator(f.Operator)
    return fmt.Sprintf("(%s = '%s' AND JSONExtractString(toString(%s), '%s') %s '%s')",
        ColEventName, strings.ToUpper(eventName), ColEventProperties, key, op, concatValues(f.Value))
}

func seqFetchCond(eventName string, f FilterObj) string {
    w := []string{fmt.Sprintf("(%s = '%s')", ColEventName, strings.ToUpper(eventName))}
    var extras []string
    for _, c := range f.Filters {
        switch c.Type {
        case Fetch:
            if len(c.Value) > 0 {
                extras = append(extras, fmt.Sprintf("(%s = '%s')", ColEventURLPath, concatValues(c.Value)))
            }
        case FetchFilterTypeFetchStatusCode:
            if len(c.Value) > 0 {
                extras = append(extras, fmt.Sprintf("(%s = '%s')", ColEventStatus, concatValues(c.Value)))
            }
        default:
            // placeholder if needed
        }
    }
    if len(extras) > 0 {
        w = append(w, strings.Join(extras, " AND "))
    }
    return "(" + strings.Join(w, " AND ") + ")"
}

func concatValues(v []string) string {
    return strings.Join(v, "")
}
const jsonInput = `
{
  "startTimestamp": 1737043724664,
  "endTimestamp": 1737130124664,
  "series": [
    {
      "seriesId": 610,
      "name": "Series 1",
      "filter": {
        "filters": [
          {
            "type": "click",
            "isEvent": true,
            "value": ["DEPLOYMENT"],
            "operator": "on",
            "filters": []
          },
          {
            "type": "input",
            "isEvent": true,
            "value": ["a"],
            "operator": "contains",
            "filters": []
          },
          {
            "type": "location",
            "isEvent": true,
            "value": ["/en/using-or/"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "userCountry",
            "isEvent": false,
            "value": ["AD"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "userCity",
            "isEvent": false,
            "value": ["Mumbai"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "userState",
            "isEvent": false,
            "value": ["Maharashtra"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "userId",
            "isEvent": false,
            "value": ["test@test.com"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "userAnonymousId",
            "isEvent": false,
            "value": ["asd"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "userOs",
            "isEvent": false,
            "value": ["Mac OS X"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "userBrowser",
            "isEvent": false,
            "value": ["Chrome"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "userDevice",
            "isEvent": false,
            "value": ["iPhone"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "platform",
            "isEvent": false,
            "value": ["desktop"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "revId",
            "isEvent": false,
            "value": ["v1"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "referrer",
            "isEvent": false,
            "value": ["https://www.google.com/"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "duration",
            "isEvent": false,
            "value": ["60000", "6000000"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "tag",
            "isEvent": true,
            "value": ["8"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "utmSource",
            "isEvent": false,
            "value": ["aaa"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "utmMedium",
            "isEvent": false,
            "value": ["aa"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "utmCampaign",
            "isEvent": false,
            "value": ["aaa"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "metadata",
            "isEvent": false,
            "value": ["bbbb"],
            "operator": "is",
            "source": "userId",
            "filters": []
          },
          {
            "type": "custom",
            "isEvent": true,
            "value": ["test"],
            "operator": "is",
            "filters": []
          },
          {
            "type": "fetch",
            "isEvent": true,
            "value": [],
            "operator": "is",
            "filters": [
              {
                "type": "fetchUrl",
                "isEvent": false,
                "value": ["/ai/docs/chat"],
                "operator": "is",
                "filters": []
              },
              {
                "type": "fetchStatusCode",
                "isEvent": false,
                "value": ["400"],
                "operator": "=",
                "filters": []
              },
              {
                "type": "fetchMethod",
                "isEvent": false,
                "value": [],
                "operator": "is",
                "filters": []
              },
              {
                "type": "fetchDuration",
                "isEvent": false,
                "value": [],
                "operator": "=",
                "filters": []
              },
              {
                "type": "fetchRequestBody",
                "isEvent": false,
                "value": [],
                "operator": "is",
                "filters": []
              },
              {
                "type": "fetchResponseBody",
                "isEvent": false,
                "value": [],
                "operator": "is",
                "filters": []
              }
            ]
          }
        ],
        "eventsOrder": "then"
      }
    }
  ]
}
`