feat(backend/assets): service refactoring
parent 356bf32bfc
commit 3449440de3

7 changed files with 40 additions and 18 deletions
@@ -23,7 +23,7 @@ function build_service() {
 	image="$1"
 	echo "BUILDING $image"
 	case "$image" in
-	http | db | sink | ender | heuristics | storage)
+	http | db | sink | ender | heuristics | storage | assets)
 		echo build http
 		docker build -t ${DOCKER_REPO:-'local'}/$image:${git_sha1} --platform linux/amd64 --build-arg SERVICE_NAME=$image -f ./cmd/Dockerfile .
 		[[ $PUSH_IMAGE -eq 1 ]] && {
@@ -2,35 +2,34 @@ package main
 
 import (
 	"log"
-	"time"
-
 	"os"
 	"os/signal"
 	"syscall"
+	"time"
 
-	"openreplay/backend/pkg/env"
+	"openreplay/backend/internal/assets"
+	"openreplay/backend/internal/assets/cacher"
+	config "openreplay/backend/internal/config/assets"
 	"openreplay/backend/pkg/messages"
 	"openreplay/backend/pkg/queue"
 	"openreplay/backend/pkg/queue/types"
-	"openreplay/backend/services/assets/cacher"
 )
 
 func main() {
 	log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
 
-	GROUP_CACHE := env.String("GROUP_CACHE")
-	TOPIC_CACHE := env.String("TOPIC_CACHE")
+	cfg := config.New()
 
 	cacher := cacher.NewCacher(
-		env.String("AWS_REGION"),
-		env.String("S3_BUCKET_ASSETS"),
-		env.String("ASSETS_ORIGIN"),
-		env.Int("ASSETS_SIZE_LIMIT"),
+		cfg.AWSRegion,
+		cfg.S3BucketAssets,
+		cfg.AssetsOrigin,
+		cfg.AssetsSizeLimit,
 	)
 
 	consumer := queue.NewMessageConsumer(
-		GROUP_CACHE,
-		[]string{TOPIC_CACHE},
+		cfg.GroupCache,
+		[]string{cfg.TopicCache},
 		func(sessionID uint64, message messages.Message, e *types.Meta) {
 			switch msg := message.(type) {
 			case *messages.AssetCache:
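Note: the removed env.String/env.Int calls come from openreplay/backend/pkg/env; the commit replaces them in main with a single typed config (see the new config.go below). A minimal sketch of what such helpers plausibly look like, assuming unset required variables are fatal (the real package may behave differently):

package env

import (
	"log"
	"os"
	"strconv"
)

// String returns a required environment variable, failing fast when unset.
// Sketch only: the real pkg/env implementation is not shown in this diff.
func String(key string) string {
	v := os.Getenv(key)
	if v == "" {
		log.Fatalf("env: %s is required", key)
	}
	return v
}

// Int returns a required integer environment variable.
func Int(key string) int {
	n, err := strconv.Atoi(String(key))
	if err != nil {
		log.Fatalf("env: %s must be an integer: %v", key, err)
	}
	return n
}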
@@ -39,7 +38,7 @@ func main() {
 			if msg.Source != "js_exception" {
 				return
 			}
-			sourceList, err := extractJSExceptionSources(&msg.Payload)
+			sourceList, err := assets.ExtractJSExceptionSources(&msg.Payload)
 			if err != nil {
 				log.Printf("Error on source extraction: %v", err)
 				return
@@ -52,12 +51,12 @@ func main() {
 		true,
 	)
 
-	tick := time.Tick(20 * time.Minute)
-	log.Printf("Cacher service started\n")
-
 	sigchan := make(chan os.Signal, 1)
 	signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
 
+	log.Printf("Cacher service started\n")
+	tick := time.Tick(20 * time.Minute)
+
 	for {
 		select {
 		case sig := <-sigchan:
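Note: this hunk moves the ticker and the startup log after signal registration, so a SIGINT/SIGTERM arriving during startup is not missed. A self-contained sketch of the resulting pattern, with the tick branch reduced to a placeholder (the real loop also drives the consumer and cacher):

package main

import (
	"log"
	"os"
	"os/signal"
	"syscall"
	"time"
)

func main() {
	// Register for shutdown signals before announcing readiness.
	sigchan := make(chan os.Signal, 1)
	signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)

	log.Printf("Cacher service started\n")
	tick := time.Tick(20 * time.Minute)
	for {
		select {
		case sig := <-sigchan:
			log.Printf("Caught signal %v: terminating", sig)
			return
		case <-tick:
			log.Println("periodic cache maintenance (placeholder)")
		}
	}
}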
@@ -1,4 +1,4 @@
-package main
+package assets
 
 import (
 	"encoding/json"
@@ -9,7 +9,7 @@ type frame struct {
 	FileName string `json:"fileName"`
 }
 
-func extractJSExceptionSources(payload *string) ([]string, error) {
+func ExtractJSExceptionSources(payload *string) ([]string, error) {
 	var frameList []frame
 	err := json.Unmarshal([]byte(*payload), &frameList)
 	if err != nil {
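Note: judging by the frame struct above, the payload is a JSON array of stack frames. A hypothetical call across the new package boundary (whether the result is deduplicated or filtered is not visible in this diff):

payload := `[{"fileName":"https://cdn.example.com/app.js"},{"fileName":"https://cdn.example.com/vendor.js"}]`
sourceList, err := assets.ExtractJSExceptionSources(&payload)
if err != nil {
	log.Printf("Error on source extraction: %v", err)
	return
}
// sourceList presumably contains each frame's fileName:
// ["https://cdn.example.com/app.js" "https://cdn.example.com/vendor.js"]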
backend/internal/config/assets/config.go (new file, 23 lines)
@@ -0,0 +1,23 @@
+package assets
+
+import "openreplay/backend/pkg/env"
+
+type Config struct {
+	GroupCache      string
+	TopicCache      string
+	AWSRegion       string
+	S3BucketAssets  string
+	AssetsOrigin    string
+	AssetsSizeLimit int
+}
+
+func New() *Config {
+	return &Config{
+		GroupCache:      env.String("GROUP_CACHE"),
+		TopicCache:      env.String("TOPIC_CACHE"),
+		AWSRegion:       env.String("AWS_REGION"),
+		S3BucketAssets:  env.String("S3_BUCKET_ASSETS"),
+		AssetsOrigin:    env.String("ASSETS_ORIGIN"),
+		AssetsSizeLimit: env.Int("ASSETS_SIZE_LIMIT"),
+	}
+}
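Note: condensed from the main.go hunk above, the service now reads its environment once at startup and passes typed values on; config.New presumably fails fast (via the env helpers) if a required variable is missing:

cfg := config.New()

cacher := cacher.NewCacher(
	cfg.AWSRegion,
	cfg.S3BucketAssets,
	cfg.AssetsOrigin,
	cfg.AssetsSizeLimit,
)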
backend/services/assets/build_hack (new empty file, 0 lines)