Merge pull request #338 from openreplay/dev
v1.5.1: Added stats to db and ender

Commit 366ae808d4
6 changed files with 130 additions and 81 deletions

@@ -43,7 +43,8 @@ ENV TZ=UTC \
    AWS_REGION_ASSETS=eu-central-1 \
    CACHE_ASSETS=true \
    ASSETS_SIZE_LIMIT=6291456 \
-   FS_CLEAN_HRS=72
+   FS_CLEAN_HRS=72 \
+   LOG_QUEUE_STATS_INTERVAL_SEC=60

ARG SERVICE_NAME
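
Note: LOG_QUEUE_STATS_INTERVAL_SEC is new in this release and is set to 60 seconds in the image. The db and ender services below read it through env.Int and hand it to the new stats logger. A minimal, self-contained sketch of that flow, using os.Getenv/strconv as a stand-in for the repo's env helper (an assumption, since env.Int itself is not part of this diff):

package main

import (
    "log"
    "os"
    "strconv"
    "time"
)

func main() {
    // Stand-in for env.Int("LOG_QUEUE_STATS_INTERVAL_SEC") used in the services below.
    sec, err := strconv.Atoi(os.Getenv("LOG_QUEUE_STATS_INTERVAL_SEC"))
    if err != nil || sec <= 0 {
        sec = 60 // value set in the Dockerfile above
    }
    // queue.go (new file below) turns the same value into a ticker with time.Tick.
    for range time.Tick(time.Duration(sec) * time.Second) {
        log.Println("stats interval elapsed")
    }
}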

backend/pkg/log/queue.go (new file, 77 lines)
@@ -0,0 +1,77 @@
package log

import (
    "time"
    "fmt"
    "log"

    "openreplay/backend/pkg/messages"
    "openreplay/backend/pkg/queue/types"
    //"openreplay/backend/pkg/env"
)

type partitionStats struct {
    maxts  int64
    mints  int64
    lastts int64
    lastID uint64
    count  int
}

type queueStats struct {
    prts map[int32]*partitionStats
    tick <-chan time.Time
}

func NewQueueStats(sec int) *queueStats {
    return &queueStats{
        prts: make(map[int32]*partitionStats),
        tick: time.Tick(time.Duration(sec) * time.Second),
    }
}

func (qs *queueStats) HandleAndLog(sessionID uint64, m *types.Meta) {
    prti := int32(sessionID % 16) // TODO use GetKeyPartition from kafka/key.go
    prt, ok := qs.prts[prti]
    if !ok {
        qs.prts[prti] = &partitionStats{}
        prt = qs.prts[prti]
    }

    if prt.maxts < m.Timestamp {
        prt.maxts = m.Timestamp
    }
    if prt.mints > m.Timestamp || prt.mints == 0 {
        prt.mints = m.Timestamp
    }
    prt.lastts = m.Timestamp
    prt.lastID = m.ID
    prt.count += 1

    select {
    case <-qs.tick:
        qs.LogThenReset()
    default:
    }
}

func (qs *queueStats) LogThenReset() {
    s := "Queue Statistics: "
    for i, p := range qs.prts {
        s = fmt.Sprintf("%v | %v:: lastTS %v, lastID %v, count %v, maxTS %v, minTS %v",
            s, i, p.lastts, p.lastID, p.count, p.maxts, p.mints)
    }
    log.Println(s)
    // reset
    qs.prts = make(map[int32]*partitionStats)
}

// TODO: list of message id to log (mb filter function with callback in messages/utils.go or something)
func LogMessage(s string, sessionID uint64, msg messages.Message, m *types.Meta) {
    log.Printf("%v | SessionID: %v, Queue info: %v, Message: %v", s, sessionID, m, msg)
}
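
The flush happens inside HandleAndLog through a non-blocking select, so stats are only logged when a message arrives after the interval has elapsed; idle partitions stay silent until traffic resumes. A minimal usage sketch (assuming the openreplay/backend module path used by the imports above, and that types.Meta exposes the Timestamp and ID fields referenced here; the loop stands in for the Kafka consumer callbacks shown in the db and ender changes below):

package main

import (
    "time"

    logger "openreplay/backend/pkg/log"
    "openreplay/backend/pkg/queue/types"
)

func main() {
    // One counter set per partition; logs and resets at most once per interval.
    statsLogger := logger.NewQueueStats(60)

    // Stand-in for the per-message consumer callback.
    for id := uint64(1); id <= 100000; id++ {
        meta := &types.Meta{
            ID:        id,
            Timestamp: time.Now().UnixNano() / int64(time.Millisecond),
        }
        statsLogger.HandleAndLog(id, meta) // flushes via LogThenReset when the tick fires
    }
}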
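
The hard-coded sessionID % 16 above carries a TODO pointing at GetKeyPartition in kafka/key.go, which is not part of this diff. A hypothetical helper of that shape, purely for illustration (the real partition count and key mapping may differ):

package main

import "fmt"

// getKeyPartition is a hypothetical stand-in for kafka/key.go's GetKeyPartition:
// it maps a session ID onto a partition index the same way queueStats does today,
// with the partition count made explicit instead of hard-coded.
func getKeyPartition(key uint64, numPartitions uint64) int32 {
    return int32(key % numPartitions)
}

func main() {
    fmt.Println(getKeyPartition(1234567890, 16)) // same result as sessionID % 16 above
}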

@@ -15,6 +15,7 @@ import (
    "openreplay/backend/pkg/queue"
    "openreplay/backend/pkg/queue/types"
    "openreplay/backend/services/db/heuristics"
+   logger "openreplay/backend/pkg/log"
)

var pg *cache.PGCache

@@ -28,13 +29,18 @@ func main() {

    heurFinder := heuristics.NewHandler()

+   statsLogger := logger.NewQueueStats(env.Int("LOG_QUEUE_STATS_INTERVAL_SEC"))
+
    consumer := queue.NewMessageConsumer(
        env.String("GROUP_DB"),
        []string{
            env.String("TOPIC_RAW_IOS"),
            env.String("TOPIC_TRIGGER"),
        },
-       func(sessionID uint64, msg messages.Message, _ *types.Meta) {
+       func(sessionID uint64, msg messages.Message, meta *types.Meta) {
+           statsLogger.HandleAndLog(sessionID, meta)
+
            if err := insertMessage(sessionID, msg); err != nil {
                if !postgres.IsPkeyViolation(err) {
                    log.Printf("Message Insertion Error %v, SessionID: %v, Message: %v", err, sessionID, msg)

@@ -64,8 +70,7 @@ func main() {
                return
            }

-           err = insertStats(session, msg)
-           if err != nil {
+           if err := insertStats(session, msg); err != nil {
                log.Printf("Stats Insertion Error %v; Session: %v, Message %v", err, session, msg)
            }
        })

@@ -13,6 +13,7 @@ import (
    "openreplay/backend/pkg/messages"
    "openreplay/backend/pkg/queue"
    "openreplay/backend/pkg/queue/types"
+   logger "openreplay/backend/pkg/log"
    "openreplay/backend/services/ender/builder"
)

@@ -23,7 +24,8 @@ func main() {
    TOPIC_TRIGGER := env.String("TOPIC_TRIGGER")

    builderMap := builder.NewBuilderMap()
-   var lastTs int64 = 0
+
+   statsLogger := logger.NewQueueStats(env.Int("LOG_QUEUE_STATS_INTERVAL_SEC"))

    producer := queue.NewProducer()
    consumer := queue.NewMessageConsumer(

@@ -33,11 +35,8 @@ func main() {
            env.String("TOPIC_RAW_IOS"),
        },
        func(sessionID uint64, msg messages.Message, meta *types.Meta) {
-           lastTs = meta.Timestamp
+           statsLogger.HandleAndLog(sessionID, meta)
            builderMap.HandleMessage(sessionID, msg, msg.Meta().Index)
-           // builderMap.IterateSessionReadyMessages(sessionID, lastTs, func(readyMsg messages.Message) {
-           //     producer.Produce(TOPIC_TRIGGER, sessionID, messages.Encode(readyMsg))
-           // })
        },
    )
    consumer.DisableAutoCommit()

@@ -8,6 +8,7 @@ const HOST = '0.0.0.0';
const PORT = 9000;

var app = express();
+var wsapp = express();
let debug = process.env.debug === "1" || false;
const request_logger = (identity) => {
    return (req, res, next) => {

@@ -22,15 +23,20 @@ const request_logger = (identity) => {
    }
};
app.use(request_logger("[app]"));
+wsapp.use(request_logger("[wsapp]"));

app.use('/sourcemaps', sourcemapsReaderServer);
app.use('/assist', peerRouter);
+wsapp.use('/assist', socket.wsRouter);

const server = app.listen(PORT, HOST, () => {
    console.log(`App listening on http://${HOST}:${PORT}`);
    console.log('Press Ctrl+C to quit.');
});
+
+const wsserver = wsapp.listen(PORT + 1, HOST, () => {
+    console.log(`WS App listening on http://${HOST}:${PORT + 1}`);
+    console.log('Press Ctrl+C to quit.');
+});
const peerServer = ExpressPeerServer(server, {
    debug: true,
    path: '/',

@@ -42,38 +48,6 @@ peerServer.on('disconnect', peerDisconnect);
peerServer.on('error', peerError);
app.use('/', peerServer);
app.enable('trust proxy');
-
-const {App} = require("uWebSockets.js");
-const PREFIX = process.env.prefix || '/assist'
-
-const uapp = new App();
-
-const healthFn = (res, req) => {
-    res.writeStatus('200 OK').end('ok!');
-}
-uapp.get(PREFIX, healthFn);
-uapp.get(`${PREFIX}/`, healthFn);
-
-const uWrapper = function (fn) {
-    return (res, req) => fn(req, res);
-}
-uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list`, uWrapper(socket.handlers.socketsList));
-uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-list/:projectKey`, uWrapper(socket.handlers.socketsListByProject));
-
-uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live`, uWrapper(socket.handlers.socketsLive));
-uapp.get(`${PREFIX}/${process.env.S3_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject));
-
-socket.start(uapp);
-
-uapp.listen(HOST, PORT + 1, (token) => {
-    if (!token) {
-        console.warn("port already in use");
-    }
-    console.log(`WS App listening on http://${HOST}:${PORT + 1}`);
-    console.log('Press Ctrl+C to quit.');
-});
-
-module.exports = {uapp, server};
+wsapp.enable('trust proxy');
+socket.start(wsserver);
+module.exports = {wsserver, server};

@@ -1,7 +1,9 @@
const _io = require('socket.io');
+const express = require('express');
const uaParser = require('ua-parser-js');
const geoip2Reader = require('@maxmind/geoip2-node').Reader;
var {extractPeerId} = require('./peerjs-server');
+var wsRouter = express.Router();
const IDENTITIES = {agent: 'agent', session: 'session'};
const NEW_AGENT = "NEW_AGENT";
const NO_AGENTS = "NO_AGENT";

@@ -9,11 +11,11 @@ const AGENT_DISCONNECT = "AGENT_DISCONNECTED";
const AGENTS_CONNECTED = "AGENTS_CONNECTED";
const NO_SESSIONS = "SESSION_DISCONNECTED";
const SESSION_ALREADY_CONNECTED = "SESSION_ALREADY_CONNECTED";
// const wsReconnectionTimeout = process.env.wsReconnectionTimeout | 10 * 1000;

let io;

let debug = process.env.debug === "1" || false;
-const socketsList = function (req, res) {
+wsRouter.get(`/${process.env.S3_KEY}/sockets-list`, function (req, res) {
    debug && console.log("[WS]looking for all available sessions");
    let liveSessions = {};
    for (let peerId of io.sockets.adapter.rooms.keys()) {

@@ -23,10 +25,11 @@ const socketsList = function (req, res) {
            liveSessions[projectKey].push(sessionId);
        }
    }
-    res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify({"data": liveSessions}));
-}
-const socketsListByProject = function (req, res) {
-    req.params = {projectKey: req.getParameter(0)};
+    res.statusCode = 200;
+    res.setHeader('Content-Type', 'application/json');
+    res.end(JSON.stringify({"data": liveSessions}));
+});
+wsRouter.get(`/${process.env.S3_KEY}/sockets-list/:projectKey`, function (req, res) {
    debug && console.log(`[WS]looking for available sessions for ${req.params.projectKey}`);
    let liveSessions = {};
    for (let peerId of io.sockets.adapter.rooms.keys()) {

@@ -36,9 +39,12 @@ const socketsListByProject = function (req, res) {
            liveSessions[projectKey].push(sessionId);
        }
    }
-    res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify({"data": liveSessions[req.params.projectKey] || []}));
-}
-const socketsLive = async function (req, res) {
+    res.statusCode = 200;
+    res.setHeader('Content-Type', 'application/json');
+    res.end(JSON.stringify({"data": liveSessions[req.params.projectKey] || []}));
+});
+
+wsRouter.get(`/${process.env.S3_KEY}/sockets-live`, async function (req, res) {
    debug && console.log("[WS]looking for all available LIVE sessions");
    let liveSessions = {};
    for (let peerId of io.sockets.adapter.rooms.keys()) {

@@ -53,10 +59,12 @@ const socketsLive = async function (req, res) {
            }
        }
    }
-    res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify({"data": liveSessions}));
-}
-const socketsLiveByProject = async function (req, res) {
-    req.params = {projectKey: req.getParameter(0)};
+
+    res.statusCode = 200;
+    res.setHeader('Content-Type', 'application/json');
+    res.end(JSON.stringify({"data": liveSessions}));
+});
+wsRouter.get(`/${process.env.S3_KEY}/sockets-live/:projectKey`, async function (req, res) {
    debug && console.log(`[WS]looking for available LIVE sessions for ${req.params.projectKey}`);
    let liveSessions = {};
    for (let peerId of io.sockets.adapter.rooms.keys()) {

@@ -71,8 +79,10 @@ const socketsLiveByProject = async function (req, res) {
            }
        }
    }
-    res.writeStatus('200 OK').writeHeader('Content-Type', 'application/json').end(JSON.stringify({"data": liveSessions[req.params.projectKey] || []}));
-}
+    res.statusCode = 200;
+    res.setHeader('Content-Type', 'application/json');
+    res.end(JSON.stringify({"data": liveSessions[req.params.projectKey] || []}));
+});

const findSessionSocketId = async (io, peerId) => {
    const connected_sockets = await io.in(peerId).fetchSockets();

@@ -148,19 +158,16 @@ function extractSessionInfo(socket) {
}

module.exports = {
+    wsRouter,
    start: (server) => {
-        io = new _io.Server({
+        io = _io(server, {
            maxHttpBufferSize: 1e6,
            cors: {
                origin: "*",
                methods: ["GET", "POST", "PUT"]
            },
-            path: '/ws-assist/socket',
-            transports: ['websocket'],
-            // upgrade: false
+            path: '/socket'
        });
-        io.attachApp(server);

        io.on('connection', async (socket) => {
            debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);

@@ -200,13 +207,10 @@ module.exports = {
            }

            socket.on('disconnect', async () => {
-                // console.log(`${socket.id} disconnected from ${socket.peerId}, waiting ${wsReconnectionTimeout / 1000}s before checking remaining`);
                debug && console.log(`${socket.id} disconnected from ${socket.peerId}`);
                if (socket.identity === IDENTITIES.agent) {
                    socket.to(socket.peerId).emit(AGENT_DISCONNECT, socket.id);
                }
-                // wait a little bit before notifying everyone
-                // setTimeout(async () => {
                debug && console.log("checking for number of connected agents and sessions");
                let {c_sessions, c_agents} = await sessions_agents_count(io, socket);
                if (c_sessions === -1 && c_agents === -1) {

@@ -220,9 +224,6 @@ module.exports = {
                    debug && console.log(`notifying everyone in ${socket.peerId} about no AGENTS`);
                    socket.to(socket.peerId).emit(NO_AGENTS);
                }
-
-
-                // }, wsReconnectionTimeout);
            });

            socket.onAny(async (eventName, ...args) => {

@@ -244,9 +245,7 @@ module.exports = {
            });

        });
-        console.log("WS server started");
-        debug ? console.log("Debugging enabled.") : console.log("Debugging disabled, set debug=\"1\" to enable debugging.");
-
+        console.log("WS server started")
        setInterval((io) => {
            try {
                let count = 0;

@@ -269,11 +268,5 @@ module.exports = {
                console.error(e);
            }
        }, 20000, io);
    },
-    handlers: {
-        socketsList,
-        socketsListByProject,
-        socketsLive,
-        socketsLiveByProject
-    }
};