Compare commits
31 commits: main...dev-pr-v1

| Author | SHA1 | Date |
|---|---|---|
| | f41ef37f4e | |
| | ecb4f42461 | |
| | d2a6115845 | |
| | 4784c1548d | |
| | 33312250cd | |
| | 584a08161e | |
| | a39fbc7afc | |
| | 2fd3103882 | |
| | 31e97bae0e | |
| | e8d3ca63dc | |
| | bb73f5e577 | |
| | fdeda67577 | |
| | d9106b3d7a | |
| | 37f83cfd12 | |
| | bfe6b5b480 | |
| | b4e614d867 | |
| | a712ab45e4 | |
| | 53781fe655 | |
| | 26f465ec69 | |
| | 3bbff218c4 | |
| | 112ebd3105 | |
| | 2a49c930d0 | |
| | 810ec92dbe | |
| | cd915a535c | |
| | d0e74076e0 | |
| | 234ddb2d0f | |
| | 4bb08d2a5e | |
| | f6123c1c08 | |
| | 19935dc105 | |
| | bdf5dbba0a | |
| | e08408ecfa | |

73 changed files with 1234 additions and 891 deletions
@@ -47,8 +47,9 @@ OpenReplay is a session replay suite you can host yourself, that lets you see wh
- **Session replay:** Lets you relive your users' experience, see where they struggle and how it affects their behavior. Each session replay is automatically analyzed based on heuristics, for easy triage.
- **DevTools:** It's like debugging in your own browser. OpenReplay provides you with the full context (network activity, JS errors, store actions/state and 40+ metrics) so you can instantly reproduce bugs and understand performance issues.
- **Assist:** Helps you support your users by seeing their live screen and instantly hopping on call (WebRTC) with them without requiring any 3rd-party screen sharing software.
- **Feature flags:** Enable or disable a feature, make gradual releases and A/B test, all without redeploying your app.
- **Omni-search:** Search and filter by almost any user action/criteria, session attribute or technical event, so you can answer any question. No instrumentation required.
- **Funnels:** For surfacing the most impactful issues causing conversion and revenue loss.
- **Analytics:** For surfacing the most impactful issues causing conversion and revenue loss.
- **Fine-grained privacy controls:** Choose what to capture, what to obscure or what to ignore so user data doesn't even reach your servers.
- **Plugins oriented:** Get to the root cause even faster by tracking application state (Redux, VueX, MobX, NgRx, Pinia and Zustand) and logging GraphQL queries (Apollo, Relay) and Fetch/Axios requests.
- **Integrations:** Sync your backend logs with your session replays and see what happened front-to-back. OpenReplay supports Sentry, Datadog, CloudWatch, Stackdriver, Elastic and more.
@@ -18,7 +18,7 @@ class JWTAuth(HTTPBearer):
if credentials:
if not credentials.scheme == "Bearer":
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.")
jwt_payload = authorizers.jwt_authorizer(credentials.scheme + " " + credentials.credentials)
jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials)
auth_exists = jwt_payload is not None \
and users.auth_exists(user_id=jwt_payload.get("userId", -1),
tenant_id=jwt_payload.get("tenantId", -1),

@@ -27,18 +27,13 @@ class JWTAuth(HTTPBearer):
if jwt_payload is None \
or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
or not auth_exists:
print("JWTAuth: Token issue")
if jwt_payload is not None:
print(jwt_payload)
print(f"JWTAuth: user_id={jwt_payload.get('userId')} tenant_id={jwt_payload.get('tenantId')}")
if jwt_payload is None:
print("JWTAuth: jwt_payload is None")
print(credentials.scheme + " " + credentials.credentials)
if jwt_payload is not None and jwt_payload.get("iat") is None:
print("JWTAuth: iat is None")
if jwt_payload is not None and jwt_payload.get("aud") is None:
print("JWTAuth: aud is None")
if jwt_payload is not None and not auth_exists:
if jwt_payload.get("iat") is None:
print("JWTAuth: iat is None")
if jwt_payload.get("aud") is None:
print("JWTAuth: aud is None")
if not auth_exists:
print("JWTAuth: not users.auth_exists")

raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")

@@ -47,7 +42,6 @@ class JWTAuth(HTTPBearer):
print("JWTAuth: User not found.")
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.")
jwt_payload["authorizer_identity"] = "jwt"
print(jwt_payload)
request.state.authorizer_identity = "jwt"
request.state.currentContext = schemas.CurrentContext(tenant_id=jwt_payload.get("tenantId", -1),
user_id=jwt_payload.get("userId", -1),
@@ -6,13 +6,12 @@ from chalicelib.core import tenants
from chalicelib.core import users


def jwt_authorizer(token):
token = token.split(" ")
if len(token) != 2 or token[0].lower() != "bearer":
def jwt_authorizer(scheme: str, token: str):
if scheme.lower() != "bearer":
return None
try:
payload = jwt.decode(
token[1],
token,
config("jwt_secret"),
algorithms=config("jwt_algorithm"),
audience=[f"front:{helper.get_stage_name()}"]

@@ -22,6 +21,7 @@ def jwt_authorizer(token):
return None
except BaseException as e:
print("! JWT Base Exception")
print(e)
return None
return payload
@@ -548,16 +548,12 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
WHERE user_id = %(userId)s
AND deleted_at IS NULL
LIMIT 1;""",
{"userId": user_id})
{"userId": user_id})
)
r = cur.fetchone()
return r is not None \
and r.get("jwt_iat") is not None \
and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
or (jwt_aud.startswith("plugin") \
and (r["changed_at"] is None \
or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
)
and abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1


def change_jwt_iat(user_id):

@@ -566,7 +562,7 @@ def change_jwt_iat(user_id):
SET jwt_iat = timezone('utc'::text, now())
WHERE user_id = %(user_id)s
RETURNING jwt_iat;""",
{"user_id": user_id})
{"user_id": user_id})
cur.execute(query)
return cur.fetchone().get("jwt_iat")
@@ -18,20 +18,6 @@ from routers.base import get_routers
public_app, app, app_apikey = get_routers()


@app.post('/{projectId}/sessions/search', tags=["sessions"])
def sessions_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id)
return {'data': data}


@app.post('/{projectId}/sessions/search/ids', tags=["sessions"])
def session_ids_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True)
return {'data': data}


@app.get('/{projectId}/events/search', tags=["events"])
def events_search(projectId: int, q: str,
type: Union[schemas.FilterType, schemas.EventType,
@@ -207,6 +207,20 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba
}


@app.post('/{projectId}/sessions/search', tags=["sessions"])
def sessions_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id)
return {'data': data}


@app.post('/{projectId}/sessions/search/ids', tags=["sessions"])
def session_ids_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True)
return {'data': data}


@app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"])
def get_session_events(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)):
@@ -58,7 +58,7 @@ func main() {
messages.MsgFetch, messages.MsgNetworkRequest, messages.MsgGraphQL, messages.MsgStateAction,
messages.MsgSetInputTarget, messages.MsgSetInputValue, messages.MsgCreateDocument, messages.MsgMouseClick,
messages.MsgSetPageLocation, messages.MsgPageLoadTiming, messages.MsgPageRenderTiming,
messages.MsgInputEvent, messages.MsgPageEvent, messages.MsgMouseThrashing, messages.MsgInputChange,
messages.MsgPageEvent, messages.MsgMouseThrashing, messages.MsgInputChange,
messages.MsgUnbindNodes}

// Init consumer
@@ -6,6 +6,7 @@ import (
"openreplay/backend/pkg/db/redis"
"openreplay/backend/pkg/memory"
"openreplay/backend/pkg/projects"
"openreplay/backend/pkg/queue/types"
"openreplay/backend/pkg/sessions"
"os"
"os/signal"

@@ -94,28 +95,69 @@ func main() {
case <-tick:
failedSessionEnds := make(map[uint64]uint64)
duplicatedSessionEnds := make(map[uint64]uint64)
negativeDuration := make(map[uint64]uint64)
shorterDuration := make(map[uint64]int64)
diffDuration := make(map[uint64]int64)
noSessionInDB := make(map[uint64]uint64)
updatedDurations := 0
newSessionEnds := 0

type SessionEndType int
const (
FailedSessionEnd SessionEndType = iota + 1
DuplicatedSessionEnd
NegativeDuration
ShorterDuration
DiffDuration
NewSessionEnd
NoSessionInDB
)

// Find ended sessions and send notification to other services
sessionEndGenerator.HandleEndedSessions(func(sessionID uint64, timestamp uint64) bool {
sessionEndGenerator.HandleEndedSessions(func(sessionID uint64, timestamp uint64) (bool, int) {
msg := &messages.SessionEnd{Timestamp: timestamp}
currDuration, err := sessManager.GetDuration(sessionID)
if err != nil {
log.Printf("getSessionDuration failed, sessID: %d, err: %s", sessionID, err)
}
sess, err := sessManager.Get(sessionID)
if err != nil {
log.Printf("can't get session from database to compare durations, sessID: %d, err: %s", sessionID, err)
} else {
newDur := timestamp - sess.Timestamp
// Skip if session was ended before with same duration
if currDuration == newDur {
duplicatedSessionEnds[sessionID] = currDuration
return true, int(DuplicatedSessionEnd)
}
// Skip if session was ended before with longer duration
if currDuration > newDur {
shorterDuration[sessionID] = int64(currDuration) - int64(newDur)
return true, int(ShorterDuration)
}
}
newDuration, err := sessManager.UpdateDuration(sessionID, msg.Timestamp)
if err != nil {
if strings.Contains(err.Error(), "integer out of range") {
// Skip session with broken duration
failedSessionEnds[sessionID] = timestamp
return true
return true, int(FailedSessionEnd)
}
if strings.Contains(err.Error(), "is less than zero for uint64") {
negativeDuration[sessionID] = timestamp
return true, int(NegativeDuration)
}
if strings.Contains(err.Error(), "no rows in result set") {
noSessionInDB[sessionID] = timestamp
return true, int(NoSessionInDB)
}
log.Printf("can't save sessionEnd to database, sessID: %d, err: %s", sessionID, err)
return false
return false, 0
}
// Check one more time just in case
if currDuration == newDuration {
// Skip session end duplicate
duplicatedSessionEnds[sessionID] = currDuration
return true
return true, int(DuplicatedSessionEnd)
}
if cfg.UseEncryption {
if key := storage.GenerateEncryptionKey(); key != nil {

@@ -128,22 +170,40 @@ func main() {
}
if err := producer.Produce(cfg.TopicRawWeb, sessionID, msg.Encode()); err != nil {
log.Printf("can't send sessionEnd to topic: %s; sessID: %d", err, sessionID)
return false
return false, 0
}
return true
if currDuration != 0 {
diffDuration[sessionID] = int64(newDuration) - int64(currDuration)
updatedDurations++
} else {
newSessionEnds++
}
return true, int(NewSessionEnd)
})
if len(failedSessionEnds) > 0 {
log.Println("sessions with wrong duration:", failedSessionEnds)
if n := len(failedSessionEnds); n > 0 {
log.Println("sessions with wrong duration:", n, failedSessionEnds)
}
if len(duplicatedSessionEnds) > 0 {
log.Println("session end duplicates:", duplicatedSessionEnds)
if n := len(negativeDuration); n > 0 {
log.Println("sessions with negative duration:", n, negativeDuration)
}
if n := len(noSessionInDB); n > 0 {
log.Printf("sessions without info in DB: %d, %v", n, noSessionInDB)
}
log.Printf("[INFO] failed: %d, negative: %d, shorter: %d, same: %d, updated: %d, new: %d, not found: %d",
len(failedSessionEnds), len(negativeDuration), len(shorterDuration), len(duplicatedSessionEnds),
updatedDurations, newSessionEnds, len(noSessionInDB))
producer.Flush(cfg.ProducerTimeout)
if err := consumer.CommitBack(intervals.EVENTS_BACK_COMMIT_GAP); err != nil {
log.Printf("can't commit messages with offset: %s", err)
}
case msg := <-consumer.Rebalanced():
log.Println(msg)
log.Printf("Rebalanced event, type: %s, partitions: %+v", msg.Type, msg.Partitions)
if msg.Type == types.RebalanceTypeRevoke {
sessionEndGenerator.Disable()
} else {
sessionEndGenerator.ActivePartitions(msg.Partitions)
sessionEndGenerator.Enable()
}
default:
if !memoryManager.HasFreeMemory() {
continue
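For readers skimming the hunk above: the session-end callback now returns a reason code next to the success flag, so the tick branch can aggregate per-reason counters and emit a single summary log line. The sketch below is a minimal, self-contained restatement of that contract; the `SessionEndType` names mirror the diff, while the handler body and the sample session map are hypothetical stand-ins for the real `sessManager`/`producer` calls.

```go
package main

import "log"

// SessionEndType mirrors the classification constants introduced in the diff.
type SessionEndType int

const (
	FailedSessionEnd SessionEndType = iota + 1
	DuplicatedSessionEnd
	NegativeDuration
	ShorterDuration
	DiffDuration
	NewSessionEnd
	NoSessionInDB
)

// EndedSessionHandler matches the new signature: handled? plus a reason code.
type EndedSessionHandler func(sessionID, timestamp uint64) (bool, int)

// classifyAndCount aggregates the reason codes returned by the handler and
// prints one summary line, the way the tick branch in the diff does.
func classifyAndCount(sessions map[uint64]uint64, handler EndedSessionHandler) {
	counters := make(map[SessionEndType]int)
	for id, ts := range sessions {
		if ok, reason := handler(id, ts); ok {
			counters[SessionEndType(reason)]++
		}
	}
	log.Printf("[INFO] same: %d, new: %d, not found: %d",
		counters[DuplicatedSessionEnd], counters[NewSessionEnd], counters[NoSessionInDB])
}

func main() {
	// Hypothetical handler: treat every session as a brand-new session end.
	handler := func(sessionID, timestamp uint64) (bool, int) {
		return true, int(NewSessionEnd)
	}
	classifyAndCount(map[uint64]uint64{1: 1000, 2: 2000}, handler)
}
```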
@@ -26,7 +26,6 @@ func main() {
// HandlersFabric returns the list of message handlers we want to be applied to each incoming message.
handlersFabric := func() []handlers.MessageProcessor {
return []handlers.MessageProcessor{
custom.NewInputEventBuilder(),
custom.NewPageEventBuilder(),
web.NewDeadClickDetector(),
&web.ClickRageDetector{},
@@ -23,6 +23,7 @@ type Config struct {
MaxFileSize int64 `env:"MAX_FILE_SIZE,default=524288000"`
UseSort bool `env:"USE_SESSION_SORT,default=true"`
UseProfiler bool `env:"PROFILER_ENABLED,default=false"`
UseBrotli bool `env:"USE_BROTLI,default=false"`
}

func New() *Config {
@@ -1,7 +1,6 @@
package datasaver

import (
"errors"
"log"

"openreplay/backend/internal/config/db"

@@ -102,14 +101,6 @@ func (s *saverImpl) handleMessage(msg Message) error {
return err
}
return s.sessions.UpdateEventsStats(session.SessionID, 1, 0)
case *InputEvent:
if err = s.pg.InsertWebInputEvent(session, m); err != nil {
if errors.Is(err, postgres.EmptyLabel) {
return nil
}
return err
}
return s.sessions.UpdateEventsStats(session.SessionID, 1, 0)
case *PageEvent:
if err = s.pg.InsertWebPageEvent(session, m); err != nil {
return err

@@ -123,7 +114,7 @@ func (s *saverImpl) handleMessage(msg Message) error {
if err = s.pg.InsertWebErrorEvent(session, types.WrapJSException(m)); err != nil {
return err
}
return s.sessions.UpdateIssuesStats(session.SessionID, 0, 1000)
return s.sessions.UpdateIssuesStats(session.SessionID, 1, 1000)
case *IntegrationEvent:
return s.pg.InsertWebErrorEvent(session, types.WrapIntegrationEvent(m))
case *InputChange:
@@ -9,12 +9,12 @@ import (
)

// EndedSessionHandler handler for ended sessions
type EndedSessionHandler func(sessionID uint64, timestamp uint64) bool
type EndedSessionHandler func(sessionID uint64, timestamp uint64) (bool, int)

// session holds information about user's session live status
type session struct {
lastTimestamp int64
lastUpdate int64
lastTimestamp int64 // timestamp from message broker
lastUpdate int64 // local timestamp
lastUserTime uint64
isEnded bool
}

@@ -24,6 +24,8 @@ type SessionEnder struct {
timeout int64
sessions map[uint64]*session // map[sessionID]session
timeCtrl *timeController
parts uint64
enabled bool
}

func New(timeout int64, parts int) (*SessionEnder, error) {

@@ -31,9 +33,38 @@ func New(timeout int64, parts int) (*SessionEnder, error) {
timeout: timeout,
sessions: make(map[uint64]*session),
timeCtrl: NewTimeController(parts),
parts: uint64(parts), // ender uses all partitions by default
enabled: true,
}, nil
}

func (se *SessionEnder) Enable() {
se.enabled = true
}

func (se *SessionEnder) Disable() {
se.enabled = false
}

func (se *SessionEnder) ActivePartitions(parts []uint64) {
activeParts := make(map[uint64]bool, 0)
for _, p := range parts {
activeParts[p] = true
}
removedSessions := 0
activeSessions := 0
for sessID, _ := range se.sessions {
if !activeParts[sessID%se.parts] {
delete(se.sessions, sessID)
removedSessions++
} else {
activeSessions++
}
}
log.Printf("SessionEnder: %d sessions left in active partitions: %+v, removed %d sessions",
activeSessions, parts, removedSessions)
}

// UpdateSession save timestamp for new sessions and update for existing sessions
func (se *SessionEnder) UpdateSession(msg messages.Message) {
var (

@@ -46,14 +77,14 @@ func (se *SessionEnder) UpdateSession(msg messages.Message) {
log.Printf("got empty timestamp for sessionID: %d", sessionID)
return
}
se.timeCtrl.UpdateTime(sessionID, batchTimestamp)
se.timeCtrl.UpdateTime(sessionID, batchTimestamp, localTimestamp)
sess, ok := se.sessions[sessionID]
if !ok {
// Register new session
se.sessions[sessionID] = &session{
lastTimestamp: batchTimestamp, // timestamp from message broker
lastUpdate: localTimestamp, // local timestamp
lastUserTime: msgTimestamp, // last timestamp from user's machine
lastTimestamp: batchTimestamp,
lastUpdate: localTimestamp,
lastUserTime: msgTimestamp, // last timestamp from user's machine
isEnded: false,
}
ender.IncreaseActiveSessions()

@@ -74,21 +105,53 @@ func (se *SessionEnder) UpdateSession(msg messages.Message) {

// HandleEndedSessions runs handler for each ended session and delete information about session in successful case
func (se *SessionEnder) HandleEndedSessions(handler EndedSessionHandler) {
if !se.enabled {
log.Printf("SessionEnder is disabled")
return
}
currTime := time.Now().UnixMilli()
allSessions, removedSessions := len(se.sessions), 0
brokerTime := make(map[int]int, 0)
serverTime := make(map[int]int, 0)

isSessionEnded := func(sessID uint64, sess *session) (bool, int) {
// Has been finished already
if sess.isEnded {
return true, 1
}
batchTimeDiff := se.timeCtrl.LastBatchTimestamp(sessID) - sess.lastTimestamp

// Has been finished according to batch timestamp and hasn't been updated for a long time
if (batchTimeDiff >= se.timeout) && (currTime-sess.lastUpdate >= se.timeout) {
return true, 2
}

// Hasn't been finished according to batch timestamp but hasn't been read from partition for a long time
if (batchTimeDiff < se.timeout) && (currTime-se.timeCtrl.LastUpdateTimestamp(sessID) >= se.timeout) {
return true, 3
}
return false, 0
}

for sessID, sess := range se.sessions {
if sess.isEnded || (se.timeCtrl.LastTimestamp(sessID)-sess.lastTimestamp > se.timeout) ||
(currTime-sess.lastUpdate > se.timeout) {
if ended, endCase := isSessionEnded(sessID, sess); ended {
sess.isEnded = true
if handler(sessID, sess.lastUserTime) {
if res, _ := handler(sessID, sess.lastUserTime); res {
delete(se.sessions, sessID)
ender.DecreaseActiveSessions()
ender.IncreaseClosedSessions()
removedSessions++
if endCase == 2 {
brokerTime[1]++
}
if endCase == 3 {
serverTime[1]++
}
} else {
log.Printf("sessID: %d, userTime: %d", sessID, sess.lastUserTime)
}
}
}
log.Printf("Removed %d of %d sessions", removedSessions, allSessions)
log.Printf("Removed %d of %d sessions; brokerTime: %d, serverTime: %d",
removedSessions, allSessions, brokerTime, serverTime)
}
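The end-detection rule introduced above can be read as a small pure function over two clocks: the broker (batch) timestamp and the local wall clock. The sketch below restates that logic under assumed field names and inputs; it is illustrative, not the package's actual code.

```go
package main

import (
	"fmt"
	"time"
)

// session mirrors the fields used by the diff's isSessionEnded closure.
type session struct {
	lastTimestamp int64 // last batch (broker) timestamp seen for this session
	lastUpdate    int64 // local time of the last update
	isEnded       bool
}

// isSessionEnded returns whether the session should be closed and a case id:
// 1 = already ended, 2 = ended by broker time, 3 = ended by local (server) time.
func isSessionEnded(sess session, lastBatchTS, lastUpdateTS, now, timeout int64) (bool, int) {
	if sess.isEnded {
		return true, 1
	}
	batchTimeDiff := lastBatchTS - sess.lastTimestamp
	if batchTimeDiff >= timeout && now-sess.lastUpdate >= timeout {
		return true, 2 // the partition kept moving while this session stayed silent
	}
	if batchTimeDiff < timeout && now-lastUpdateTS >= timeout {
		return true, 3 // the partition itself has not been read for a long time
	}
	return false, 0
}

func main() {
	now := time.Now().UnixMilli()
	s := session{lastTimestamp: now - 120_000, lastUpdate: now - 120_000}
	ended, reason := isSessionEnded(s, now, now, now, 60_000)
	fmt.Println(ended, reason) // prints "true 2" under these assumed inputs
}
```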
@@ -1,21 +1,28 @@
package sessionender

type timeController struct {
parts uint64
lastTimestamp map[uint64]int64 // map[partition]consumerTimeOfLastMessage
parts uint64
lastBatchTimestamp map[uint64]int64 // map[partition]consumerTimeOfLastMessage
lastUpdateTimestamp map[uint64]int64 // map[partition]systemTimeOfLastMessage
}

func NewTimeController(parts int) *timeController {
return &timeController{
parts: uint64(parts),
lastTimestamp: make(map[uint64]int64),
parts: uint64(parts),
lastBatchTimestamp: make(map[uint64]int64),
lastUpdateTimestamp: make(map[uint64]int64),
}
}

func (tc *timeController) UpdateTime(sessionID uint64, timestamp int64) {
tc.lastTimestamp[sessionID%tc.parts] = timestamp
func (tc *timeController) UpdateTime(sessionID uint64, batchTimestamp, updateTimestamp int64) {
tc.lastBatchTimestamp[sessionID%tc.parts] = batchTimestamp
tc.lastUpdateTimestamp[sessionID%tc.parts] = updateTimestamp
}

func (tc *timeController) LastTimestamp(sessionID uint64) int64 {
return tc.lastTimestamp[sessionID%tc.parts]
func (tc *timeController) LastBatchTimestamp(sessionID uint64) int64 {
return tc.lastBatchTimestamp[sessionID%tc.parts]
}

func (tc *timeController) LastUpdateTimestamp(sessionID uint64) int64 {
return tc.lastUpdateTimestamp[sessionID%tc.parts]
}
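A short usage sketch of the reworked controller, assuming the same per-partition keying as the diff: each call records both the broker time of the last batch and the local read time, which is what lets a caller tell a quiet session apart from a stalled partition. The helper names here are local to the example.

```go
package main

import "fmt"

type partitionClock struct {
	lastBatch  int64 // broker timestamp of the last batch read from this partition
	lastUpdate int64 // local timestamp at which that batch was read
}

// record mimics UpdateTime(sessionID, batchTimestamp, updateTimestamp).
func record(clocks map[uint64]*partitionClock, parts, sessionID uint64, batchTS, localTS int64) {
	p := sessionID % parts
	clocks[p] = &partitionClock{lastBatch: batchTS, lastUpdate: localTS}
}

// partitionStalled mimics the LastUpdateTimestamp check: the partition itself
// has not delivered anything for longer than the timeout.
func partitionStalled(clocks map[uint64]*partitionClock, parts, sessionID uint64, now, timeout int64) bool {
	p := sessionID % parts
	c, ok := clocks[p]
	return ok && now-c.lastUpdate >= timeout
}

func main() {
	clocks := make(map[uint64]*partitionClock)
	record(clocks, 16, 42, 1_700_000_000_000, 1_700_000_000_250)
	fmt.Println(partitionStalled(clocks, 16, 42, 1_700_000_100_000, 60_000)) // true
}
```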
@@ -45,6 +45,7 @@ type Task struct {
dome *bytes.Buffer
dev *bytes.Buffer
isBreakTask bool
compression objectstorage.CompressionType
}

func (t *Task) SetMob(mob []byte, tp FileType) {

@@ -111,8 +112,9 @@ func (s *Storage) Process(msg *messages.SessionEnd) (err error) {

// Prepare sessions
newTask := &Task{
id: sessionID,
key: msg.EncryptionKey,
id: sessionID,
key: msg.EncryptionKey,
compression: objectstorage.NoCompression,
}
wg := &sync.WaitGroup{}
wg.Add(2)

@@ -197,13 +199,14 @@ func (s *Storage) prepareSession(path string, tp FileType, task *Task) error {

func (s *Storage) packSession(task *Task, tp FileType) {
// If encryption key is empty, pack session using better algorithm
if task.key == "" {
if task.key == "" && s.cfg.UseBrotli {
s.packSessionBetter(task, tp)
return
}

// Prepare mob file
mob := task.Mob(tp)
task.compression = objectstorage.Gzip

if tp == DEV || len(mob) <= s.cfg.FileSplitSize {
// Compression

@@ -270,6 +273,7 @@ func (s *Storage) packSession(task *Task, tp FileType) {
func (s *Storage) packSessionBetter(task *Task, tp FileType) {
// Prepare mob file
mob := task.Mob(tp)
task.compression = objectstorage.Brotli

if tp == DEV || len(mob) <= s.cfg.FileSplitSize {
// Compression

@@ -377,17 +381,13 @@ func (s *Storage) uploadSession(task *Task) {
uploadDome int64 = 0
uploadDev int64 = 0
)
compression := objectstorage.NoCompression
if task.key == "" {
compression = objectstorage.Brotli
}
go func() {
if task.doms != nil {
// Record compression ratio
metrics.RecordSessionCompressionRatio(task.domsRawSize/float64(task.doms.Len()), DOM.String())
// Upload session to s3
start := time.Now()
if err := s.objStorage.Upload(task.doms, task.id+string(DOM)+"s", "application/octet-stream", compression); err != nil {
if err := s.objStorage.Upload(task.doms, task.id+string(DOM)+"s", "application/octet-stream", task.compression); err != nil {
log.Fatalf("Storage: start upload failed. %s", err)
}
uploadDoms = time.Now().Sub(start).Milliseconds()

@@ -400,7 +400,7 @@ func (s *Storage) uploadSession(task *Task) {
metrics.RecordSessionCompressionRatio(task.domeRawSize/float64(task.dome.Len()), DOM.String())
// Upload session to s3
start := time.Now()
if err := s.objStorage.Upload(task.dome, task.id+string(DOM)+"e", "application/octet-stream", compression); err != nil {
if err := s.objStorage.Upload(task.dome, task.id+string(DOM)+"e", "application/octet-stream", task.compression); err != nil {
log.Fatalf("Storage: start upload failed. %s", err)
}
uploadDome = time.Now().Sub(start).Milliseconds()

@@ -413,7 +413,7 @@ func (s *Storage) uploadSession(task *Task) {
metrics.RecordSessionCompressionRatio(task.devRawSize/float64(task.dev.Len()), DEV.String())
// Upload session to s3
start := time.Now()
if err := s.objStorage.Upload(task.dev, task.id+string(DEV), "application/octet-stream", compression); err != nil {
if err := s.objStorage.Upload(task.dev, task.id+string(DEV), "application/octet-stream", task.compression); err != nil {
log.Fatalf("Storage: start upload failed. %s", err)
}
uploadDev = time.Now().Sub(start).Milliseconds()
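The storage hunks above move the compression choice onto the task itself: Brotli only when the session is not encrypted and the new USE_BROTLI flag is on, Gzip otherwise, and every upload then reuses task.compression. A minimal sketch of that decision, with local stand-ins for the objectstorage constants (the helper is illustrative, not the actual method):

```go
package main

import "fmt"

// CompressionType stands in for objectstorage.CompressionType in this sketch.
type CompressionType string

const (
	NoCompression CompressionType = "none"
	Gzip          CompressionType = "gzip"
	Brotli        CompressionType = "br"
)

// pickCompression mirrors the packSession decision in the diff: Brotli only when
// the session is not encrypted AND the USE_BROTLI flag is enabled, Gzip otherwise.
func pickCompression(encryptionKey string, useBrotli bool) CompressionType {
	if encryptionKey == "" && useBrotli {
		return Brotli
	}
	return Gzip
}

func main() {
	fmt.Println(pickCompression("", true))       // br
	fmt.Println(pickCompression("", false))      // gzip
	fmt.Println(pickCompression("secret", true)) // gzip: encrypted sessions stay on gzip
}
```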
@@ -19,7 +19,6 @@ type BulkSet struct {
requests Bulk
customEvents Bulk
webPageEvents Bulk
webInputEvents Bulk
webInputDurations Bulk
webGraphQL Bulk
webErrors Bulk

@@ -57,8 +56,6 @@ func (conn *BulkSet) Get(name string) Bulk {
return conn.customEvents
case "webPageEvents":
return conn.webPageEvents
case "webInputEvents":
return conn.webInputEvents
case "webInputDurations":
return conn.webInputDurations
case "webGraphQL":

@@ -122,14 +119,6 @@ func (conn *BulkSet) initBulks() {
if err != nil {
log.Fatalf("can't create webPageEvents bulk: %s", err)
}
conn.webInputEvents, err = NewBulk(conn.c,
"events.inputs",
"(session_id, message_id, timestamp, label)",
"($%d, $%d, $%d, NULLIF(LEFT($%d, 2000),''))",
4, 200)
if err != nil {
log.Fatalf("can't create webPageEvents bulk: %s", err)
}
conn.webInputDurations, err = NewBulk(conn.c,
"events.inputs",
"(session_id, message_id, timestamp, label, hesitation, duration)",

@@ -220,7 +209,6 @@ func (conn *BulkSet) Send() {
newTask.bulks = append(newTask.bulks, conn.requests)
newTask.bulks = append(newTask.bulks, conn.customEvents)
newTask.bulks = append(newTask.bulks, conn.webPageEvents)
newTask.bulks = append(newTask.bulks, conn.webInputEvents)
newTask.bulks = append(newTask.bulks, conn.webInputDurations)
newTask.bulks = append(newTask.bulks, conn.webGraphQL)
newTask.bulks = append(newTask.bulks, conn.webErrors)
@@ -136,17 +136,6 @@ func (conn *Conn) InsertWebClickEvent(sess *sessions.Session, e *messages.MouseC
return nil
}

func (conn *Conn) InsertWebInputEvent(sess *sessions.Session, e *messages.InputEvent) error {
if e.Label == "" {
return EmptyLabel
}
if err := conn.bulks.Get("webInputEvents").Append(sess.SessionID, truncSqIdx(e.MessageID), e.Timestamp, e.Label); err != nil {
log.Printf("insert web input event err: %s", err)
}
conn.InsertAutocompleteValue(sess.SessionID, sess.ProjectID, "INPUT", e.Label)
return nil
}

func (conn *Conn) InsertInputChangeEvent(sess *sessions.Session, e *messages.InputChange) error {
if e.Label == "" {
return nil
@@ -65,7 +65,7 @@ func WrapJSException(m *JSException) *ErrorEvent {
}
return &ErrorEvent{
MessageID: m.Meta().Index,
Timestamp: uint64(m.Meta().Timestamp),
Timestamp: m.Meta().Timestamp,
Source: SOURCE_JS,
Name: m.Name,
Message: m.Message,
@@ -1,77 +0,0 @@
package custom

import (
. "openreplay/backend/pkg/messages"
)

const InputEventTimeout = 1 * 60 * 1000

type inputLabels map[uint64]string

type inputEventBuilder struct {
inputEvent *InputEvent
inputLabels inputLabels
inputID uint64
}

func NewInputEventBuilder() *inputEventBuilder {
ieBuilder := &inputEventBuilder{}
ieBuilder.clearLabels()
return ieBuilder
}

func (b *inputEventBuilder) clearLabels() {
b.inputLabels = make(inputLabels)
}

func (b *inputEventBuilder) Handle(message Message, timestamp uint64) Message {
var inputEvent Message = nil
switch msg := message.(type) {
case *SetInputTarget:
if b.inputID != msg.ID {
inputEvent = b.Build()
b.inputID = msg.ID
}
b.inputLabels[msg.ID] = msg.Label
return inputEvent
case *SetInputValue:
if b.inputID != msg.ID {
inputEvent = b.Build()
b.inputID = msg.ID
}
if b.inputEvent == nil {
b.inputEvent = &InputEvent{
MessageID: message.MsgID(),
Timestamp: timestamp,
Value: msg.Value,
ValueMasked: msg.Mask > 0,
}
} else {
b.inputEvent.Value = msg.Value
b.inputEvent.ValueMasked = msg.Mask > 0
}
return inputEvent
case *CreateDocument:
inputEvent = b.Build()
b.clearLabels()
return inputEvent
case *MouseClick:
return b.Build()
}

if b.inputEvent != nil && b.inputEvent.Timestamp+InputEventTimeout < timestamp {
return b.Build()
}
return nil
}

func (b *inputEventBuilder) Build() Message {
if b.inputEvent == nil {
return nil
}
inputEvent := b.inputEvent
inputEvent.Label = b.inputLabels[b.inputID] // might be empty string

b.inputEvent = nil
return inputEvent
}
@@ -1,12 +1,24 @@
package types

type RebalanceType string

const (
RebalanceTypeAssign RebalanceType = "assign"
RebalanceTypeRevoke RebalanceType = "revoke"
)

type PartitionsRebalancedEvent struct {
Type RebalanceType
Partitions []uint64
}

// Consumer reads batches of session data from queue (redis or kafka)
type Consumer interface {
ConsumeNext() error
CommitBack(gap int64) error
Commit() error
Close()
Rebalanced() <-chan interface{}
Rebalanced() <-chan *PartitionsRebalancedEvent
}

// Producer sends batches of session data to queue (redis or kafka)
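With the typed PartitionsRebalancedEvent channel above, consumers of Rebalanced() no longer need a type assertion on interface{}. A self-contained sketch of the pattern the ender's select loop uses (the disable/enable/setParts callbacks here are hypothetical placeholders for the real SessionEnder methods):

```go
package main

import "fmt"

type RebalanceType string

const (
	RebalanceTypeAssign RebalanceType = "assign"
	RebalanceTypeRevoke RebalanceType = "revoke"
)

type PartitionsRebalancedEvent struct {
	Type       RebalanceType
	Partitions []uint64
}

// handleRebalance pauses session ending on revoke and, on assign, prunes to the
// newly assigned partitions before resuming — mirroring the diff's select branch.
func handleRebalance(events <-chan *PartitionsRebalancedEvent, disable, enable func(), setParts func([]uint64)) {
	for msg := range events {
		fmt.Printf("Rebalanced event, type: %s, partitions: %+v\n", msg.Type, msg.Partitions)
		if msg.Type == RebalanceTypeRevoke {
			disable()
		} else {
			setParts(msg.Partitions)
			enable()
		}
	}
}

func main() {
	ch := make(chan *PartitionsRebalancedEvent, 1)
	ch <- &PartitionsRebalancedEvent{Type: RebalanceTypeAssign, Partitions: []uint64{0, 3}}
	close(ch)
	handleRebalance(ch, func() {}, func() {}, func(p []uint64) {})
}
```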
@@ -4,6 +4,7 @@ import (
"log"
"net"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue/types"
"sort"
"strconv"
"strings"

@@ -27,7 +28,7 @@ type Consumer struct {
idsPending streamPendingIDsMap
lastTs int64
autoCommit bool
event chan interface{}
event chan *types.PartitionsRebalancedEvent
}

func NewConsumer(group string, streams []string, messageIterator messages.MessageIterator) *Consumer {

@@ -58,13 +59,13 @@ func NewConsumer(group string, streams []string, messageIterator messages.Messag
group: group,
autoCommit: true,
idsPending: idsPending,
event: make(chan interface{}, 4),
event: make(chan *types.PartitionsRebalancedEvent, 4),
}
}

const READ_COUNT = 10

func (c *Consumer) Rebalanced() <-chan interface{} {
func (c *Consumer) Rebalanced() <-chan *types.PartitionsRebalancedEvent {
return c.event
}
@@ -11,15 +11,17 @@ pyjwt = "==2.7.0"
psycopg2-binary = "==2.9.6"
elasticsearch = "==8.8.0"
jira = "==3.5.1"
fastapi = "==0.96.0"
uvicorn = {version = "==0.22.0", extras = ["standard"]}
fastapi = "==0.97.0"
python-decouple = "==3.8"
pydantic = {version = "==1.10.8", extras = ["email"]}
apscheduler = "==3.10.1"
clickhouse-driver = {version = "==0.2.5", extras = ["lz4"]}
python-multipart = "==0.0.6"
redis = "==4.5.5"
azure-storage-blob = "==12.16.0"
uvicorn = {version = "==0.22.0", extras = ["standard"]}
gunicorn = "==20.1.0"
pydantic = {version = "==1.10.8", extras = ["email"]}
clickhouse-driver = {version = "==0.2.6", extras = ["lz4"]}
python3-saml = "==1.15.0"

[dev-packages]
@@ -18,7 +18,7 @@ class JWTAuth(HTTPBearer):
if credentials:
if not credentials.scheme == "Bearer":
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.")
jwt_payload = authorizers.jwt_authorizer(credentials.scheme + " " + credentials.credentials)
jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials)
auth_exists = jwt_payload is not None \
and users.auth_exists(user_id=jwt_payload.get("userId", -1),
tenant_id=jwt_payload.get("tenantId", -1),

@@ -27,18 +27,13 @@ class JWTAuth(HTTPBearer):
if jwt_payload is None \
or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
or not auth_exists:
print("JWTAuth: Token issue")
if jwt_payload is not None:
print(jwt_payload)
print(f"JWTAuth: user_id={jwt_payload.get('userId')} tenant_id={jwt_payload.get('tenantId')}")
if jwt_payload is None:
print("JWTAuth: jwt_payload is None")
print(credentials.scheme + " " + credentials.credentials)
if jwt_payload is not None and jwt_payload.get("iat") is None:
print("JWTAuth: iat is None")
if jwt_payload is not None and jwt_payload.get("aud") is None:
print("JWTAuth: aud is None")
if jwt_payload is not None and not auth_exists:
if jwt_payload.get("iat") is None:
print("JWTAuth: iat is None")
if jwt_payload.get("aud") is None:
print("JWTAuth: aud is None")
if not auth_exists:
print("JWTAuth: not users.auth_exists")

raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")

@@ -47,12 +42,14 @@ class JWTAuth(HTTPBearer):
print("JWTAuth: User not found.")
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.")
jwt_payload["authorizer_identity"] = "jwt"
print(jwt_payload)
request.state.authorizer_identity = "jwt"
if user["serviceAccount"]:
user["permissions"] = [p.value for p in schemas_ee.ServicePermissions]
request.state.currentContext = schemas_ee.CurrentContext(tenant_id=jwt_payload.get("tenantId", -1),
user_id=jwt_payload.get("userId", -1),
email=user["email"],
permissions=user["permissions"])
permissions=user["permissions"],
service_account=user["serviceAccount"])
return request.state.currentContext

else:
@@ -7,13 +7,12 @@ from chalicelib.utils import helper
from chalicelib.utils.TimeUTC import TimeUTC


def jwt_authorizer(token):
token = token.split(" ")
if len(token) != 2 or token[0].lower() != "bearer":
def jwt_authorizer(scheme: str, token: str):
if scheme.lower() != "bearer":
return None
try:
payload = jwt.decode(
token[1],
token,
config("jwt_secret"),
algorithms=config("jwt_algorithm"),
audience=[f"front:{helper.get_stage_name()}"]

@@ -23,6 +22,7 @@ def jwt_authorizer(token):
return None
except BaseException as e:
print("! JWT Base Exception")
print(e)
return None
return payload
@@ -2,7 +2,8 @@ import json
import secrets

from decouple import config
from fastapi import BackgroundTasks
from fastapi import BackgroundTasks, HTTPException
from starlette import status

import schemas
import schemas_ee

@@ -282,7 +283,8 @@ def get(user_id, tenant_id):
roles.name AS role_name,
roles.permissions,
roles.all_projects,
basic_authentication.password IS NOT NULL AS has_password
basic_authentication.password IS NOT NULL AS has_password,
users.service_account
FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
LEFT JOIN public.roles USING (role_id)
WHERE

@@ -472,7 +474,9 @@ def get_members(tenant_id):
FROM public.users
LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
LEFT JOIN public.roles USING (role_id)
WHERE users.tenant_id = %(tenant_id)s AND users.deleted_at IS NULL
WHERE users.tenant_id = %(tenant_id)s
AND users.deleted_at IS NULL
AND NOT users.service_account
ORDER BY name, user_id""",
{"tenant_id": tenant_id})
)

@@ -626,17 +630,24 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"SELECT user_id AS id,jwt_iat, changed_at FROM public.users INNER JOIN public.basic_authentication USING(user_id) WHERE user_id = %(userId)s AND tenant_id = %(tenant_id)s AND deleted_at IS NULL LIMIT 1;",
f"""SELECT user_id,
jwt_iat,
changed_at,
service_account,
basic_authentication.user_id IS NOT NULL AS has_basic_auth
FROM public.users
LEFT JOIN public.basic_authentication USING(user_id)
WHERE user_id = %(userId)s
AND tenant_id = %(tenant_id)s
AND deleted_at IS NULL
LIMIT 1;""",
{"userId": user_id, "tenant_id": tenant_id})
)
r = cur.fetchone()
return r is not None \
and r.get("jwt_iat") is not None \
and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
or (jwt_aud.startswith("plugin") \
and (r["changed_at"] is None \
or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
)
and (r["service_account"] and not r["has_basic_auth"]
or r.get("jwt_iat") is not None \
and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1))


def change_jwt_iat(user_id):

@@ -665,7 +676,8 @@ def authenticate(email, password, for_change_password=False) -> dict | None:
users.origin,
users.role_id,
roles.name AS role_name,
roles.permissions
roles.permissions,
users.service_account
FROM public.users AS users INNER JOIN public.basic_authentication USING(user_id)
LEFT JOIN public.roles ON (roles.role_id = users.role_id AND roles.tenant_id = users.tenant_id)
WHERE users.email = %(email)s

@@ -694,7 +706,10 @@ def authenticate(email, password, for_change_password=False) -> dict | None:
if for_change_password:
return True
r = helper.dict_to_camel_case(r)
if config("enforce_SSO", cast=bool, default=False) and helper.is_saml2_available():
if r["serviceAccount"]:
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
detail="service account is not authorized to login")
elif config("enforce_SSO", cast=bool, default=False) and helper.is_saml2_available():
return {"errors": ["must sign-in with SSO, enforced by admin"]}

jwt_iat = change_jwt_iat(r['userId'])

@@ -722,8 +737,9 @@ def authenticate_sso(email, internal_id, exp=None):
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
origin,
role_id
FROM public.users AS users
role_id,
service_account
FROM public.users
WHERE users.email = %(email)s AND internal_id = %(internal_id)s;""",
{"email": email, "internal_id": internal_id})

@@ -732,6 +748,9 @@ def authenticate_sso(email, internal_id, exp=None):

if r is not None:
r = helper.dict_to_camel_case(r)
if r["serviceAccount"]:
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
detail="service account is not authorized to login")
jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['userId']))
return authorizers.generate_jwt(r['userId'], r['tenantId'],
iat=jwt_iat, aud=f"front:{helper.get_stage_name()}",
@@ -56,10 +56,19 @@ class ORRoute(APIRoute):


def __check(security_scopes: SecurityScopes, context: schemas_ee.CurrentContext = Depends(OR_context)):
s_p = 0
for scope in security_scopes.scopes:
if isinstance(scope, schemas_ee.ServicePermissions):
s_p += 1
if context.service_account and not isinstance(scope, schemas_ee.ServicePermissions) \
or not context.service_account and not isinstance(scope, schemas_ee.Permissions):
continue
if scope not in context.permissions:
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
detail="Not enough permissions")
if context.service_account and s_p == 0:
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
detail="Not enough permissions (service account)")


def OR_scope(*scopes):
@@ -20,7 +20,7 @@ from chalicelib.utils.TimeUTC import TimeUTC
from or_dependencies import OR_context, OR_scope
from routers import saml
from routers.base import get_routers
from schemas_ee import Permissions
from schemas_ee import Permissions, ServicePermissions

public_app, app, app_apikey = get_routers()

@@ -203,7 +203,7 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)):

# for backward compatibility
@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions", "replay"],
dependencies=[OR_scope(Permissions.session_replay)])
dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)])
def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)):
if isinstance(sessionId, str):

@@ -220,8 +220,24 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba
}


@app.post('/{projectId}/sessions/search', tags=["sessions"],
dependencies=[OR_scope(Permissions.session_replay)])
def sessions_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id)
return {'data': data}


@app.post('/{projectId}/sessions/search/ids', tags=["sessions"],
dependencies=[OR_scope(Permissions.session_replay)])
def session_ids_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True)
return {'data': data}


@app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"],
dependencies=[OR_scope(Permissions.session_replay)])
dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)])
def get_session_events(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)):
if isinstance(sessionId, str):

@@ -239,7 +255,7 @@ def get_session_events(projectId: int, sessionId: Union[int, str], background_ta


@app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"],
dependencies=[OR_scope(Permissions.session_replay)])
dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)])
def get_session_events(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)):
if isinstance(sessionId, str):

@@ -326,7 +342,8 @@ def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDa
return {"errors": ["undefined action"]}


@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"], dependencies=[OR_scope(Permissions.assist_live)])
@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"],
dependencies=[OR_scope(Permissions.assist_live, ServicePermissions.assist_live)])
def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks,
context: schemas_ee.CurrentContext = Depends(OR_context)):
data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId)

@@ -342,7 +359,8 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun


@app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"],
dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay)])
dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay,
ServicePermissions.assist_live, ServicePermissions.session_replay)])
def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)):
not_found = {"errors": ["Replay file not found"]}

@@ -363,7 +381,9 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],


@app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"],
dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools)])
dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools,
ServicePermissions.assist_live, ServicePermissions.session_replay,
ServicePermissions.dev_tools)])
def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)):
not_found = {"errors": ["Devtools file not found"]}
@@ -18,8 +18,16 @@ class Permissions(str, Enum):
feature_flags = "FEATURE_FLAGS"


class ServicePermissions(str, Enum):
session_replay = "SERVICE_SESSION_REPLAY"
dev_tools = "SERVICE_DEV_TOOLS"
assist_live = "SERVICE_ASSIST_LIVE"
assist_call = "SERVICE_ASSIST_CALL"


class CurrentContext(schemas.CurrentContext):
permissions: List[Optional[Permissions]] = Field(...)
permissions: List[Union[Permissions, ServicePermissions]] = Field(...)
service_account: bool = Field(default=False)


class RolePayloadSchema(BaseModel):
@@ -7,6 +7,7 @@ ENV ENTERPRISE_BUILD=${envarg} \
MAXMINDDB_FILE=/home/openreplay/geoip.mmdb \
PRIVATE_ENDPOINTS=false \
LISTEN_PORT=9001 \
ERROR=1 \
NODE_ENV=production
WORKDIR /work
COPY package.json .

@@ -19,4 +20,4 @@ USER 1001
ADD --chown=1001 https://static.openreplay.com/geoip/GeoLite2-City.mmdb $MAXMINDDB_FILE

ENTRYPOINT ["/sbin/tini", "--"]
CMD npm start
CMD npm start
@ -27,7 +27,8 @@ const {
|
|||
const wsRouter = express.Router();
|
||||
|
||||
let io;
|
||||
const debug = process.env.debug === "1";
|
||||
const debug_log = process.env.debug === "1";
|
||||
const error_log = process.env.ERROR === "1";
|
||||
|
||||
const createSocketIOServer = function (server, prefix) {
|
||||
if (process.env.uws !== "true") {
|
||||
|
|
@ -66,7 +67,7 @@ const respond = function (res, data) {
|
|||
}
|
||||
|
||||
const socketsList = async function (req, res) {
|
||||
debug && console.log("[WS]looking for all available sessions");
|
||||
debug_log && console.log("[WS]looking for all available sessions");
|
||||
let filters = await extractPayloadFromRequest(req, res);
|
||||
let withFilters = hasFilters(filters);
|
||||
let liveSessionsPerProject = {};
|
||||
|
|
@ -96,7 +97,7 @@ const socketsList = async function (req, res) {
|
|||
}
|
||||
|
||||
const socketsListByProject = async function (req, res) {
|
||||
debug && console.log("[WS]looking for available sessions");
|
||||
debug_log && console.log("[WS]looking for available sessions");
|
||||
let _projectKey = extractProjectKeyFromRequest(req);
|
||||
let _sessionId = extractSessionIdFromRequest(req);
|
||||
let filters = await extractPayloadFromRequest(req, res);
|
||||
|
|
@ -126,7 +127,7 @@ const socketsListByProject = async function (req, res) {
|
|||
}
|
||||
|
||||
const socketsLive = async function (req, res) {
|
||||
debug && console.log("[WS]looking for all available LIVE sessions");
|
||||
debug_log && console.log("[WS]looking for all available LIVE sessions");
|
||||
let filters = await extractPayloadFromRequest(req, res);
|
||||
let withFilters = hasFilters(filters);
|
||||
let liveSessionsPerProject = {};
|
||||
|
|
@ -157,7 +158,7 @@ const socketsLive = async function (req, res) {
|
|||
}
|
||||
|
||||
const socketsLiveByProject = async function (req, res) {
|
||||
debug && console.log("[WS]looking for available LIVE sessions");
|
||||
debug_log && console.log("[WS]looking for available LIVE sessions");
|
||||
let _projectKey = extractProjectKeyFromRequest(req);
|
||||
let _sessionId = extractSessionIdFromRequest(req);
|
||||
let filters = await extractPayloadFromRequest(req, res);
|
||||
|
|
@ -194,7 +195,7 @@ const socketsLiveByProject = async function (req, res) {
|
|||
}
|
||||
|
||||
const autocomplete = async function (req, res) {
|
||||
debug && console.log("[WS]autocomplete");
|
||||
debug_log && console.log("[WS]autocomplete");
|
||||
let _projectKey = extractProjectKeyFromRequest(req);
|
||||
let filters = await extractPayloadFromRequest(req);
|
||||
let results = [];
|
||||
|
|
@ -285,7 +286,7 @@ module.exports = {
|
|||
io.use(async (socket, next) => await authorizer.check(socket, next));
|
||||
io.on('connection', async (socket) => {
|
||||
socket.on(EVENTS_DEFINITION.listen.ERROR, err => errorHandler(EVENTS_DEFINITION.listen.ERROR, err));
|
||||
debug && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
|
||||
debug_log && console.log(`WS started:${socket.id}, Query:${JSON.stringify(socket.handshake.query)}`);
|
||||
socket._connectedAt = new Date();
|
||||
|
||||
let {projectKey: connProjectKey, sessionId: connSessionId, tabId:connTabId} = extractPeerId(socket.handshake.query.peerId);
|
||||
|
|
@ -295,7 +296,7 @@ module.exports = {
|
|||
connTabId = connTabId ?? (Math.random() + 1).toString(36).substring(2);
|
||||
socket.tabId = connTabId;
|
||||
socket.identity = socket.handshake.query.identity;
|
||||
debug && console.log(`connProjectKey:${connProjectKey}, connSessionId:${connSessionId}, connTabId:${connTabId}, roomId:${socket.roomId}`);
|
||||
debug_log && console.log(`connProjectKey:${connProjectKey}, connSessionId:${connSessionId}, connTabId:${connTabId}, roomId:${socket.roomId}`);
|
||||
|
||||
let {c_sessions, c_agents} = await sessions_agents_count(io, socket);
|
||||
if (socket.identity === IDENTITIES.session) {
|
||||
|
|
@ -307,7 +308,7 @@ module.exports = {
|
|||
const connected_sockets = await io.in(roomId).fetchSockets();
|
||||
for (let item of connected_sockets) {
|
||||
if (item.tabId === connTabId) {
|
||||
debug && console.log(`session already connected, refusing new connexion`);
|
||||
error_log && console.log(`session already connected, refusing new connexion, peerId: ${socket.peerId}`);
|
||||
io.to(socket.id).emit(EVENTS_DEFINITION.emit.SESSION_ALREADY_CONNECTED);
|
||||
return socket.disconnect();
|
||||
}
|
||||
|
|
@ -317,20 +318,20 @@ module.exports = {
|
|||
}
|
||||
extractSessionInfo(socket);
|
||||
if (c_agents > 0) {
|
||||
debug && console.log(`notifying new session about agent-existence`);
|
||||
debug_log && console.log(`notifying new session about agent-existence`);
|
||||
let agents_ids = await get_all_agents_ids(io, socket);
|
||||
io.to(socket.id).emit(EVENTS_DEFINITION.emit.AGENTS_CONNECTED, agents_ids);
|
||||
socket.to(socket.roomId).emit(EVENTS_DEFINITION.emit.SESSION_RECONNECTED, socket.id);
|
||||
}
|
||||
|
||||
} else if (c_sessions <= 0) {
|
||||
debug && console.log(`notifying new agent about no SESSIONS with peerId:${socket.peerId}`);
|
||||
debug_log && console.log(`notifying new agent about no SESSIONS with peerId:${socket.peerId}`);
|
||||
io.to(socket.id).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
|
||||
}
|
||||
await socket.join(socket.roomId);
|
||||
const rooms = await getAvailableRooms(io);
|
||||
if (rooms.get(socket.roomId)) {
|
||||
debug && console.log(`${socket.id} joined room:${socket.roomId}, as:${socket.identity}, members:${rooms.get(socket.roomId).size}`);
|
||||
debug_log && console.log(`${socket.id} joined room:${socket.roomId}, as:${socket.identity}, members:${rooms.get(socket.roomId).size}`);
|
||||
}
|
||||
if (socket.identity === IDENTITIES.agent) {
|
||||
if (socket.handshake.query.agentInfo !== undefined) {
|
||||
|
|
@ -340,29 +341,29 @@ module.exports = {
|
|||
}
|
||||
|
||||
socket.on('disconnect', async () => {
|
||||
debug && console.log(`${socket.id} disconnected from ${socket.roomId}`);
|
||||
debug_log && console.log(`${socket.id} disconnected from ${socket.roomId}`);
|
||||
if (socket.identity === IDENTITIES.agent) {
|
||||
socket.to(socket.roomId).emit(EVENTS_DEFINITION.emit.AGENT_DISCONNECT, socket.id);
|
||||
}
|
||||
debug && console.log("checking for number of connected agents and sessions");
|
||||
debug_log && console.log("checking for number of connected agents and sessions");
|
||||
let {c_sessions, c_agents} = await sessions_agents_count(io, socket);
|
||||
if (c_sessions === -1 && c_agents === -1) {
|
||||
debug && console.log(`room not found: ${socket.roomId}`);
|
||||
debug_log && console.log(`room not found: ${socket.roomId}`);
|
||||
}
|
||||
if (c_sessions === 0) {
|
||||
debug && console.log(`notifying everyone in ${socket.roomId} about no SESSIONS`);
|
||||
debug_log && console.log(`notifying everyone in ${socket.roomId} about no SESSIONS`);
|
||||
socket.to(socket.roomId).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
|
||||
}
|
||||
if (c_agents === 0) {
|
||||
debug && console.log(`notifying everyone in ${socket.peerId} about no AGENTS`);
|
||||
debug_log && console.log(`notifying everyone in ${socket.peerId} about no AGENTS`);
|
||||
socket.to(socket.roomId).emit(EVENTS_DEFINITION.emit.NO_AGENTS);
|
||||
}
|
||||
});
|
||||
|
||||
socket.on(EVENTS_DEFINITION.listen.UPDATE_EVENT, async (...args) => {
|
||||
debug && console.log(`${socket.id} sent update event.`);
|
||||
debug_log && console.log(`${socket.id} sent update event.`);
|
||||
if (socket.identity !== IDENTITIES.session) {
|
||||
debug && console.log('Ignoring update event.');
|
||||
debug_log && console.log('Ignoring update event.');
|
||||
return
|
||||
}
|
||||
// Back compatibility (add top layer with meta information)
|
||||
|
|
@ -390,7 +391,7 @@ module.exports = {
|
|||
|
||||
socket.onAny(async (eventName, ...args) => {
|
||||
if (Object.values(EVENTS_DEFINITION.listen).indexOf(eventName) >= 0) {
|
||||
debug && console.log(`received event:${eventName}, should be handled by another listener, stopping onAny.`);
|
||||
debug_log && console.log(`received event:${eventName}, should be handled by another listener, stopping onAny.`);
|
||||
return
|
||||
}
|
||||
// Back compatibility (add top layer with meta information)
|
||||
|
|
@ -398,16 +399,16 @@ module.exports = {
|
|||
args[0] = {meta: {tabId: socket.tabId, version: 1}, data: args[0]};
|
||||
}
|
||||
if (socket.identity === IDENTITIES.session) {
|
||||
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}`);
|
||||
debug_log && console.log(`received event:${eventName}, from:${socket.identity}, sending message to room:${socket.peerId}`);
|
||||
socket.to(socket.roomId).emit(eventName, args[0]);
|
||||
} else {
|
||||
debug && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}`);
|
||||
debug_log && console.log(`received event:${eventName}, from:${socket.identity}, sending message to session of room:${socket.peerId}`);
|
||||
let socketId = await findSessionSocketId(io, socket.roomId, args[0]?.meta?.tabId);
|
||||
if (socketId === null) {
|
||||
debug && console.log(`session not found for:${socket.roomId}`);
|
||||
debug_log && console.log(`session not found for:${socket.roomId}`);
|
||||
io.to(socket.id).emit(EVENTS_DEFINITION.emit.NO_SESSIONS);
|
||||
} else {
|
||||
debug && console.log("message sent");
|
||||
debug_log && console.log("message sent");
|
||||
io.to(socketId).emit(eventName, socket.id, args[0]);
|
||||
}
|
||||
}
|
||||
|
|
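The event handlers above forward agent messages to the session socket that owns the target tab. A hedged sketch of what that lookup does (the real helper is `findSessionSocketId`; exact behaviour may differ):

```typescript
import { Server } from 'socket.io';

const IDENTITIES = { session: 'session', agent: 'agent' } as const;

// Find the session-identity socket inside a room, optionally matching a tabId.
async function findSessionSocketId(io: Server, roomId: string, tabId?: string): Promise<string | null> {
  const members = await io.in(roomId).fetchSockets();
  for (const member of members) {
    const meta = member as unknown as { handshake: { query: Record<string, any> }; tabId?: string };
    if (meta.handshake.query.identity === IDENTITIES.session && (tabId === undefined || meta.tabId === tabId)) {
      return member.id;
    }
  }
  return null;
}
```

The caller then delivers the event with `io.to(socketId).emit(eventName, socket.id, payload)` and falls back to emitting `NO_SESSIONS` when the lookup returns null, as in the handler above.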
@ -428,7 +429,7 @@ module.exports = {
|
|||
}
|
||||
}
|
||||
console.log(` ====== Valid Rooms: ${count} ====== `);
|
||||
if (debug) {
|
||||
if (debug_log) {
|
||||
for (let item of filtered) {
|
||||
console.log(`Room: ${item[0]} connected: ${item[1].size}`);
|
||||
}
|
||||
|
|
@ -447,4 +448,4 @@ module.exports = {
|
|||
socketsLiveByProject,
|
||||
autocomplete
|
||||
}
|
||||
};
|
||||
};
|
||||
|
|
|
|||
|
|
@@ -28,7 +28,7 @@ type consumerImpl struct {
idsPending streamPendingIDsMap
lastTs int64
autoCommit bool
event chan interface{}
event chan *types.PartitionsRebalancedEvent
}

type QueueMessage struct {

@@ -67,7 +67,7 @@ func NewConsumer(client *Client, group string, streams []string) types.Consumer
group: group,
autoCommit: true,
idsPending: idsPending,
event: make(chan interface{}, 4),
event: make(chan *types.PartitionsRebalancedEvent, 4),
}
}

@@ -169,6 +169,6 @@ func (c *consumerImpl) Commit() error {
return nil
}

func (c *consumerImpl) Rebalanced() <-chan interface{} {
func (c *consumerImpl) Rebalanced() <-chan *types.PartitionsRebalancedEvent {
return c.event
}
|
|
|
|||
|
|
@ -7,6 +7,7 @@ import (
|
|||
|
||||
"openreplay/backend/pkg/env"
|
||||
"openreplay/backend/pkg/messages"
|
||||
"openreplay/backend/pkg/queue/types"
|
||||
|
||||
"github.com/confluentinc/confluent-kafka-go/kafka"
|
||||
"github.com/pkg/errors"
|
||||
|
|
@ -20,6 +21,7 @@ type Consumer struct {
|
|||
commitTicker *time.Ticker
|
||||
pollTimeout uint
|
||||
events chan interface{}
|
||||
rebalanced chan *types.PartitionsRebalancedEvent
|
||||
lastReceivedPrtTs map[int32]int64
|
||||
}
|
||||
|
||||
|
|
@ -72,7 +74,8 @@ func NewConsumer(
|
|||
messageIterator: messageIterator,
|
||||
commitTicker: commitTicker,
|
||||
pollTimeout: 200,
|
||||
events: make(chan interface{}, 4),
|
||||
events: make(chan interface{}, 32),
|
||||
rebalanced: make(chan *types.PartitionsRebalancedEvent, 32),
|
||||
lastReceivedPrtTs: make(map[int32]int64, 16),
|
||||
}
|
||||
|
||||
|
|
@@ -96,15 +99,25 @@ func (consumer *Consumer) reBalanceCallback(_ *kafka.Consumer, e kafka.Event) er
case kafka.RevokedPartitions:
// receive before re-balancing partitions; stop consuming messages and commit current state
consumer.events <- evt.String()
parts := make([]uint64, len(evt.Partitions))
for i, p := range evt.Partitions {
parts[i] = uint64(p.Partition)
}
consumer.rebalanced <- &types.PartitionsRebalancedEvent{Type: types.RebalanceTypeRevoke, Partitions: parts}
case kafka.AssignedPartitions:
// receive after re-balancing partitions; continue consuming messages
//consumer.events <- evt.String()
consumer.events <- evt.String()
parts := make([]uint64, len(evt.Partitions))
for i, p := range evt.Partitions {
parts[i] = uint64(p.Partition)
}
consumer.rebalanced <- &types.PartitionsRebalancedEvent{Type: types.RebalanceTypeAssign, Partitions: parts}
}
return nil
}

func (consumer *Consumer) Rebalanced() <-chan interface{} {
return consumer.events
func (consumer *Consumer) Rebalanced() <-chan *types.PartitionsRebalancedEvent {
return consumer.rebalanced
}

func (consumer *Consumer) Commit() error {
|
|
|
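The consumer changes above replace an untyped rebalance channel with a dedicated `PartitionsRebalancedEvent` carrying the rebalance type and the affected partitions. For illustration only, the same shape expressed in TypeScript (the real types live in the backend's Go `types` package):

```typescript
// Mirror of the Go event: a rebalance either assigns or revokes a set of partitions.
type RebalanceType = 'assign' | 'revoke';

interface PartitionsRebalancedEvent {
  type: RebalanceType;
  partitions: number[];
}

// A typed channel forces consumers to handle exactly this payload instead of
// switching on an unknown value.
function onRebalance(evt: PartitionsRebalancedEvent): void {
  if (evt.type === 'revoke') {
    // stop consuming and commit current offsets before partitions are taken away
  } else {
    // partitions assigned: resume consumption
  }
  console.log(`rebalance ${evt.type}: partitions ${evt.partitions.join(',')}`);
}
```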
|||
|
|
@@ -71,6 +71,14 @@ UPDATE public.roles
SET permissions = (SELECT array_agg(distinct e) FROM unnest(permissions || '{FEATURE_FLAGS}') AS e)
where not permissions @> '{FEATURE_FLAGS}';

ALTER TYPE public.user_role ADD VALUE IF NOT EXISTS 'service';

ALTER TABLE IF EXISTS public.users
    ADD COLUMN IF NOT EXISTS service_account bool NOT NULL DEFAULT FALSE;

ALTER TABLE IF EXISTS public.roles
    ADD COLUMN IF NOT EXISTS service_role bool NOT NULL DEFAULT FALSE;

COMMIT;

\elif :is_next
|
|
|
|||
|
|
@ -172,32 +172,34 @@ $$
|
|||
protected bool NOT NULL DEFAULT FALSE,
|
||||
all_projects bool NOT NULL DEFAULT TRUE,
|
||||
created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
|
||||
deleted_at timestamp NULL DEFAULT NULL
|
||||
deleted_at timestamp NULL DEFAULT NULL,
|
||||
service_role bool NOT NULL DEFAULT FALSE
|
||||
);
|
||||
|
||||
IF NOT EXISTS(SELECT *
|
||||
FROM pg_type typ
|
||||
WHERE typ.typname = 'user_role') THEN
|
||||
CREATE TYPE user_role AS ENUM ('owner','admin','member');
|
||||
CREATE TYPE user_role AS ENUM ('owner','admin','member','service');
|
||||
END IF;
|
||||
|
||||
|
||||
CREATE TABLE IF NOT EXISTS users
|
||||
(
|
||||
user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
|
||||
tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE,
|
||||
email text NOT NULL UNIQUE,
|
||||
role user_role NOT NULL DEFAULT 'member',
|
||||
name text NOT NULL,
|
||||
created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
|
||||
deleted_at timestamp without time zone NULL DEFAULT NULL,
|
||||
api_key text UNIQUE DEFAULT generate_api_key(20) NOT NULL,
|
||||
jwt_iat timestamp without time zone NULL DEFAULT NULL,
|
||||
data jsonb NOT NULL DEFAULT'{}'::jsonb,
|
||||
weekly_report boolean NOT NULL DEFAULT TRUE,
|
||||
origin text NULL DEFAULT NULL,
|
||||
role_id integer REFERENCES roles (role_id) ON DELETE SET NULL,
|
||||
internal_id text NULL DEFAULT NULL
|
||||
user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
|
||||
tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE,
|
||||
email text NOT NULL UNIQUE,
|
||||
role user_role NOT NULL DEFAULT 'member',
|
||||
name text NOT NULL,
|
||||
created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'),
|
||||
deleted_at timestamp without time zone NULL DEFAULT NULL,
|
||||
api_key text UNIQUE DEFAULT generate_api_key(20) NOT NULL,
|
||||
jwt_iat timestamp without time zone NULL DEFAULT NULL,
|
||||
data jsonb NOT NULL DEFAULT'{}'::jsonb,
|
||||
weekly_report boolean NOT NULL DEFAULT TRUE,
|
||||
origin text NULL DEFAULT NULL,
|
||||
role_id integer REFERENCES roles (role_id) ON DELETE SET NULL,
|
||||
internal_id text NULL DEFAULT NULL,
|
||||
service_account bool NOT NULL DEFAULT FALSE
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS users_tenant_id_deleted_at_N_idx ON users (tenant_id) WHERE deleted_at ISNULL;
|
||||
CREATE INDEX IF NOT EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops);
|
||||
|
|
|
|||
|
|
@@ -22,5 +22,5 @@ MINIO_ACCESS_KEY = ''
MINIO_SECRET_KEY = ''

# APP and TRACKER VERSIONS
VERSION = 1.14.0
VERSION = 1.14.9
TRACKER_VERSION = '9.0.0'
|
|
|
|||
|
|
@@ -15,11 +15,13 @@ import APIClient from './api_client';
import * as routes from './routes';
import { OB_DEFAULT_TAB, isRoute } from 'App/routes';
import Signup from 'Components/Signup';
import { fetchTenants } from 'Duck/user';
import { fetchTenants, setJwt } from 'Duck/user';
import { setSessionPath } from 'Duck/sessions';
import { ModalProvider } from './components/Modal';
import { GLOBAL_DESTINATION_PATH, GLOBAL_HAS_NO_RECORDINGS } from 'App/constants/storageKeys';
import { GLOBAL_DESTINATION_PATH, GLOBAL_HAS_NO_RECORDINGS, IFRAME, JWT_PARAM } from 'App/constants/storageKeys';
import SupportCallout from 'Shared/SupportCallout';
import NotFoundPage from 'Shared/NotFoundPage';
import { checkParam } from 'App/utils';

const Login = lazy(() => import('Components/Login/Login'));
const ForgotPassword = lazy(() => import('Components/ForgotPassword/ForgotPassword'));
|
|
@ -48,7 +50,7 @@ const Onboarding = withSiteIdUpdater(OnboardingPure);
|
|||
const FunnelPage = withSiteIdUpdater(FunnelPagePure);
|
||||
const FunnelsDetails = withSiteIdUpdater(FunnelDetailsPure);
|
||||
const FunnelIssue = withSiteIdUpdater(FunnelIssueDetails);
|
||||
const Multiview = withSiteIdUpdater(MultiviewPure)
|
||||
const Multiview = withSiteIdUpdater(MultiviewPure);
|
||||
const withSiteId = routes.withSiteId;
|
||||
|
||||
const METRICS_PATH = routes.metrics();
|
||||
|
|
@ -68,7 +70,7 @@ const SESSIONS_PATH = routes.sessions();
|
|||
const FFLAGS_PATH = routes.fflags();
|
||||
const FFLAG_PATH = routes.fflag();
|
||||
const FFLAG_CREATE_PATH = routes.newFFlag();
|
||||
const FFLAG_READ_PATH = routes.fflagRead()
|
||||
const FFLAG_READ_PATH = routes.fflagRead();
|
||||
const NOTES_PATH = routes.notes();
|
||||
const BOOKMARKS_PATH = routes.bookmarks();
|
||||
const ASSIST_PATH = routes.assist();
|
||||
|
|
@ -92,193 +94,257 @@ const MULTIVIEW_INDEX_PATH = routes.multiviewIndex();
|
|||
@withStore
|
||||
@withRouter
|
||||
@connect(
|
||||
(state) => {
|
||||
const siteId = state.getIn(['site', 'siteId']);
|
||||
const jwt = state.getIn(['user', 'jwt']);
|
||||
const changePassword = state.getIn(['user', 'account', 'changePassword']);
|
||||
const userInfoLoading = state.getIn(['user', 'fetchUserInfoRequest', 'loading']);
|
||||
return {
|
||||
jwt,
|
||||
siteId,
|
||||
changePassword,
|
||||
sites: state.getIn(['site', 'list']),
|
||||
isLoggedIn: jwt !== null && !changePassword,
|
||||
loading: siteId === null || userInfoLoading,
|
||||
email: state.getIn(['user', 'account', 'email']),
|
||||
account: state.getIn(['user', 'account']),
|
||||
organisation: state.getIn(['user', 'account', 'name']),
|
||||
tenantId: state.getIn(['user', 'account', 'tenantId']),
|
||||
tenants: state.getIn(['user', 'tenants']),
|
||||
existingTenant: state.getIn(['user', 'authDetails', 'tenants']),
|
||||
onboarding: state.getIn(['user', 'onboarding']),
|
||||
isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee' || state.getIn(['user', 'authDetails', 'edition']) === 'ee',
|
||||
};
|
||||
},
|
||||
{
|
||||
fetchUserInfo,
|
||||
fetchTenants,
|
||||
setSessionPath,
|
||||
fetchSiteList,
|
||||
}
|
||||
(state) => {
|
||||
const siteId = state.getIn(['site', 'siteId']);
|
||||
const jwt = state.getIn(['user', 'jwt']);
|
||||
const changePassword = state.getIn(['user', 'account', 'changePassword']);
|
||||
const userInfoLoading = state.getIn(['user', 'fetchUserInfoRequest', 'loading']);
|
||||
return {
|
||||
jwt,
|
||||
siteId,
|
||||
changePassword,
|
||||
sites: state.getIn(['site', 'list']),
|
||||
isLoggedIn: jwt !== null && !changePassword,
|
||||
loading: siteId === null || userInfoLoading,
|
||||
email: state.getIn(['user', 'account', 'email']),
|
||||
account: state.getIn(['user', 'account']),
|
||||
organisation: state.getIn(['user', 'account', 'name']),
|
||||
tenantId: state.getIn(['user', 'account', 'tenantId']),
|
||||
tenants: state.getIn(['user', 'tenants']),
|
||||
existingTenant: state.getIn(['user', 'authDetails', 'tenants']),
|
||||
onboarding: state.getIn(['user', 'onboarding']),
|
||||
isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee' || state.getIn(['user', 'authDetails', 'edition']) === 'ee'
|
||||
};
|
||||
},
|
||||
{
|
||||
fetchUserInfo,
|
||||
fetchTenants,
|
||||
setSessionPath,
|
||||
fetchSiteList,
|
||||
setJwt
|
||||
}
|
||||
)
|
||||
class Router extends React.Component {
|
||||
constructor(props) {
|
||||
super(props);
|
||||
if (props.isLoggedIn) {
|
||||
this.fetchInitialData();
|
||||
}
|
||||
constructor(props) {
|
||||
super(props);
|
||||
if (props.isLoggedIn) {
|
||||
this.fetchInitialData();
|
||||
}
|
||||
|
||||
fetchInitialData = async () => {
|
||||
const siteIdFromPath = parseInt(window.location.pathname.split("/")[1])
|
||||
await this.props.fetchUserInfo()
|
||||
await this.props.fetchSiteList(siteIdFromPath)
|
||||
const { mstore } = this.props;
|
||||
mstore.initClient();
|
||||
this.state = {
|
||||
isIframe: checkParam('iframe', IFRAME),
|
||||
isJwt: checkParam('jwt', JWT_PARAM)
|
||||
};
|
||||
|
||||
componentDidMount() {
|
||||
const { isLoggedIn, location } = this.props;
|
||||
const destinationPath = localStorage.getItem(GLOBAL_DESTINATION_PATH);
|
||||
if (!isLoggedIn && !location.pathname.includes('login')) {
|
||||
localStorage.setItem(GLOBAL_DESTINATION_PATH, location.pathname);
|
||||
} else if (isLoggedIn && destinationPath && !location.pathname.includes(destinationPath)) {
|
||||
this.props.history.push(destinationPath || '/');
|
||||
localStorage.removeItem(GLOBAL_DESTINATION_PATH);
|
||||
}
|
||||
const urlJWT = new URLSearchParams(window.location.search).get('jwt');
|
||||
if (urlJWT && !props.isLoggedIn) {
|
||||
props.setJwt(urlJWT);
|
||||
}
|
||||
}
|
||||
|
||||
fetchInitialData = async () => {
|
||||
const siteIdFromPath = parseInt(window.location.pathname.split('/')[1]);
|
||||
await this.props.fetchUserInfo();
|
||||
await this.props.fetchSiteList(siteIdFromPath);
|
||||
const { mstore } = this.props;
|
||||
mstore.initClient();
|
||||
};
|
||||
|
||||
componentDidMount() {
|
||||
const { isLoggedIn, location } = this.props;
|
||||
const destinationPath = localStorage.getItem(GLOBAL_DESTINATION_PATH);
|
||||
|
||||
if (!isLoggedIn && !location.pathname.includes('login')) {
|
||||
localStorage.setItem(GLOBAL_DESTINATION_PATH, location.pathname);
|
||||
} else if (isLoggedIn && destinationPath && !location.pathname.includes(destinationPath)) {
|
||||
this.props.history.push(destinationPath || '/');
|
||||
localStorage.removeItem(GLOBAL_DESTINATION_PATH);
|
||||
}
|
||||
}
|
||||
|
||||
componentDidUpdate(prevProps, prevState) {
|
||||
this.props.setSessionPath(prevProps.location);
|
||||
const destinationPath = localStorage.getItem(GLOBAL_DESTINATION_PATH);
|
||||
|
||||
if (prevProps.email !== this.props.email && !this.props.email) {
|
||||
this.props.fetchTenants();
|
||||
}
|
||||
|
||||
componentDidUpdate(prevProps, prevState) {
|
||||
this.props.setSessionPath(prevProps.location);
|
||||
const destinationPath = localStorage.getItem(GLOBAL_DESTINATION_PATH);
|
||||
|
||||
if (prevProps.email !== this.props.email && !this.props.email) {
|
||||
this.props.fetchTenants();
|
||||
}
|
||||
|
||||
if (
|
||||
destinationPath &&
|
||||
!prevProps.isLoggedIn &&
|
||||
this.props.isLoggedIn &&
|
||||
destinationPath !== routes.login() &&
|
||||
destinationPath !== '/'
|
||||
) {
|
||||
this.props.history.push(destinationPath);
|
||||
}
|
||||
|
||||
if (!prevProps.isLoggedIn && this.props.isLoggedIn) {
|
||||
this.fetchInitialData();
|
||||
}
|
||||
if (
|
||||
destinationPath &&
|
||||
!prevProps.isLoggedIn &&
|
||||
this.props.isLoggedIn &&
|
||||
destinationPath !== routes.login() &&
|
||||
destinationPath !== '/'
|
||||
) {
|
||||
this.props.history.push(destinationPath + window.location.search);
|
||||
}
|
||||
|
||||
render() {
|
||||
const { isLoggedIn, jwt, siteId, sites, loading, changePassword, location, existingTenant, onboarding, isEnterprise } = this.props;
|
||||
const siteIdList = sites.map(({ id }) => id).toJS();
|
||||
const hideHeader = (location.pathname && location.pathname.includes('/session/'))
|
||||
|| location.pathname.includes('/assist/')
|
||||
|| location.pathname.includes('multiview');
|
||||
const isPlayer = isRoute(SESSION_PATH, location.pathname)
|
||||
|| isRoute(LIVE_SESSION_PATH, location.pathname)
|
||||
|| isRoute(MULTIVIEW_PATH, location.pathname)
|
||||
|| isRoute(MULTIVIEW_INDEX_PATH, location.pathname);
|
||||
|
||||
const redirectToOnboarding = !onboarding && localStorage.getItem(GLOBAL_HAS_NO_RECORDINGS) === 'true'
|
||||
|
||||
return isLoggedIn ? (
|
||||
<ModalProvider>
|
||||
<Loader loading={loading} className="flex-1">
|
||||
<Notification />
|
||||
{!hideHeader && <Header key="header" />}
|
||||
<Suspense fallback={<Loader loading={true} className="flex-1" />}>
|
||||
<Switch key="content">
|
||||
<Route path={CLIENT_PATH} component={Client} />
|
||||
<Route path={withSiteId(ONBOARDING_PATH, siteIdList)} component={Onboarding} />
|
||||
<Route
|
||||
path="/integrations/"
|
||||
render={({ location }) => {
|
||||
const client = new APIClient(jwt);
|
||||
switch (location.pathname) {
|
||||
case '/integrations/slack':
|
||||
client.post('integrations/slack/add', {
|
||||
code: location.search.split('=')[1],
|
||||
state: tenantId,
|
||||
});
|
||||
break;
|
||||
case '/integrations/msteams':
|
||||
client.post('integrations/msteams/add', {
|
||||
code: location.search.split('=')[1],
|
||||
state: tenantId,
|
||||
});
|
||||
break;
|
||||
}
|
||||
return <Redirect to={CLIENT_PATH} />;
|
||||
}}
|
||||
/>
|
||||
{redirectToOnboarding && <Redirect to={withSiteId(ONBOARDING_REDIRECT_PATH, siteId)} />}
|
||||
|
||||
{/* DASHBOARD and Metrics */}
|
||||
<Route exact strict path={withSiteId(ALERTS_PATH, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(ALERT_EDIT_PATH, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(ALERT_CREATE_PATH, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(METRICS_PATH, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(METRICS_DETAILS, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(METRICS_DETAILS_SUB, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(DASHBOARD_PATH, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(DASHBOARD_SELECT_PATH, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(DASHBOARD_METRIC_CREATE_PATH, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(DASHBOARD_METRIC_DETAILS_PATH, siteIdList)} component={Dashboard} />
|
||||
|
||||
<Route exact path={withSiteId(MULTIVIEW_INDEX_PATH, siteIdList)} component={Multiview} />
|
||||
<Route path={withSiteId(MULTIVIEW_PATH, siteIdList)} component={Multiview} />
|
||||
<Route exact strict path={withSiteId(ASSIST_PATH, siteIdList)} component={Assist} />
|
||||
<Route exact strict path={withSiteId(RECORDINGS_PATH, siteIdList)} component={Assist} />
|
||||
{/*<Route exact strict path={withSiteId(ERRORS_PATH, siteIdList)} component={Errors} />*/}
|
||||
{/*<Route exact strict path={withSiteId(ERROR_PATH, siteIdList)} component={Errors} />*/}
|
||||
<Route exact strict path={withSiteId(FUNNEL_PATH, siteIdList)} component={FunnelPage} />
|
||||
<Route exact strict path={withSiteId(FUNNEL_CREATE_PATH, siteIdList)} component={FunnelsDetails} />
|
||||
<Route exact strict path={withSiteId(FUNNEL_ISSUE_PATH, siteIdList)} component={FunnelIssue} />
|
||||
<Route
|
||||
exact
|
||||
strict
|
||||
path={[
|
||||
withSiteId(SESSIONS_PATH, siteIdList),
|
||||
withSiteId(FFLAGS_PATH, siteIdList),
|
||||
withSiteId(FFLAG_PATH, siteIdList),
|
||||
withSiteId(FFLAG_READ_PATH, siteIdList),
|
||||
withSiteId(FFLAG_CREATE_PATH, siteIdList),
|
||||
withSiteId(NOTES_PATH, siteIdList),
|
||||
withSiteId(BOOKMARKS_PATH, siteIdList),
|
||||
]}
|
||||
component={SessionsOverview}
|
||||
/>
|
||||
<Route exact strict path={withSiteId(SESSION_PATH, siteIdList)} component={Session} />
|
||||
<Route exact strict path={withSiteId(LIVE_SESSION_PATH, siteIdList)} component={LiveSession} />
|
||||
<Route exact strict path={withSiteId(LIVE_SESSION_PATH, siteIdList)} render={(props) => <Session {...props} live />} />
|
||||
{routes.redirects.map(([fr, to]) => (
|
||||
<Redirect key={fr} exact strict from={fr} to={to} />
|
||||
))}
|
||||
<Redirect to={withSiteId(SESSIONS_PATH, siteId)} />
|
||||
</Switch>
|
||||
</Suspense>
|
||||
</Loader>
|
||||
{!isEnterprise && !isPlayer && <SupportCallout /> }
|
||||
</ModalProvider>
|
||||
) : (
|
||||
<Suspense fallback={<Loader loading={true} className="flex-1" />}>
|
||||
<Switch>
|
||||
<Route exact strict path={FORGOT_PASSWORD} component={ForgotPassword} />
|
||||
<Route exact strict path={LOGIN_PATH} component={changePassword ? UpdatePassword : Login} />
|
||||
<Route exact strict path={SIGNUP_PATH} component={Signup} />
|
||||
<Redirect to={LOGIN_PATH} />
|
||||
</Switch>
|
||||
{!isEnterprise && <SupportCallout /> }
|
||||
</Suspense>
|
||||
);
|
||||
if (!prevProps.isLoggedIn && this.props.isLoggedIn) {
|
||||
this.fetchInitialData();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
render() {
|
||||
const {
|
||||
isLoggedIn,
|
||||
jwt,
|
||||
siteId,
|
||||
sites,
|
||||
loading,
|
||||
changePassword,
|
||||
location,
|
||||
existingTenant,
|
||||
onboarding,
|
||||
isEnterprise
|
||||
} = this.props;
|
||||
const siteIdList = sites.map(({ id }) => id).toJS();
|
||||
const hideHeader = (location.pathname && location.pathname.includes('/session/'))
|
||||
|| location.pathname.includes('/assist/')
|
||||
|| location.pathname.includes('multiview');
|
||||
const isPlayer = isRoute(SESSION_PATH, location.pathname)
|
||||
|| isRoute(LIVE_SESSION_PATH, location.pathname)
|
||||
|| isRoute(MULTIVIEW_PATH, location.pathname)
|
||||
|| isRoute(MULTIVIEW_INDEX_PATH, location.pathname);
|
||||
|
||||
const redirectToOnboarding = !onboarding && localStorage.getItem(GLOBAL_HAS_NO_RECORDINGS) === 'true';
|
||||
const { isIframe, isJwt } = this.state;
|
||||
|
||||
const renderAuthenticatedIframeRoutes = () => (
|
||||
<ModalProvider>
|
||||
<Loader loading={loading} className='flex-1'>
|
||||
<Suspense fallback={<Loader loading={true} className='flex-1' />}>
|
||||
<Switch key='content'>
|
||||
<Route exact strict path={withSiteId(SESSION_PATH, siteIdList)} component={Session} />
|
||||
<Route exact strict path={withSiteId(LIVE_SESSION_PATH, siteIdList)} component={LiveSession} />
|
||||
<Route path='*' render={NotFoundPage} />
|
||||
</Switch>
|
||||
</Suspense>
|
||||
</Loader>
|
||||
</ModalProvider>
|
||||
);
|
||||
|
||||
const renderUnauthenticatedIframeRoutes = () => (
|
||||
<Suspense fallback={<Loader loading={true} className='flex-1' />}>
|
||||
<Switch>
|
||||
<Route exact strict path={FORGOT_PASSWORD} component={ForgotPassword} />
|
||||
<Route exact strict path={LOGIN_PATH} component={changePassword ? UpdatePassword : Login} />
|
||||
<Route exact strict path={SIGNUP_PATH} component={Signup} />
|
||||
<Redirect to={LOGIN_PATH} />
|
||||
</Switch>
|
||||
{!isEnterprise && <SupportCallout />}
|
||||
</Suspense>
|
||||
);
|
||||
|
||||
if (isIframe) {
|
||||
if (isLoggedIn) {
|
||||
return renderAuthenticatedIframeRoutes();
|
||||
}
|
||||
|
||||
if (isJwt) {
|
||||
return <NotFoundPage />;
|
||||
}
|
||||
|
||||
return renderUnauthenticatedIframeRoutes();
|
||||
}
|
||||
|
||||
return isLoggedIn ? (
|
||||
<ModalProvider>
|
||||
<Loader loading={loading} className='flex-1'>
|
||||
<Notification />
|
||||
{!hideHeader && <Header key='header' />}
|
||||
<Suspense fallback={<Loader loading={true} className='flex-1' />}>
|
||||
<Switch key='content'>
|
||||
<Route path={CLIENT_PATH} component={Client} />
|
||||
<Route path={withSiteId(ONBOARDING_PATH, siteIdList)} component={Onboarding} />
|
||||
<Route
|
||||
path='/integrations/'
|
||||
render={({ location }) => {
|
||||
const client = new APIClient(jwt);
|
||||
switch (location.pathname) {
|
||||
case '/integrations/slack':
|
||||
client.post('integrations/slack/add', {
|
||||
code: location.search.split('=')[1],
|
||||
state: tenantId
|
||||
});
|
||||
break;
|
||||
case '/integrations/msteams':
|
||||
client.post('integrations/msteams/add', {
|
||||
code: location.search.split('=')[1],
|
||||
state: tenantId
|
||||
});
|
||||
break;
|
||||
}
|
||||
return <Redirect to={CLIENT_PATH} />;
|
||||
}}
|
||||
/>
|
||||
{redirectToOnboarding && <Redirect to={withSiteId(ONBOARDING_REDIRECT_PATH, siteId)} />}
|
||||
|
||||
{/* DASHBOARD and Metrics */}
|
||||
<Route exact strict path={withSiteId(ALERTS_PATH, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(ALERT_EDIT_PATH, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(ALERT_CREATE_PATH, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(METRICS_PATH, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(METRICS_DETAILS, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(METRICS_DETAILS_SUB, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(DASHBOARD_PATH, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(DASHBOARD_SELECT_PATH, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(DASHBOARD_METRIC_CREATE_PATH, siteIdList)} component={Dashboard} />
|
||||
<Route exact strict path={withSiteId(DASHBOARD_METRIC_DETAILS_PATH, siteIdList)} component={Dashboard} />
|
||||
|
||||
<Route exact path={withSiteId(MULTIVIEW_INDEX_PATH, siteIdList)} component={Multiview} />
|
||||
<Route path={withSiteId(MULTIVIEW_PATH, siteIdList)} component={Multiview} />
|
||||
<Route exact strict path={withSiteId(ASSIST_PATH, siteIdList)} component={Assist} />
|
||||
<Route exact strict path={withSiteId(RECORDINGS_PATH, siteIdList)} component={Assist} />
|
||||
{/*<Route exact strict path={withSiteId(ERRORS_PATH, siteIdList)} component={Errors} />*/}
|
||||
{/*<Route exact strict path={withSiteId(ERROR_PATH, siteIdList)} component={Errors} />*/}
|
||||
<Route exact strict path={withSiteId(FUNNEL_PATH, siteIdList)} component={FunnelPage} />
|
||||
<Route exact strict path={withSiteId(FUNNEL_CREATE_PATH, siteIdList)} component={FunnelsDetails} />
|
||||
<Route exact strict path={withSiteId(FUNNEL_ISSUE_PATH, siteIdList)} component={FunnelIssue} />
|
||||
<Route
|
||||
exact
|
||||
strict
|
||||
path={[
|
||||
withSiteId(SESSIONS_PATH, siteIdList),
|
||||
withSiteId(FFLAGS_PATH, siteIdList),
|
||||
withSiteId(FFLAG_PATH, siteIdList),
|
||||
withSiteId(FFLAG_READ_PATH, siteIdList),
|
||||
withSiteId(FFLAG_CREATE_PATH, siteIdList),
|
||||
withSiteId(NOTES_PATH, siteIdList),
|
||||
withSiteId(BOOKMARKS_PATH, siteIdList)
|
||||
]}
|
||||
component={SessionsOverview}
|
||||
/>
|
||||
<Route exact strict path={withSiteId(SESSION_PATH, siteIdList)} component={Session} />
|
||||
<Route exact strict path={withSiteId(LIVE_SESSION_PATH, siteIdList)} component={LiveSession} />
|
||||
<Route exact strict path={withSiteId(LIVE_SESSION_PATH, siteIdList)}
|
||||
render={(props) => <Session {...props} live />} />
|
||||
{routes.redirects.map(([fr, to]) => (
|
||||
<Redirect key={fr} exact strict from={fr} to={to} />
|
||||
))}
|
||||
<Redirect to={withSiteId(SESSIONS_PATH, siteId)} />
|
||||
</Switch>
|
||||
</Suspense>
|
||||
</Loader>
|
||||
{!isEnterprise && !isPlayer && <SupportCallout />}
|
||||
</ModalProvider>
|
||||
) : (
|
||||
<Suspense fallback={<Loader loading={true} className='flex-1' />}>
|
||||
<Switch>
|
||||
<Route exact strict path={FORGOT_PASSWORD} component={ForgotPassword} />
|
||||
<Route exact strict path={LOGIN_PATH} component={changePassword ? UpdatePassword : Login} />
|
||||
<Route exact strict path={SIGNUP_PATH} component={Signup} />
|
||||
<Redirect to={LOGIN_PATH} />
|
||||
</Switch>
|
||||
{!isEnterprise && <SupportCallout />}
|
||||
</Suspense>
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export default () => (
|
||||
<BrowserRouter>
|
||||
<Router />
|
||||
</BrowserRouter>
|
||||
<BrowserRouter>
|
||||
<Router />
|
||||
</BrowserRouter>
|
||||
);
|
||||
|
|
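The Router above now accepts a `?jwt=` query parameter so an embedded page can authenticate without the login form. A minimal sketch of that bootstrap step (assuming `setJwt` is the Redux action imported above from 'Duck/user' and stores the token the same way a normal login would):

```typescript
// Log the user in from a ?jwt=... query parameter when no session exists yet,
// mirroring the componentDidMount logic shown in the diff above.
function applyJwtFromUrl(isLoggedIn: boolean, setJwt: (token: string) => void): void {
  const urlJwt = new URLSearchParams(window.location.search).get('jwt');
  if (urlJwt && !isLoggedIn) {
    setJwt(urlJwt);
  }
}
```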
|
|||
|
|
@ -3,12 +3,7 @@ import { Button, Tooltip } from 'UI';
|
|||
import { connect } from 'react-redux';
|
||||
import cn from 'classnames';
|
||||
import ChatWindow from '../../ChatWindow';
|
||||
import {
|
||||
CallingState,
|
||||
ConnectionStatus,
|
||||
RemoteControlStatus,
|
||||
RequestLocalStream,
|
||||
} from 'Player';
|
||||
import { CallingState, ConnectionStatus, RemoteControlStatus, RequestLocalStream } from 'Player';
|
||||
import type { LocalStream } from 'Player';
|
||||
import { PlayerContext, ILivePlayerContext } from 'App/components/Session/playerContext';
|
||||
import { observer } from 'mobx-react-lite';
|
||||
|
|
@ -16,12 +11,14 @@ import { toast } from 'react-toastify';
|
|||
import { confirm } from 'UI';
|
||||
import stl from './AassistActions.module.css';
|
||||
import ScreenRecorder from 'App/components/Session_/ScreenRecorder/ScreenRecorder';
|
||||
import { audioContextManager } from 'App/utils/screenRecorder';
|
||||
|
||||
function onReject() {
|
||||
toast.info(`Call was rejected.`);
|
||||
}
|
||||
|
||||
function onControlReject() {
|
||||
toast.info('Remote control request was rejected by user')
|
||||
toast.info('Remote control request was rejected by user');
|
||||
}
|
||||
|
||||
function onError(e: any) {
|
||||
|
|
@ -47,7 +44,7 @@ function AssistActions({
|
|||
userDisplayName,
|
||||
}: Props) {
|
||||
// @ts-ignore ???
|
||||
const { player, store } = React.useContext<ILivePlayerContext>(PlayerContext)
|
||||
const { player, store } = React.useContext<ILivePlayerContext>(PlayerContext);
|
||||
|
||||
const {
|
||||
assistManager: {
|
||||
|
|
@ -55,17 +52,17 @@ function AssistActions({
|
|||
setCallArgs,
|
||||
requestReleaseRemoteControl,
|
||||
toggleAnnotation,
|
||||
setRemoteControlCallbacks
|
||||
setRemoteControlCallbacks,
|
||||
},
|
||||
toggleUserName,
|
||||
} = player
|
||||
toggleUserName,
|
||||
} = player;
|
||||
const {
|
||||
calling,
|
||||
annotating,
|
||||
peerConnectionStatus,
|
||||
remoteControl: remoteControlStatus,
|
||||
livePlay,
|
||||
} = store.get()
|
||||
} = store.get();
|
||||
|
||||
const [isPrestart, setPrestart] = useState(false);
|
||||
const [incomeStream, setIncomeStream] = useState<MediaStream[] | null>([]);
|
||||
|
|
@ -121,8 +118,9 @@ function AssistActions({
|
|||
|
||||
const addIncomeStream = (stream: MediaStream) => {
|
||||
setIncomeStream((oldState) => {
|
||||
if (oldState === null) return [stream]
|
||||
if (oldState === null) return [stream];
|
||||
if (!oldState.find((existingStream) => existingStream.id === stream.id)) {
|
||||
audioContextManager.mergeAudioStreams(stream);
|
||||
return [...oldState, stream];
|
||||
}
|
||||
return oldState;
|
||||
|
|
@@ -133,7 +131,16 @@ function AssistActions({
RequestLocalStream()
  .then((lStream) => {
    setLocalStream(lStream);
    setCallArgs(lStream, addIncomeStream, lStream.stop.bind(lStream), onReject, onError);
    audioContextManager.mergeAudioStreams(lStream.stream);
    setCallArgs(
      lStream,
      addIncomeStream,
      () => {
        lStream.stop.bind(lStream);
      },
      onReject,
      onError
    );
    setCallObject(callPeer());
    if (additionalAgentIds) {
      callPeer(additionalAgentIds);
|
|
@ -157,7 +164,7 @@ function AssistActions({
|
|||
};
|
||||
|
||||
const requestControl = () => {
|
||||
setRemoteControlCallbacks({ onReject: onControlReject })
|
||||
setRemoteControlCallbacks({ onReject: onControlReject });
|
||||
if (callRequesting || remoteRequesting) return;
|
||||
requestReleaseRemoteControl();
|
||||
};
|
||||
|
|
@ -249,17 +256,13 @@ function AssistActions({
|
|||
);
|
||||
}
|
||||
|
||||
const con = connect(
|
||||
(state: any) => {
|
||||
const permissions = state.getIn(['user', 'account', 'permissions']) || [];
|
||||
return {
|
||||
hasPermission: permissions.includes('ASSIST_CALL'),
|
||||
isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee',
|
||||
userDisplayName: state.getIn(['sessions', 'current']).userDisplayName,
|
||||
};
|
||||
}
|
||||
);
|
||||
const con = connect((state: any) => {
|
||||
const permissions = state.getIn(['user', 'account', 'permissions']) || [];
|
||||
return {
|
||||
hasPermission: permissions.includes('ASSIST_CALL'),
|
||||
isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee',
|
||||
userDisplayName: state.getIn(['sessions', 'current']).userDisplayName,
|
||||
};
|
||||
});
|
||||
|
||||
export default con(
|
||||
observer(AssistActions)
|
||||
);
|
||||
export default con(observer(AssistActions));
|
||||
|
|
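`audioContextManager.mergeAudioStreams` above feeds the local stream and each incoming WebRTC stream into one shared audio graph so the screen recorder can capture a single mixed track, and `clear()` tears that graph down. A hedged sketch of such a manager using the standard Web Audio API (the real implementation in `App/utils/screenRecorder` may differ):

```typescript
// Mixes any number of MediaStreams into a single audio destination stream.
class AudioContextManager {
  private ctx: AudioContext | null = null;
  private destination: MediaStreamAudioDestinationNode | null = null;

  mergeAudioStreams(stream: MediaStream): void {
    if (stream.getAudioTracks().length === 0) return; // nothing to mix
    if (!this.ctx) {
      this.ctx = new AudioContext();
      this.destination = this.ctx.createMediaStreamDestination();
    }
    // Route the stream's audio into the shared destination node.
    this.ctx.createMediaStreamSource(stream).connect(this.destination!);
  }

  // The mixed stream can be handed to a MediaRecorder alongside the screen track.
  get mixedStream(): MediaStream | null {
    return this.destination ? this.destination.stream : null;
  }

  clear(): void {
    void this.ctx?.close();
    this.ctx = null;
    this.destination = null;
  }
}

export const audioContextManager = new AudioContextManager();
```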
|
|||
|
|
@@ -1,3 +1,4 @@
import {audioContextManager} from "App/utils/screenRecorder";
import React from 'react';
import { useEffect, useState } from 'react';
import { connect } from 'react-redux';

@@ -82,6 +83,7 @@ function LivePlayer({
return () => {
  if (!location.pathname.includes('multiview') || !location.pathname.includes(usedSession.sessionId)) {
    console.debug('cleaning live player for', usedSession.sessionId)
    audioContextManager.clear();
    playerInst?.clean?.();
    // @ts-ignore default empty
    setContextValue(defaultContextValue)
|
|
|
|||
|
|
@ -10,7 +10,7 @@ import {
|
|||
import { PlayerContext, ILivePlayerContext } from 'App/components/Session/playerContext';
|
||||
import { observer } from 'mobx-react-lite';
|
||||
import { fetchSessions } from 'Duck/liveSearch';
|
||||
|
||||
import { useLocation } from "react-router-dom";
|
||||
import AssistDuration from './AssistDuration';
|
||||
import Timeline from './Timeline';
|
||||
import ControlButton from 'Components/Session_/Player/Controls/ControlButton';
|
||||
|
|
@ -20,6 +20,8 @@ import styles from 'Components/Session_/Player/Controls/controls.module.css';
|
|||
function Controls(props: any) {
|
||||
// @ts-ignore ?? TODO
|
||||
const { player, store } = React.useContext<ILivePlayerContext>(PlayerContext);
|
||||
const [noMulti, setNoMulti] = React.useState(false);
|
||||
const { search } = useLocation();
|
||||
|
||||
const { jumpToLive } = player;
|
||||
const {
|
||||
|
|
@@ -58,6 +60,12 @@ function Controls(props: any) {
if (totalAssistSessions === 0) {
  fetchAssistSessions();
}
const queryParams = new URLSearchParams(search);
if (
  (queryParams.has('noFooter') && queryParams.get('noFooter') === 'true')
) {
  setNoMulti(true);
}
return () => {
  document.removeEventListener('keydown', onKeyDown.bind(this));
};
|
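A short sketch of the `noFooter` check added above, which hides the multiview footer controls when the player is embedded (the query string comes from `useLocation`, as in the component above):

```typescript
// Returns true when the page was opened with ?noFooter=true.
function hasNoFooterFlag(search: string): boolean {
  const params = new URLSearchParams(search);
  return params.get('noFooter') === 'true';
}

// Usage inside the effect above: setNoMulti(hasNoFooterFlag(search));
```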
|
@ -73,8 +81,6 @@ function Controls(props: any) {
|
|||
player.jumpInterval(-SKIP_INTERVALS[skipInterval]);
|
||||
};
|
||||
|
||||
|
||||
|
||||
const toggleBottomTools = (blockName: number) => {
|
||||
toggleBottomBlock(blockName);
|
||||
};
|
||||
|
|
@ -82,6 +88,7 @@ function Controls(props: any) {
|
|||
return (
|
||||
<div className={styles.controls}>
|
||||
<Timeline />
|
||||
{!noMulti ?
|
||||
<div className={cn(styles.buttons, '!px-5 !pt-0')} data-is-live>
|
||||
<div className="flex items-center">
|
||||
{!closedLive && (
|
||||
|
|
@ -112,6 +119,7 @@ function Controls(props: any) {
|
|||
/>
|
||||
</div>
|
||||
</div>
|
||||
: null}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ import { withRequest } from 'HOCs';
|
|||
import SessionInfoItem from 'Components/Session_/SessionInfoItem';
|
||||
import { useModal } from 'App/components/Modal';
|
||||
import UserSessionsModal from 'Shared/UserSessionsModal';
|
||||
import { IFRAME } from 'App/constants/storageKeys';
|
||||
|
||||
function UserCard({ className, request, session, width, height, similarSessions, loading }) {
|
||||
const { settingsStore } = useStore();
|
||||
|
|
@ -125,9 +126,10 @@ export default withRequest({
|
|||
|
||||
// inner component
|
||||
function UserName({ name, userId, hash }) {
|
||||
const hasIframe = localStorage.getItem(IFRAME) === 'true';
|
||||
const { showModal } = useModal();
|
||||
const onClick = () => {
|
||||
showModal(<UserSessionsModal userId={userId} hash={hash} name={name} />, { right: true, width: 700 });
|
||||
};
|
||||
return <div onClick={userId ? onClick : () => {}}>{name}</div>;
|
||||
return <div onClick={userId && !hasIframe ? onClick : () => {}}>{name}</div>;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
|
||||
import React from 'react';
|
||||
import { connect } from 'react-redux';
|
||||
import { withRouter } from 'react-router-dom';
|
||||
|
|
@ -16,6 +17,7 @@ import { PlayerContext } from 'App/components/Session/playerContext';
|
|||
import { observer } from 'mobx-react-lite';
|
||||
import stl from './playerBlockHeader.module.css';
|
||||
import { fetchListActive as fetchMetadata } from 'Duck/customField';
|
||||
import { IFRAME } from 'App/constants/storageKeys';
|
||||
|
||||
const SESSIONS_ROUTE = sessionsRoute();
|
||||
|
||||
|
|
@ -42,8 +44,8 @@ function PlayerBlockHeader(props: any) {
|
|||
} = props;
|
||||
|
||||
React.useEffect(() => {
|
||||
const queryParams = new URLSearchParams(location.search);
|
||||
setHideBack(queryParams.has('iframe') && queryParams.get('iframe') === 'true');
|
||||
const iframe = localStorage.getItem(IFRAME) || false;
|
||||
setHideBack(!!iframe && iframe === 'true');
|
||||
|
||||
if (metaList.size === 0) fetchMetadata();
|
||||
}, []);
|
||||
|
|
@ -90,7 +92,7 @@ function PlayerBlockHeader(props: any) {
|
|||
<UserCard className="" width={width} height={height} />
|
||||
|
||||
<div className={cn('ml-auto flex items-center h-full', { hidden: closedLive })}>
|
||||
{live && (
|
||||
{live && !hideBack && (
|
||||
<>
|
||||
<div className={cn(stl.liveSwitchButton, 'pr-4')}>
|
||||
<Link to={withSiteId(liveSessionRoute(sessionId), siteId)}>
|
||||
|
|
|
|||
|
|
@@ -13,6 +13,8 @@ import { observer } from 'mobx-react-lite';
import AutoplayToggle from 'Shared/AutoplayToggle';
import { connect } from 'react-redux';
import SessionTabs from 'Components/Session/Player/SharedComponents/SessionTabs'
import { IFRAME } from 'App/constants/storageKeys';
import cn from 'classnames';

const localhostWarn = (project) => project + '_localhost_warn';

@@ -22,6 +24,7 @@ function SubHeader(props) {
const [showWarningModal, setWarning] = React.useState(defaultLocalhostWarn);
const { player, store } = React.useContext(PlayerContext);
const { width, height, endTime, location: currentLocation = 'loading...', } = store.get();
const hasIframe = localStorage.getItem(IFRAME) === "true";

const enabledIntegration = useMemo(() => {
  const { integrations } = props;

@@ -103,7 +106,7 @@ function SubHeader(props) {
) : null}
<SessionTabs />
<div
  className="ml-auto text-sm flex items-center color-gray-medium gap-2"
  className={cn("ml-auto text-sm flex items-center color-gray-medium gap-2", { 'opacity-50 pointer-events-none' : hasIframe })}
  style={{ width: 'max-content' }}
>
  <Button icon="file-pdf" variant="text" onClick={showReportModal}>
|
|
|
|||
|
|
@@ -3,7 +3,7 @@ import React from 'react';
function Copyright() {
  return (
    <div className="fixed bottom-0 m-auto text-center mb-6 color-gray-medium">
      © 2023 OpenReplay. All rights reserved. <a className="underline" href="https://openreplay.com/privacy.html" target="_blank">Privacy</a> and <a className="underline" href="https://openreplay.com/terms.html" target="_blank">terms</a>.
      © 2023 OpenReplay. All rights reserved. <a className="underline" href="https://openreplay.com/privacy.html" target="_blank">Privacy</a> and <a className="underline" href="https://openreplay.com/terms.html" target="_blank">Terms</a>.
    </div>
  );
}
|
|
|
|||
|
|
@@ -16,7 +16,7 @@ import cn from 'classnames';
import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG';
import { numberWithCommas } from 'App/utils';

const AUTOREFRESH_INTERVAL = 0.5 * 60 * 1000;
const AUTOREFRESH_INTERVAL = 2 * 60 * 1000;
const PER_PAGE = 10;

interface Props {
|
|
|
|||
frontend/app/components/shared/NotFoundPage.tsx (new file, 24 lines)

@@ -0,0 +1,24 @@
import React from 'react';

function NotFoundPage() {
  return (
    <div
      className='inset-0 flex items-center justify-center absolute'
      style={{
        height: 'calc(100vh - 50px)'
        // zIndex: '999',
      }}
    >
      <div className='flex flex-col items-center'>
        <div className='text-2xl -mt-8'>
          Session not found.
        </div>
        <div className='text-sm'>
          Please check your data retention policy.
        </div>
      </div>
    </div>
  );
}

export default NotFoundPage;
|
|
@@ -52,12 +52,8 @@ function SessionHeader(props: Props) {
<>
<SessionTags />
<div className="mr-auto" />
{listCount > 0 && (
<>
<SelectDateRange period={period} onChange={onDateChange} right={true} />
<div className="mx-2" />
</>
)}
<SelectDateRange period={period} onChange={onDateChange} right={true} />
<div className="mx-2" />
</>
)}
<SessionSort />
|
|
|
|||
|
|
@@ -6,4 +6,6 @@ export const GLOBAL_DESTINATION_PATH = "__$global-destinationPath$__"
export const GLOBAL_HAS_NO_RECORDINGS = "__$global-hasNoRecordings$__"
export const SITE_ID_STORAGE_KEY = "__$user-siteId$__"
export const GETTING_STARTED = "__$user-gettingStarted$__"
export const MOUSE_TRAIL = "__$session-mouseTrail$__"
export const IFRAME = "__$session-iframe$__"
export const JWT_PARAM = "__$session-jwt-param$__"
|
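The two new keys above back the `checkParam('iframe', IFRAME)` and `checkParam('jwt', JWT_PARAM)` calls in the Router. A plausible sketch of that helper (the real `checkParam` in `App/utils` may behave differently):

```typescript
// Reads a query parameter once, persists the fact that it was present,
// and keeps reporting true on later renders within the same browser.
export function checkParam(param: string, storageKey: string): boolean {
  const params = new URLSearchParams(window.location.search);
  if (params.has(param)) {
    localStorage.setItem(storageKey, 'true');
    return true;
  }
  return localStorage.getItem(storageKey) === 'true';
}
```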
|
@@ -152,7 +152,9 @@ export default class AssistManager {
const urlObject = new URL(window.env.API_EDP || window.location.origin) // does it handle ssl automatically?

const socket: Socket = this.socket = io(urlObject.origin, {
  withCredentials: true,
  multiplex: true,
  transports: ['websocket'],
  path: '/ws-assist/socket',
  auth: {
    token: agentToken
|
|
|
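The client above now passes an agent token through the socket.io `auth` payload. On the assist server this would typically be read in a connection middleware; a hedged sketch under assumptions (the verify function and error message are illustrative, not the project's actual code):

```typescript
import { Server, Socket } from 'socket.io';

// Hypothetical verifier; the real service validates the agent JWT elsewhere.
declare function verifyAgentToken(token: string | undefined): boolean;

function requireAgentToken(io: Server): void {
  io.use((socket: Socket, next: (err?: Error) => void) => {
    // socket.handshake.auth carries whatever the client sent in `auth: { token }`.
    const token = socket.handshake.auth?.token as string | undefined;
    if (!verifyAgentToken(token)) {
      return next(new Error('unauthorized'));
    }
    next();
  });
}
```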
|||
|
|
@ -2,293 +2,306 @@ import type Peer from 'peerjs';
|
|||
import type { MediaConnection } from 'peerjs';
|
||||
|
||||
import type { LocalStream } from './LocalStream';
|
||||
import type { Socket } from './types'
|
||||
import type { Store } from '../../common/types'
|
||||
import type { Socket } from './types';
|
||||
import type { Store } from '../../common/types';
|
||||
|
||||
import appStore from 'App/store';
|
||||
|
||||
|
||||
export enum CallingState {
|
||||
NoCall,
|
||||
Connecting,
|
||||
Requesting,
|
||||
Reconnecting,
|
||||
OnCall,
|
||||
NoCall,
|
||||
Connecting,
|
||||
Requesting,
|
||||
Reconnecting,
|
||||
OnCall,
|
||||
}
|
||||
|
||||
export interface State {
|
||||
calling: CallingState;
|
||||
currentTab?: string;
|
||||
calling: CallingState;
|
||||
currentTab?: string;
|
||||
}
|
||||
|
||||
export default class Call {
|
||||
private assistVersion = 1
|
||||
static readonly INITIAL_STATE: Readonly<State> = {
|
||||
calling: CallingState.NoCall
|
||||
}
|
||||
private assistVersion = 1;
|
||||
static readonly INITIAL_STATE: Readonly<State> = {
|
||||
calling: CallingState.NoCall,
|
||||
};
|
||||
|
||||
private _peer: Peer | null = null
|
||||
private connectionAttempts: number = 0
|
||||
private callConnection: MediaConnection[] = []
|
||||
private videoStreams: Record<string, MediaStreamTrack> = {}
|
||||
private _peer: Peer | null = null;
|
||||
private connectionAttempts: number = 0;
|
||||
private callConnection: MediaConnection[] = [];
|
||||
private videoStreams: Record<string, MediaStreamTrack> = {};
|
||||
|
||||
constructor(
|
||||
private store: Store<State>,
|
||||
private socket: Socket,
|
||||
private config: RTCIceServer[] | null,
|
||||
private peerID: string,
|
||||
private getAssistVersion: () => number
|
||||
) {
|
||||
socket.on('call_end', this.onRemoteCallEnd)
|
||||
socket.on('videofeed', ({ streamId, enabled }) => {
|
||||
console.log(streamId, enabled)
|
||||
console.log(this.videoStreams)
|
||||
constructor(
|
||||
private store: Store<State>,
|
||||
private socket: Socket,
|
||||
private config: RTCIceServer[] | null,
|
||||
private peerID: string,
|
||||
private getAssistVersion: () => number
|
||||
) {
|
||||
socket.on('call_end', this.onRemoteCallEnd);
|
||||
socket.on('videofeed', ({ streamId, enabled }) => {
|
||||
console.log(streamId, enabled);
|
||||
console.log(this.videoStreams);
|
||||
if (this.videoStreams[streamId]) {
|
||||
this.videoStreams[streamId].enabled = enabled
|
||||
this.videoStreams[streamId].enabled = enabled;
|
||||
}
|
||||
console.log(this.videoStreams)
|
||||
})
|
||||
let reconnecting = false
|
||||
console.log(this.videoStreams);
|
||||
});
|
||||
let reconnecting = false;
|
||||
socket.on('SESSION_DISCONNECTED', () => {
|
||||
if (this.store.get().calling === CallingState.OnCall) {
|
||||
this.store.update({ calling: CallingState.Reconnecting })
|
||||
reconnecting = true
|
||||
} else if (this.store.get().calling === CallingState.Requesting){
|
||||
this.store.update({ calling: CallingState.NoCall })
|
||||
if (this.store.get().calling === CallingState.OnCall) {
|
||||
this.store.update({ calling: CallingState.Reconnecting });
|
||||
reconnecting = true;
|
||||
} else if (this.store.get().calling === CallingState.Requesting) {
|
||||
this.store.update({ calling: CallingState.NoCall });
|
||||
}
|
||||
})
|
||||
});
|
||||
socket.on('messages', () => {
|
||||
if (reconnecting) { // 'messages' come frequently, so it is better to have Reconnecting
|
||||
this._callSessionPeer()
|
||||
reconnecting = false
|
||||
}
|
||||
})
|
||||
socket.on("disconnect", () => {
|
||||
this.store.update({ calling: CallingState.NoCall })
|
||||
})
|
||||
this.assistVersion = this.getAssistVersion()
|
||||
}
|
||||
|
||||
private getPeer(): Promise<Peer> {
|
||||
if (this._peer && !this._peer.disconnected) { return Promise.resolve(this._peer) }
|
||||
|
||||
// @ts-ignore
|
||||
const urlObject = new URL(window.env.API_EDP || window.location.origin)
|
||||
|
||||
// @ts-ignore TODO: set module in ts settings
|
||||
return import('peerjs').then(({ default: Peer }) => {
|
||||
if (this.cleaned) {return Promise.reject("Already cleaned")}
|
||||
const peerOpts: Peer.PeerJSOption = {
|
||||
host: urlObject.hostname,
|
||||
path: '/assist',
|
||||
port: urlObject.port === "" ? (location.protocol === 'https:' ? 443 : 80 ): parseInt(urlObject.port),
|
||||
}
|
||||
if (this.config) {
|
||||
peerOpts['config'] = {
|
||||
iceServers: this.config,
|
||||
//@ts-ignore
|
||||
sdpSemantics: 'unified-plan',
|
||||
iceTransportPolicy: 'all',
|
||||
};
|
||||
}
|
||||
const peer = this._peer = new Peer(peerOpts)
|
||||
peer.on('call', call => {
|
||||
console.log('getting call from', call.peer)
|
||||
call.answer(this.callArgs.localStream.stream)
|
||||
this.callConnection.push(call)
|
||||
|
||||
this.callArgs.localStream.onVideoTrack(vTrack => {
|
||||
const sender = call.peerConnection.getSenders().find(s => s.track?.kind === "video")
|
||||
if (!sender) {
|
||||
console.warn("No video sender found")
|
||||
return
|
||||
}
|
||||
sender.replaceTrack(vTrack)
|
||||
})
|
||||
|
||||
call.on('stream', stream => {
|
||||
this.videoStreams[call.peer] = stream.getVideoTracks()[0]
|
||||
this.callArgs && this.callArgs.onStream(stream)
|
||||
});
|
||||
// call.peerConnection.addEventListener("track", e => console.log('newtrack',e.track))
|
||||
|
||||
call.on("close", this.onRemoteCallEnd)
|
||||
call.on("error", (e) => {
|
||||
console.error("PeerJS error (on call):", e)
|
||||
this.initiateCallEnd();
|
||||
this.callArgs && this.callArgs.onError && this.callArgs.onError();
|
||||
});
|
||||
})
|
||||
peer.on('error', e => {
|
||||
if (e.type === 'disconnected') {
|
||||
return peer.reconnect()
|
||||
} else if (e.type !== 'peer-unavailable') {
|
||||
console.error(`PeerJS error (on peer). Type ${e.type}`, e);
|
||||
}
|
||||
|
||||
// call-reconnection connected
|
||||
// if (['peer-unavailable', 'network', 'webrtc'].includes(e.type)) {
|
||||
// this.setStatus(this.connectionAttempts++ < MAX_RECONNECTION_COUNT
|
||||
// ? ConnectionStatus.Connecting
|
||||
// : ConnectionStatus.Disconnected);
|
||||
// Reconnect...
|
||||
})
|
||||
|
||||
return new Promise(resolve => {
|
||||
peer.on("open", () => resolve(peer))
|
||||
})
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
|
||||
private handleCallEnd() {
|
||||
this.callArgs && this.callArgs.onCallEnd()
|
||||
this.callConnection[0] && this.callConnection[0].close()
|
||||
this.store.update({ calling: CallingState.NoCall })
|
||||
this.callArgs = null
|
||||
// TODO: We have it separated, right? (check)
|
||||
//this.toggleAnnotation(false)
|
||||
}
|
||||
private onRemoteCallEnd = () => {
|
||||
if ([CallingState.Requesting, CallingState.Connecting].includes(this.store.get().calling)) {
|
||||
this.callArgs && this.callArgs.onReject()
|
||||
this.callConnection[0] && this.callConnection[0].close()
|
||||
this.store.update({ calling: CallingState.NoCall })
|
||||
this.callArgs = null
|
||||
} else {
|
||||
this.handleCallEnd()
|
||||
}
|
||||
}
|
||||
|
||||
initiateCallEnd = async () => {
|
||||
this.emitData("call_end", appStore.getState().getIn([ 'user', 'account', 'name']))
|
||||
this.handleCallEnd()
|
||||
// TODO: We have it separated, right? (check)
|
||||
// const remoteControl = this.store.get().remoteControl
|
||||
// if (remoteControl === RemoteControlStatus.Enabled) {
|
||||
// this.socket.emit("release_control")
|
||||
// this.toggleRemoteControl(false)
|
||||
// }
|
||||
}
|
||||
|
||||
private emitData = (event: string, data?: any) => {
|
||||
if (this.getAssistVersion() === 1) {
|
||||
this.socket?.emit(event, data)
|
||||
} else {
|
||||
this.socket?.emit(event, { meta: { tabId: this.store.get().currentTab }, data })
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private callArgs: {
|
||||
localStream: LocalStream,
|
||||
onStream: (s: MediaStream)=>void,
|
||||
onCallEnd: () => void,
|
||||
onReject: () => void,
|
||||
onError?: ()=> void,
|
||||
} | null = null
|
||||
|
||||
setCallArgs(
|
||||
localStream: LocalStream,
|
||||
onStream: (s: MediaStream)=>void,
|
||||
onCallEnd: () => void,
|
||||
onReject: () => void,
|
||||
onError?: (e?: any)=> void,
|
||||
) {
|
||||
this.callArgs = {
|
||||
localStream,
|
||||
onStream,
|
||||
onCallEnd,
|
||||
onReject,
|
||||
onError,
|
||||
}
|
||||
}
|
||||
|
||||
call(thirdPartyPeers?: string[]): { end: () => void } {
|
||||
if (thirdPartyPeers && thirdPartyPeers.length > 0) {
|
||||
this.addPeerCall(thirdPartyPeers)
|
||||
} else {
|
||||
this._callSessionPeer()
|
||||
}
|
||||
return {
|
||||
end: this.initiateCallEnd,
|
||||
}
|
||||
}
|
||||
|
||||
toggleVideoLocalStream(enabled: boolean) {
|
||||
this.getPeer().then((peer) => {
|
||||
this.emitData('videofeed', { streamId: peer.id, enabled })
|
||||
})
|
if (reconnecting) {
// 'messages' come frequently, so it is better to have Reconnecting
this._callSessionPeer();
reconnecting = false;
}
});
socket.on('disconnect', () => {
this.store.update({ calling: CallingState.NoCall });
});
this.assistVersion = this.getAssistVersion();
}

private getPeer(): Promise<Peer> {
if (this._peer && !this._peer.disconnected) {
return Promise.resolve(this._peer);
}

/** Connecting to the other agents that are already
* in the call with the user
*/
addPeerCall(thirdPartyPeers: string[]) {
thirdPartyPeers.forEach(peer => this._peerConnection(peer))
}
// @ts-ignore
const urlObject = new URL(window.env.API_EDP || window.location.origin);

/** Connecting to the app user */
private _callSessionPeer() {
if (![CallingState.NoCall, CallingState.Reconnecting].includes(this.store.get().calling)) { return }
this.store.update({ calling: CallingState.Connecting })
const tab = this.store.get().currentTab
if (!this.store.get().currentTab) {
console.warn('No tab data to connect to peer')
}
const peerId = this.getAssistVersion() === 1 ? this.peerID : `${this.peerID}-${tab || Object.keys(this.store.get().tabs)[0]}`
console.log(peerId, this.getAssistVersion())
void this._peerConnection(peerId);
this.emitData("_agent_name", appStore.getState().getIn([ 'user', 'account', 'name']))
}
// @ts-ignore TODO: set module in ts settings
return import('peerjs').then(({ default: Peer }) => {
if (this.cleaned) {
return Promise.reject('Already cleaned');
}
const peerOpts: Peer.PeerJSOption = {
host: urlObject.hostname,
path: '/assist',
port:
urlObject.port === ''
? location.protocol === 'https:'
? 443
: 80
: parseInt(urlObject.port),
};
if (this.config) {
peerOpts['config'] = {
iceServers: this.config,
//@ts-ignore
sdpSemantics: 'unified-plan',
iceTransportPolicy: 'all',
};
}
const peer = (this._peer = new Peer(peerOpts));
peer.on('call', (call) => {
console.log('getting call from', call.peer);
call.answer(this.callArgs.localStream.stream);
this.callConnection.push(call);

private async _peerConnection(remotePeerId: string) {
try {
const peer = await this.getPeer();
const call = peer.call(remotePeerId, this.callArgs.localStream.stream)
this.callConnection.push(call)
this.callArgs.localStream.onVideoTrack((vTrack) => {
const sender = call.peerConnection.getSenders().find((s) => s.track?.kind === 'video');
if (!sender) {
console.warn('No video sender found');
return;
}
sender.replaceTrack(vTrack);
});

this.callArgs.localStream.onVideoTrack(vTrack => {
const sender = call.peerConnection.getSenders().find(s => s.track?.kind === "video")
if (!sender) {
console.warn("No video sender found")
return
}
sender.replaceTrack(vTrack)
})
call.on('stream', (stream) => {
this.videoStreams[call.peer] = stream.getVideoTracks()[0];
this.callArgs && this.callArgs.onStream(stream);
});
// call.peerConnection.addEventListener("track", e => console.log('newtrack',e.track))

call.on('stream', stream => {
this.store.get().calling !== CallingState.OnCall && this.store.update({ calling: CallingState.OnCall })
call.on('close', this.onRemoteCallEnd);
call.on('error', (e) => {
console.error('PeerJS error (on call):', e);
this.initiateCallEnd();
this.callArgs && this.callArgs.onError && this.callArgs.onError();
});
});
peer.on('error', (e) => {
if (e.type === 'disconnected') {
return peer.reconnect();
} else if (e.type !== 'peer-unavailable') {
console.error(`PeerJS error (on peer). Type ${e.type}`, e);
}

this.videoStreams[call.peer] = stream.getVideoTracks()[0]
// call-reconnection connected
// if (['peer-unavailable', 'network', 'webrtc'].includes(e.type)) {
// this.setStatus(this.connectionAttempts++ < MAX_RECONNECTION_COUNT
// ? ConnectionStatus.Connecting
// : ConnectionStatus.Disconnected);
// Reconnect...
});

this.callArgs && this.callArgs.onStream(stream)
});
// call.peerConnection.addEventListener("track", e => console.log('newtrack',e.track))
return new Promise((resolve) => {
peer.on('open', () => resolve(peer));
});
});
}

call.on("close", this.onRemoteCallEnd)
call.on("error", (e) => {
console.error("PeerJS error (on call):", e)
this.initiateCallEnd();
this.callArgs && this.callArgs.onError && this.callArgs.onError();
});
} catch (e) {
console.error(e)
}
}
private handleCallEnd() {
this.callArgs && this.callArgs.onCallEnd();
this.callConnection[0] && this.callConnection[0].close();
this.store.update({ calling: CallingState.NoCall });
this.callArgs = null;
// TODO: We have it separated, right? (check)
//this.toggleAnnotation(false)
}

private cleaned: boolean = false
clean() {
this.cleaned = true // sometimes cleaned before modules loaded
this.initiateCallEnd()
if (this._peer) {
console.log("destroying peer...")
const peer = this._peer; // otherwise it calls reconnection on data chan close
this._peer = null;
peer.disconnect();
peer.destroy();
}
}
}

private onRemoteCallEnd = () => {
if ([CallingState.Requesting, CallingState.Connecting].includes(this.store.get().calling)) {
this.callArgs && this.callArgs.onReject();
this.callConnection[0] && this.callConnection[0].close();
this.store.update({ calling: CallingState.NoCall });
this.callArgs = null;
} else {
this.handleCallEnd();
}
};

initiateCallEnd = async () => {
this.emitData('call_end', appStore.getState().getIn(['user', 'account', 'name']));
this.handleCallEnd();
// TODO: We have it separated, right? (check)
// const remoteControl = this.store.get().remoteControl
// if (remoteControl === RemoteControlStatus.Enabled) {
// this.socket.emit("release_control")
// this.toggleRemoteControl(false)
// }
};

private emitData = (event: string, data?: any) => {
if (this.getAssistVersion() === 1) {
this.socket?.emit(event, data);
} else {
this.socket?.emit(event, { meta: { tabId: this.store.get().currentTab }, data });
}
};

private callArgs: {
localStream: LocalStream;
onStream: (s: MediaStream) => void;
onCallEnd: () => void;
onReject: () => void;
onError?: () => void;
} | null = null;

setCallArgs(
localStream: LocalStream,
onStream: (s: MediaStream) => void,
onCallEnd: () => void,
onReject: () => void,
onError?: (e?: any) => void
) {
this.callArgs = {
localStream,
onStream,
onCallEnd,
onReject,
onError,
};
}

call(thirdPartyPeers?: string[]): { end: () => void } {
if (thirdPartyPeers && thirdPartyPeers.length > 0) {
this.addPeerCall(thirdPartyPeers);
} else {
this._callSessionPeer();
}
return {
end: this.initiateCallEnd,
};
}

toggleVideoLocalStream(enabled: boolean) {
this.getPeer().then((peer) => {
this.emitData('videofeed', { streamId: peer.id, enabled });
});
}

/** Connecting to the other agents that are already
* in the call with the user
*/
addPeerCall(thirdPartyPeers: string[]) {
thirdPartyPeers.forEach((peer) => this._peerConnection(peer));
}

/** Connecting to the app user */
private _callSessionPeer() {
if (![CallingState.NoCall, CallingState.Reconnecting].includes(this.store.get().calling)) {
return;
}
this.store.update({ calling: CallingState.Connecting });
const tab = this.store.get().currentTab;
if (!this.store.get().currentTab) {
console.warn('No tab data to connect to peer');
}
const peerId =
this.getAssistVersion() === 1
? this.peerID
: `${this.peerID}-${tab || Object.keys(this.store.get().tabs)[0]}`;
console.log(peerId, this.getAssistVersion());
void this._peerConnection(peerId);
this.emitData('_agent_name', appStore.getState().getIn(['user', 'account', 'name']));
}

private async _peerConnection(remotePeerId: string) {
try {
const peer = await this.getPeer();
const call = peer.call(remotePeerId, this.callArgs.localStream.stream);
this.callConnection.push(call);

this.callArgs.localStream.onVideoTrack((vTrack) => {
const sender = call.peerConnection.getSenders().find((s) => s.track?.kind === 'video');
if (!sender) {
console.warn('No video sender found');
return;
}
sender.replaceTrack(vTrack);
});

call.on('stream', (stream) => {
this.store.get().calling !== CallingState.OnCall &&
this.store.update({ calling: CallingState.OnCall });

this.videoStreams[call.peer] = stream.getVideoTracks()[0];

this.callArgs && this.callArgs.onStream(stream);
});
// call.peerConnection.addEventListener("track", e => console.log('newtrack',e.track))

call.on('close', this.onRemoteCallEnd);
call.on('error', (e) => {
console.error('PeerJS error (on call):', e);
this.initiateCallEnd();
this.callArgs && this.callArgs.onError && this.callArgs.onError();
});
} catch (e) {
console.error(e);
}
}

private cleaned: boolean = false;

clean() {
this.cleaned = true; // sometimes cleaned before modules loaded
this.initiateCallEnd();
if (this._peer) {
console.log('destroying peer...');
const peer = this._peer; // otherwise it calls reconnection on data chan close
this._peer = null;
peer.disconnect();
peer.destroy();
}
}
}
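For orientation, the agent-side flow in the diff above reduces to: open a Peer against the /assist endpoint, place (or answer) a call with the local audio/video stream, and swap the real camera track into the existing sender once video is enabled. The sketch below is only an illustration of that PeerJS pattern under assumed inputs (the host, remote peer id and stream wiring are placeholders, not OpenReplay's actual configuration):

    import Peer, { MediaConnection } from 'peerjs';

    // Illustrative only: hypothetical host/peer id, not the app's real wiring.
    async function sketchAssistCall(remotePeerId: string, localStream: MediaStream) {
      const peer = new Peer({ host: 'assist.example.com', path: '/assist', port: 443, secure: true });
      await new Promise<void>((resolve) => peer.on('open', () => resolve()));

      // Outgoing call: send our stream, render whatever the remote side answers with.
      const call: MediaConnection = peer.call(remotePeerId, localStream);
      call.on('stream', (remote) => {
        // Placeholder: attach `remote` to a <video> element here.
      });

      // When a real camera track appears later, replace the dummy video track
      // on the existing RTCPeerConnection instead of renegotiating the call.
      const swapVideo = (track: MediaStreamTrack) => {
        const sender = call.peerConnection.getSenders().find((s) => s.track?.kind === 'video');
        sender?.replaceTrack(track);
      };

      return { call, swapVideo };
    }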
@@ -1,16 +1,18 @@
import { audioContextManager } from 'App/utils/screenRecorder';

declare global {
interface HTMLCanvasElement {
captureStream(frameRate?: number): MediaStream;
interface HTMLCanvasElement {
captureStream(frameRate?: number): MediaStream;
}
}

function dummyTrack(): MediaStreamTrack {
const canvas = document.createElement("canvas")//, { width: 0, height: 0})
canvas.width=canvas.height=2 // Doesn't work when 1 (?!)
function dummyTrack(): MediaStreamTrack {
const canvas = document.createElement('canvas'); //, { width: 0, height: 0})
canvas.width = canvas.height = 2; // Doesn't work when 1 (?!)
const ctx = canvas.getContext('2d');
ctx?.fillRect(0, 0, canvas.width, canvas.height);
requestAnimationFrame(function draw(){
ctx?.fillRect(0,0, canvas.width, canvas.height)
requestAnimationFrame(function draw() {
ctx?.fillRect(0, 0, canvas.width, canvas.height);
requestAnimationFrame(draw);
});
// Also works. Probably it should be done once connected.

@@ -19,68 +21,72 @@ function dummyTrack(): MediaStreamTrack {
}

export function RequestLocalStream(): Promise<LocalStream> {
return navigator.mediaDevices.getUserMedia({ audio:true })
.then(aStream => {
const aTrack = aStream.getAudioTracks()[0]
if (!aTrack) { throw new Error("No audio tracks provided") }
return new _LocalStream(aTrack)
})
return navigator.mediaDevices.getUserMedia({ audio: true }).then((aStream) => {
const aTrack = aStream.getAudioTracks()[0];
if (!aTrack) {
throw new Error('No audio tracks provided');
}
return new _LocalStream(aTrack);
});
}

class _LocalStream {
private mediaRequested: boolean = false
readonly stream: MediaStream
private readonly vdTrack: MediaStreamTrack
private mediaRequested: boolean = false;
readonly stream: MediaStream;
private readonly vdTrack: MediaStreamTrack;

constructor(aTrack: MediaStreamTrack) {
this.vdTrack = dummyTrack()
this.stream = new MediaStream([ aTrack, this.vdTrack ])
this.vdTrack = dummyTrack();
this.stream = new MediaStream([aTrack, this.vdTrack]);
}

toggleVideo(): Promise<boolean> {
if (!this.mediaRequested) {
return navigator.mediaDevices.getUserMedia({video:true})
.then(vStream => {
const vTrack = vStream.getVideoTracks()[0]
if (!vTrack) {
throw new Error("No video track provided")
}
this.stream.addTrack(vTrack)
this.stream.removeTrack(this.vdTrack)
this.mediaRequested = true
if (this.onVideoTrackCb) {
this.onVideoTrackCb(vTrack)
}
return true
})
.catch(e => {
// TODO: log
console.error(e)
return false
})
return navigator.mediaDevices
.getUserMedia({ video: true })
.then((vStream) => {
const vTrack = vStream.getVideoTracks()[0];
if (!vTrack) {
throw new Error('No video track provided');
}
this.stream.addTrack(vTrack);
this.stream.removeTrack(this.vdTrack);
this.mediaRequested = true;
if (this.onVideoTrackCb) {
this.onVideoTrackCb(vTrack);
}
return true;
})
.catch((e) => {
// TODO: log
console.error(e);
return false;
});
}
let enabled = true
this.stream.getVideoTracks().forEach(track => {
track.enabled = enabled = enabled && !track.enabled
})
return Promise.resolve(enabled)
let enabled = true;
this.stream.getVideoTracks().forEach((track) => {
track.enabled = enabled = enabled && !track.enabled;
});
return Promise.resolve(enabled);
}

toggleAudio(): boolean {
let enabled = true
this.stream.getAudioTracks().forEach(track => {
track.enabled = enabled = enabled && !track.enabled
})
return enabled
let enabled = true;
this.stream.getAudioTracks().forEach((track) => {
track.enabled = enabled = enabled && !track.enabled;
});
return enabled;
}

private onVideoTrackCb: ((t: MediaStreamTrack) => void) | null = null
private onVideoTrackCb: ((t: MediaStreamTrack) => void) | null = null;

onVideoTrack(cb: (t: MediaStreamTrack) => void) {
this.onVideoTrackCb = cb
this.onVideoTrackCb = cb;
}

stop() {
this.stream.getTracks().forEach(t => t.stop())
this.stream.getTracks().forEach((t) => t.stop());
}
}

export type LocalStream = InstanceType<typeof _LocalStream>
export type LocalStream = InstanceType<typeof _LocalStream>;
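For context, here is a rough usage sketch of the LocalStream wrapper above; it assumes RequestLocalStream from this module is in scope, and the call-site names are illustrative rather than taken from the codebase:

    // Illustrative only: how a caller might drive RequestLocalStream/LocalStream.
    async function startCallSketch() {
      const localStream = await RequestLocalStream(); // mic audio track + dummy video track
      // localStream.stream is what gets passed to peer.call(...) / call.answer(...).
      const videoOn = await localStream.toggleVideo(); // requests the camera on first use
      const audioOn = localStream.toggleAudio();       // flips the enabled flag on audio tracks
      console.log({ videoOn, audioOn });
      localStream.stop();                              // stops all tracks when the call ends
    }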
@@ -93,7 +93,10 @@ export default class MFileReader extends RawMessageReader {
this.startTime = rMsg.timestamp
}
this.currentTime = rMsg.timestamp - this.startTime
return this.readNext()
return {
tp: 9999,
time: this.currentTime,
}
}

const index = this.noIndexes ? 0 : this.getLastMessageID()

@@ -80,8 +80,8 @@ export default Record(
const offset = period.timezoneOffset || 0
if (!period.rangeName || period.rangeName === CUSTOM_RANGE) {
const range = moment.range(
moment(period.start || 0),
moment(period.end || 0)
moment(period.start || 0, "x"),
moment(period.end || 0, "x")
);
return {
...period,
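The "x" format token in the change above tells moment to treat the stored start/end values as Unix millisecond timestamps, which matters when they arrive as strings (plain string input would otherwise fall back to unreliable Date parsing). A tiny illustration with a made-up value:

    import moment from 'moment';

    const ms = '1700000000000';   // hypothetical epoch-ms value, as a string
    moment(ms, 'x').valueOf();    // 1700000000000 – parsed as a Unix ms timestamp
    moment(ms).isValid();         // unreliable: falls back to Date-string parsing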
@@ -88,6 +88,9 @@ export default Record({
const range = getDateRangeFromValue(rValue);
startDate = range.start.unix() * 1000;
endDate = range.end.unix() * 1000;
} else if (filter.startDate && filter.endDate) {
startDate = filter.startDate;
endDate = filter.endDate;
}
return {
...filter,

@@ -426,3 +426,43 @@ export function deleteCookie(name: string, path: string, domain: string) {
(domain ? ';domain=' + domain : '') +
';expires=Thu, 01 Jan 1970 00:00:01 GMT';
}

/**
 * Checks if a specified query parameter exists in the URL and if its value is set to 'true' (or to any non-empty value).
 * If a storageKey is provided, stores the result in localStorage under that key.
 *
 * @function
 * @param {string} paramName - The name of the URL parameter to check.
 * @param {string} [storageKey] - The optional key to use for storing the result in localStorage.
 * @returns {boolean} - Returns true if the parameter exists and its value is 'true' or any non-empty string. Otherwise, returns false.
 *
 * @example
 * // Assuming URL is: http://example.com/?iframe=true
 * const isIframeEnabled = checkParam('iframe'); // Returns true, doesn't store in localStorage
 * const isIframeEnabledWithStorage = checkParam('iframe', 'storageKey'); // Returns true, stores in localStorage
 *
 * @description
 * The function inspects the current URL's query parameters. If the specified parameter exists
 * and its value is set to 'true', and a storageKey is provided, the function stores 'true' under
 * the provided storage key in the localStorage. If the condition is not met, or if the parameter
 * does not exist, and a storageKey is provided, any existing localStorage entry with the storageKey
 * is removed.
 */
export const checkParam = (paramName: string, storageKey?: string): boolean => {
const urlParams = new URLSearchParams(window.location.search);
const paramValue = urlParams.get(paramName);

const existsAndTrue = paramValue && paramValue === 'true' || paramValue?.length > 0;

if (storageKey) {
if (existsAndTrue) {
localStorage.setItem(storageKey, 'true');
} else {
localStorage.removeItem(storageKey);
}
}

return existsAndTrue;
};
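One way a caller might consume the persisted flag on a later navigation, once the query parameter is gone; the 'or_iframe' storage key here is only an example, not necessarily what the app uses:

    // Illustrative: persist the ?iframe=... flag once, then read it elsewhere.
    const isIframe = checkParam('iframe', 'or_iframe');

    // On a subsequent page load without the query parameter:
    const stillIframe = localStorage.getItem('or_iframe') === 'true';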
@@ -1,5 +1,29 @@
import { toast } from 'react-toastify';

class AudioContextManager {
context = new AudioContext();
destination = this.context.createMediaStreamDestination();

getAllTracks() {
return this.destination.stream.getAudioTracks() || [];
}

mergeAudioStreams(stream: MediaStream) {
const source = this.context.createMediaStreamSource(stream);
const gain = this.context.createGain();
gain.gain.value = 0.7;
return source.connect(gain).connect(this.destination);
}

clear() {
// when everything is removed, tracks will be stopped automatically (hopefully)
this.context = new AudioContext();
this.destination = this.context.createMediaStreamDestination();
}
}

export const audioContextManager = new AudioContextManager();

const FILE_TYPE = 'video/webm';
const FRAME_RATE = 30;
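A sketch of how the manager above folds several audio sources into one recordable set of tracks: each merged stream goes through its own gain node into the shared MediaStreamDestination. The microphone request is an assumption added for illustration; recordScreen() below does the same thing with only the display-capture stream:

    // Illustrative only: merge tab/system audio and a microphone into one track set.
    async function mergedAudioSketch(displayStream: MediaStream) {
      const mic = await navigator.mediaDevices.getUserMedia({ audio: true });
      audioContextManager.mergeAudioStreams(displayStream); // gain-attenuated into the shared destination
      audioContextManager.mergeAudioStreams(mic);
      return audioContextManager.getAllTracks();            // audio tracks of the shared destination stream
    }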
@@ -16,7 +40,7 @@ function createFileRecorder(

let recordedChunks: BlobPart[] = [];
const SAVE_INTERVAL_MS = 200;
const mediaRecorder = new MediaRecorder(stream);
const mediaRecorder = new MediaRecorder(stream, { mimeType: 'video/webm; codecs=vp8,opus' });

mediaRecorder.ondataavailable = function (e) {
if (e.data.size > 0) {

@@ -29,7 +53,7 @@ function createFileRecorder(

ended = true;
saveFile(recordedChunks, mimeType, start, recName, sessionId, saveCb);
onStop()
onStop();
recordedChunks = [];
}

@@ -74,13 +98,24 @@ function saveFile(
}

async function recordScreen() {
return await navigator.mediaDevices.getDisplayMedia({
audio: true,
const desktopStreams = await navigator.mediaDevices.getDisplayMedia({
audio: {
// @ts-ignore
restrictOwnAudio: false,
echoCancellation: true,
noiseSuppression: false,
sampleRate: 44100,
},
video: { frameRate: FRAME_RATE },
// potential chrome hack
// @ts-ignore
preferCurrentTab: true,
});
audioContextManager.mergeAudioStreams(desktopStreams);
return new MediaStream([
...desktopStreams.getVideoTracks(),
...audioContextManager.getAllTracks(),
]);
}

/**

@@ -94,7 +129,18 @@ async function recordScreen() {
 *
 * @returns a promise that resolves to a function that stops the recording
 */
export async function screenRecorder(recName: string, sessionId: string, saveCb: (saveObj: { name: string; duration: number }, blob: Blob) => void, onStop: () => void) {
export async function screenRecorder(
recName: string,
sessionId: string,
saveCb: (
saveObj: {
name: string;
duration: number;
},
blob: Blob
) => void,
onStop: () => void
) {
try {
const stream = await recordScreen();
const mediaRecorder = createFileRecorder(stream, FILE_TYPE, recName, sessionId, saveCb, onStop);

@@ -102,11 +148,13 @@ export async function screenRecorder(recName: string, sessionId: string, saveCb:
return () => {
if (mediaRecorder.state !== 'inactive') {
mediaRecorder.stop();
onStop()
onStop();
}
}
};
} catch (e) {
toast.error('Screen recording is not permitted by your system and/or browser. Make sure to enable it in your browser as well as in your system settings.');
toast.error(
'Screen recording is not permitted by your system and/or browser. Make sure to enable it in your browser as well as in your system settings.'
);
throw new Error('OpenReplay recording: ' + e);
}
}
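Putting the pieces together, a hedged usage sketch of screenRecorder as declared above: it resolves to a stop function once recording starts, and the save callback body here is just a placeholder:

    // Illustrative only: start a recording and stop it later via the returned function.
    async function recordSessionSketch(sessionId: string) {
      const stop = await screenRecorder(
        `replay-${sessionId}`,
        sessionId,
        (saveObj, blob) => {
          // Placeholder: upload or download the recorded blob here.
          console.log('saved', saveObj.name, saveObj.duration, blob.size);
        },
        () => console.log('recording stopped')
      );
      // ...later, when the user ends the recording:
      stop();
    }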
@@ -52,7 +52,7 @@ export const getFiltersFromQuery = (search: string, filter: any) => {
const period: any = getPeriodFromEntries(entires);
const filters = getFiltersFromEntries(entires);

return Filter({ filters, rangeValue: period.rangeName });
return Filter({ filters, rangeValue: period.rangeName, startDate: period.start, endDate: period.end });
};

const getFiltersFromEntries = (entires: any) => {

@@ -71,13 +71,9 @@ spec:
value: '{{ .Values.global.postgresql.postgresqlPassword }}'
{{- end}}
- name: SITE_URL
value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}'
value: '{{- include "openreplay.domainURL" . -}}'
- name: S3_HOST
{{- if contains "minio" .Values.global.s3.endpoint }}
value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}'
{{- else}}
value: '{{ .Values.global.s3.endpoint }}'
{{- end}}
value: '{{- include "openreplay.s3Endpoint" . }}'
- name: S3_KEY
{{- if .Values.global.s3.existingSecret }}
valueFrom:
@@ -15,10 +15,10 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.1
version: 0.1.2

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.14.0"
AppVersion: "v1.14.1"

@@ -15,10 +15,10 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.1
version: 0.1.2

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.14.0"
AppVersion: "v1.14.1"

@@ -15,10 +15,10 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.7
version: 0.1.8

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.14.0"
AppVersion: "v1.14.1"
@@ -84,13 +84,9 @@ spec:
value: '{{ .Values.global.postgresql.postgresqlPassword }}'
{{- end}}
- name: SITE_URL
value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}'
value: '{{- include "openreplay.domainURL" . }}'
- name: S3_HOST
{{- if contains "minio" .Values.global.s3.endpoint }}
value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}'
{{- else}}
value: '{{ .Values.global.s3.endpoint }}'
{{- end}}
value: '{{- include "openreplay.s3Endpoint" . }}'
- name: S3_KEY
{{- if .Values.global.s3.existingSecret }}
valueFrom:
@@ -15,10 +15,10 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.1
version: 0.1.3

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.14.0"
AppVersion: "v1.14.2"

@@ -15,10 +15,10 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.1
version: 0.1.2

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.14.0"
AppVersion: "v1.14.1"

@@ -15,10 +15,10 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.10
version: 0.1.19

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.14.0"
AppVersion: "v1.14.9"

@@ -15,10 +15,10 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.1
version: 0.1.3

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.14.0"
AppVersion: "v1.14.2"

@@ -15,10 +15,10 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.1
version: 0.1.2

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.14.0"
AppVersion: "v1.14.1"

@@ -15,10 +15,10 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.1
version: 0.1.2

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.14.0"
AppVersion: "v1.14.1"

@@ -15,10 +15,10 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.1
version: 0.1.2

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.14.0"
AppVersion: "v1.14.1"

@@ -15,10 +15,10 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.1
version: 0.1.3

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
AppVersion: "v1.14.0"
AppVersion: "v1.14.2"
@@ -55,13 +55,9 @@ spec:
value: '{{ .Values.global.postgresql.postgresqlPassword }}'
{{- end}}
- name: SITE_URL
value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}'
value: '{{- include "openreplay.domainURL" . }}'
- name: S3_HOST
{{- if contains "minio" .Values.global.s3.endpoint }}
value: 'https://{{ .Values.global.domainName }}:{{ .Values.global.ingress.controller.service.ports.https}}'
{{- else}}
value: '{{ .Values.global.s3.endpoint }}'
{{- end}}
value: '{{- include "openreplay.s3Endpoint" . }}'
- name: S3_KEY
{{- if .Values.global.s3.existingSecret }}
valueFrom:

@@ -55,13 +55,9 @@ spec:
value: '{{ .Values.global.postgresql.postgresqlPassword }}'
{{- end}}
- name: SITE_URL
value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}'
value: '{{- include "openreplay.domainURL" . }}'
- name: S3_HOST
{{- if contains "minio" .Values.global.s3.endpoint }}
value: 'https://{{ .Values.global.domainName }}:{{ .Values.global.ingress.controller.service.ports.https}}'
{{- else}}
value: '{{ .Values.global.s3.endpoint }}'
{{- end}}
value: '{{- include "openreplay.s3Endpoint" . }}'
- name: S3_KEY
{{- if .Values.global.s3.existingSecret }}
valueFrom:

@@ -55,13 +55,9 @@ spec:
value: '{{ .Values.global.postgresql.postgresqlPassword }}'
{{- end}}
- name: SITE_URL
value: '{{ ternary "https" "http" .Values.global.ORSecureAccess}}://{{ .Values.global.domainName }}:{{ ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess }}'
value: '{{- include "openreplay.domainURL" . }}'
- name: S3_HOST
{{- if contains "minio" .Values.global.s3.endpoint }}
value: 'https://{{ .Values.global.domainName }}:{{ .Values.global.ingress.controller.service.ports.https}}'
{{- else}}
value: '{{ .Values.global.s3.endpoint }}'
{{- end}}
value: '{{- include "openreplay.s3Endpoint" . }}'
- name: S3_KEY
{{- if .Values.global.s3.existingSecret }}
valueFrom:
@@ -5,6 +5,37 @@ Expand the name of the chart.
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/* Get domain name with/without port */}}
{{- define "openreplay.domainURL" -}}
{{- $scheme := ternary "https" "http" .Values.global.ORSecureAccess -}}
{{- $internalPort := ternary .Values.global.ingress.controller.service.ports.https .Values.global.ingress.controller.service.ports.http .Values.global.ORSecureAccess -}}
{{/* If you're running OR behind proxy
ingress-nginx: &ingress-nginx
externalProxyPorts:
http: 80
https: 443
*/}}
{{- $externalPort := $internalPort -}}
{{- if .Values.global.ingress.externalProxyPorts }}
{{- $externalPort = ternary .Values.global.ingress.externalProxyPorts.https .Values.global.ingress.externalProxyPorts.http .Values.global.ORSecureAccess -}}
{{- end }}
{{- $port := toString $externalPort -}}
{{- if or (eq $port "80") (eq $port "443") -}}
{{- printf "%s://%s" $scheme .Values.global.domainName -}}
{{- else -}}
{{- printf "%s://%s:%s" $scheme .Values.global.domainName $port -}}
{{- end -}}
{{- end -}}

{{/* Get the S3 endpoint value */}}
{{- define "openreplay.s3Endpoint" -}}
{{- if contains "minio" .Values.global.s3.endpoint -}}
{{- include "openreplay.domainURL" . -}}
{{- else -}}
{{- .Values.global.s3.endpoint -}}
{{- end -}}
{{- end -}}

{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
@@ -65,6 +65,10 @@ minio:
secretKey: "changeMeMinioPassword"

ingress-nginx: &ingress-nginx
# If you're using an external proxy in front of OpenReplay, update the proxy ports below.
# externalProxyPorts:
#   http: 80
#   https: 443
controller:
ingressClassResource:
# -- Name of the ingressClass