diff --git a/api/chalicelib/core/integration_base.py b/api/chalicelib/core/integration_base.py
index 45e1891a1..f8edaad62 100644
--- a/api/chalicelib/core/integration_base.py
+++ b/api/chalicelib/core/integration_base.py
@@ -1,4 +1,5 @@
from abc import ABC, abstractmethod
+
from chalicelib.utils import pg_client, helper
@@ -37,7 +38,7 @@ class BaseIntegration(ABC):
pass
@abstractmethod
- def update(self, changes):
+ def update(self, changes, obfuscate=False):
pass
@abstractmethod
diff --git a/api/chalicelib/core/integration_github.py b/api/chalicelib/core/integration_github.py
index a13946e46..a05c946f4 100644
--- a/api/chalicelib/core/integration_github.py
+++ b/api/chalicelib/core/integration_github.py
@@ -1,6 +1,6 @@
-from chalicelib.utils import pg_client, helper
-from chalicelib.core.integration_github_issue import GithubIntegrationIssue
from chalicelib.core import integration_base
+from chalicelib.core.integration_github_issue import GithubIntegrationIssue
+from chalicelib.utils import pg_client, helper
PROVIDER = "GITHUB"
@@ -15,8 +15,6 @@ class GitHubIntegration(integration_base.BaseIntegration):
def provider(self):
return PROVIDER
-
-
def get_obfuscated(self):
integration = self.get()
if integration is None:
@@ -24,7 +22,7 @@ class GitHubIntegration(integration_base.BaseIntegration):
token = "*" * (len(integration["token"]) - 4) + integration["token"][-4:]
return {"token": token, "provider": self.provider.lower()}
- def update(self, changes):
+ def update(self, changes, obfuscate=False):
with pg_client.PostgresClient() as cur:
sub_query = [f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys()]
cur.execute(
@@ -71,8 +69,11 @@ class GitHubIntegration(integration_base.BaseIntegration):
if s is not None:
return self.update(
changes={
- "token": data["token"]
- }
+ "token": data["token"] \
+ if data.get("token") and len(data["token"]) > 0 and data["token"].find("***") == -1 \
+ else s["token"]
+ },
+ obfuscate=True
)
else:
return self.add(token=data["token"])
diff --git a/api/chalicelib/core/integration_jira_cloud.py b/api/chalicelib/core/integration_jira_cloud.py
index 65bad1e4a..ea9c6c24e 100644
--- a/api/chalicelib/core/integration_jira_cloud.py
+++ b/api/chalicelib/core/integration_jira_cloud.py
@@ -1,10 +1,14 @@
-from chalicelib.utils import pg_client, helper
-from chalicelib.core.integration_jira_cloud_issue import JIRACloudIntegrationIssue
from chalicelib.core import integration_base
+from chalicelib.core.integration_jira_cloud_issue import JIRACloudIntegrationIssue
+from chalicelib.utils import pg_client, helper
PROVIDER = "JIRA"
+def obfuscate_string(string):
+ return "*" * (len(string) - 4) + string[-4:]
+
+
class JIRAIntegration(integration_base.BaseIntegration):
def __init__(self, tenant_id, user_id):
self.__tenant_id = tenant_id
@@ -36,11 +40,11 @@ class JIRAIntegration(integration_base.BaseIntegration):
integration = self.get()
if integration is None:
return None
- integration["token"] = "*" * (len(integration["token"]) - 4) + integration["token"][-4:]
+ integration["token"] = obfuscate_string(integration["token"])
integration["provider"] = self.provider.lower()
return integration
- def update(self, changes):
+ def update(self, changes, obfuscate=False):
with pg_client.PostgresClient() as cur:
sub_query = [f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys()]
cur.execute(
@@ -53,6 +57,8 @@ class JIRAIntegration(integration_base.BaseIntegration):
**changes})
)
w = helper.dict_to_camel_case(cur.fetchone())
+ if obfuscate:
+ w["token"] = obfuscate_string(w["token"])
return w
# TODO: make this generic for all issue tracking integrations
@@ -89,9 +95,12 @@ class JIRAIntegration(integration_base.BaseIntegration):
return self.update(
changes={
"username": data["username"],
- "token": data["token"],
+ "token": data["token"] \
+ if data.get("token") and len(data["token"]) > 0 and data["token"].find("***") == -1 \
+ else s["token"],
"url": data["url"]
- }
+ },
+ obfuscate=True
)
else:
return self.add(
diff --git a/api/chalicelib/core/saved_search.py b/api/chalicelib/core/saved_search.py
index d1e8fe15f..1999c6758 100644
--- a/api/chalicelib/core/saved_search.py
+++ b/api/chalicelib/core/saved_search.py
@@ -49,16 +49,6 @@ def update(search_id, project_id, user_id, data: schemas.SavedSearchSchema):
def get_all(project_id, user_id, details=False):
with pg_client.PostgresClient() as cur:
- print(cur.mogrify(
- f"""\
- SELECT search_id, project_id, user_id, name, created_at, deleted_at, is_public
- {",filter" if details else ""}
- FROM public.searches
- WHERE project_id = %(project_id)s
- AND deleted_at IS NULL
- AND (user_id = %(user_id)s OR is_public);""",
- {"project_id": project_id, "user_id": user_id}
- ))
cur.execute(
cur.mogrify(
f"""\
diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py
index 164cf366e..c095ce344 100644
--- a/api/chalicelib/core/sessions.py
+++ b/api/chalicelib/core/sessions.py
@@ -7,7 +7,6 @@ SESSION_PROJECTION_COLS = """s.project_id,
s.session_id::text AS session_id,
s.user_uuid,
s.user_id,
--- s.user_agent,
s.user_os,
s.user_browser,
s.user_device,
@@ -30,10 +29,10 @@ COALESCE((SELECT TRUE
def __group_metadata(session, project_metadata):
- meta = []
+ meta = {}
for m in project_metadata.keys():
if project_metadata[m] is not None and session.get(m) is not None:
- meta.append({project_metadata[m]: session[m]})
+ meta[project_metadata[m]] = session[m]
session.pop(m)
return meta
@@ -162,12 +161,16 @@ def _isAny_opreator(op: schemas.SearchEventOperator):
return op in [schemas.SearchEventOperator._on_any, schemas.SearchEventOperator._is_any]
+def _isUndefined_operator(op: schemas.SearchEventOperator):
+ return op in [schemas.SearchEventOperator._is_undefined]
+
+
@dev.timed
def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, favorite_only=False, errors_only=False,
error_status="ALL", count_only=False, issue=None):
full_args, query_part, sort = search_query_parts(data, error_status, errors_only, favorite_only, issue, project_id,
user_id)
-
+ meta_keys = []
with pg_client.PostgresClient() as cur:
if errors_only:
main_query = cur.mogrify(f"""SELECT DISTINCT er.error_id, ser.status, ser.parent_error_id, ser.payload,
@@ -186,13 +189,16 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f
COUNT(DISTINCT s.user_uuid) AS count_users
{query_part};""", full_args)
elif data.group_by_user:
- main_query = cur.mogrify(f"""SELECT COUNT(*) AS count, jsonb_agg(users_sessions) FILTER ( WHERE rn <= 200 ) AS sessions
+ meta_keys = metadata.get(project_id=project_id)
+ main_query = cur.mogrify(f"""SELECT COUNT(*) AS count, COALESCE(JSONB_AGG(users_sessions) FILTER ( WHERE rn <= 200 ), '[]'::JSONB) AS sessions
FROM (SELECT user_id,
count(full_sessions) AS user_sessions_count,
jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session,
+ MIN(full_sessions.start_ts) AS first_session_ts,
ROW_NUMBER() OVER (ORDER BY count(full_sessions) DESC) AS rn
FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY start_ts DESC) AS rn
- FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
+                                                  FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
+                                                  {"," + ",".join([f'metadata_{m["index"]}' for m in meta_keys]) if len(meta_keys) > 0 else ""}
{query_part}
ORDER BY s.session_id desc) AS filtred_sessions
ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions
@@ -200,9 +206,11 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f
ORDER BY user_sessions_count DESC) AS users_sessions;""",
full_args)
else:
+ meta_keys = metadata.get(project_id=project_id)
main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, COALESCE(JSONB_AGG(full_sessions) FILTER (WHERE rn <= 200), '[]'::JSONB) AS sessions
FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY favorite DESC, issue_score DESC, session_id desc, start_ts desc) AS rn
- FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
+                                                  FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
+                                                  {"," + ",".join([f'metadata_{m["index"]}' for m in meta_keys]) if len(meta_keys) > 0 else ""}
{query_part}
ORDER BY s.session_id desc) AS filtred_sessions
ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions;""",
@@ -237,6 +245,16 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f
if errors_only:
return sessions
+ if data.group_by_user:
+ for i, s in enumerate(sessions):
+ sessions[i] = {**s.pop("last_session")[0], **s}
+ sessions[i].pop("rn")
+ sessions[i]["metadata"] = {k["key"]: sessions[i][f'metadata_{k["index"]}'] for k in meta_keys \
+ if sessions[i][f'metadata_{k["index"]}'] is not None}
+ else:
+ for i, s in enumerate(sessions):
+ sessions[i]["metadata"] = {k["key"]: sessions[i][f'metadata_{k["index"]}'] for k in meta_keys \
+ if sessions[i][f'metadata_{k["index"]}'] is not None}
if not data.group_by_user and data.sort is not None and data.sort != "session_id":
sessions = sorted(sessions, key=lambda s: s[helper.key_to_snake_case(data.sort)],
reverse=data.order.upper() == "DESC")
@@ -250,7 +268,7 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f
def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
view_type: schemas.MetricViewType):
step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate,
- density=density, factor=1, decimal=True))
+ density=density, factor=1, decimal=True))
full_args, query_part, sort = search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None)
@@ -310,7 +328,8 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
op = __get_sql_operator(f.operator) \
if filter_type not in [schemas.FilterType.events_count] else f.operator
is_any = _isAny_opreator(f.operator)
- if not is_any and len(f.value) == 0:
+ is_undefined = _isUndefined_operator(f.operator)
+ if not is_any and not is_undefined and len(f.value) == 0:
continue
is_not = False
if __is_negation_operator(f.operator):
@@ -359,6 +378,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
if is_any:
extra_constraints.append('s.utm_source IS NOT NULL')
ss_constraints.append('ms.utm_source IS NOT NULL')
+ elif is_undefined:
+ extra_constraints.append('s.utm_source IS NULL')
+ ss_constraints.append('ms.utm_source IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
@@ -370,6 +392,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
if is_any:
extra_constraints.append('s.utm_medium IS NOT NULL')
ss_constraints.append('ms.utm_medium IS NOT NULL')
+ elif is_undefined:
+ extra_constraints.append('s.utm_medium IS NULL')
+ ss_constraints.append('ms.utm_medium IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
@@ -381,6 +406,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
if is_any:
extra_constraints.append('s.utm_campaign IS NOT NULL')
ss_constraints.append('ms.utm_campaign IS NOT NULL')
+ elif is_undefined:
+ extra_constraints.append('s.utm_campaign IS NULL')
+ ss_constraints.append('ms.utm_campaign IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f's.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
@@ -414,6 +442,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
if is_any:
extra_constraints.append(f"s.{metadata.index_to_colname(meta_keys[f.source])} IS NOT NULL")
ss_constraints.append(f"ms.{metadata.index_to_colname(meta_keys[f.source])} IS NOT NULL")
+ elif is_undefined:
+ extra_constraints.append(f"s.{metadata.index_to_colname(meta_keys[f.source])} IS NULL")
+ ss_constraints.append(f"ms.{metadata.index_to_colname(meta_keys[f.source])} IS NULL")
else:
extra_constraints.append(
_multiple_conditions(
@@ -427,6 +458,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
if is_any:
extra_constraints.append('s.user_id IS NOT NULL')
ss_constraints.append('ms.user_id IS NOT NULL')
+ elif is_undefined:
+ extra_constraints.append('s.user_id IS NULL')
+ ss_constraints.append('ms.user_id IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
@@ -437,6 +471,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
if is_any:
extra_constraints.append('s.user_anonymous_id IS NOT NULL')
ss_constraints.append('ms.user_anonymous_id IS NOT NULL')
+ elif is_undefined:
+ extra_constraints.append('s.user_anonymous_id IS NULL')
+ ss_constraints.append('ms.user_anonymous_id IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f"s.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
@@ -448,6 +485,9 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
if is_any:
extra_constraints.append('s.rev_id IS NOT NULL')
ss_constraints.append('ms.rev_id IS NOT NULL')
+ elif is_undefined:
+ extra_constraints.append('s.rev_id IS NULL')
+ ss_constraints.append('ms.rev_id IS NULL')
else:
extra_constraints.append(
_multiple_conditions(f"s.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
@@ -945,7 +985,6 @@ def get_favorite_sessions(project_id, user_id, include_viewed=False):
s.session_id::text AS session_id,
s.user_uuid,
s.user_id,
- -- s.user_agent,
s.user_os,
s.user_browser,
s.user_device,
@@ -982,7 +1021,6 @@ def get_user_sessions(project_id, user_id, start_date, end_date):
s.session_id::text AS session_id,
s.user_uuid,
s.user_id,
- -- s.user_agent,
s.user_os,
s.user_browser,
s.user_device,
diff --git a/api/chalicelib/utils/jira_client.py b/api/chalicelib/utils/jira_client.py
index a7ab92932..d3b637373 100644
--- a/api/chalicelib/utils/jira_client.py
+++ b/api/chalicelib/utils/jira_client.py
@@ -1,8 +1,9 @@
-from jira import JIRA
-from jira.exceptions import JIRAError
import time
from datetime import datetime
+
import requests
+from jira import JIRA
+from jira.exceptions import JIRAError
from requests.auth import HTTPBasicAuth
fields = "id, summary, description, creator, reporter, created, assignee, status, updated, comment, issuetype, labels"
@@ -15,7 +16,11 @@ class JiraManager:
def __init__(self, url, username, password, project_id=None):
self._config = {"JIRA_PROJECT_ID": project_id, "JIRA_URL": url, "JIRA_USERNAME": username,
"JIRA_PASSWORD": password}
- self._jira = JIRA({'server': url}, basic_auth=(username, password), logging=True)
+ try:
+ self._jira = JIRA({'server': url}, basic_auth=(username, password), logging=True, max_retries=1)
+ except Exception as e:
+            self._jira = None  # keep attribute defined so later use fails with a clear state, not AttributeError
+            print("!!! JIRA AUTH ERROR", e)
def set_jira_project_id(self, project_id):
self._config["JIRA_PROJECT_ID"] = project_id
diff --git a/api/requirements.txt b/api/requirements.txt
index 0a239790c..4af962f4f 100644
--- a/api/requirements.txt
+++ b/api/requirements.txt
@@ -8,8 +8,8 @@ jira==2.0.0
-fastapi==0.70.1
-uvicorn[standard]==0.16.0
-python-decouple==3.5
+fastapi==0.74.1
+uvicorn[standard]==0.17.5
+python-decouple==3.6
pydantic[email]==1.8.2
apscheduler==3.8.1
\ No newline at end of file
diff --git a/api/routers/core.py b/api/routers/core.py
index 669294232..9af9fec63 100644
--- a/api/routers/core.py
+++ b/api/routers/core.py
@@ -29,7 +29,9 @@ def get_favorite_sessions(projectId: int, context: schemas.CurrentContext = Depe
@app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"])
-def get_session2(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
+def get_session2(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)):
+ if isinstance(sessionId, str):
+ return {"errors": ["session not found"]}
data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, user_id=context.user_id,
include_fav_viewed=True, group_metadata=True)
if data is None:
diff --git a/api/schemas.py b/api/schemas.py
index 56eb24cf7..400dd94cc 100644
--- a/api/schemas.py
+++ b/api/schemas.py
@@ -1,7 +1,7 @@
from enum import Enum
from typing import Optional, List, Union, Literal
-from pydantic import BaseModel, Field, EmailStr, HttpUrl, root_validator
+from pydantic import BaseModel, Field, EmailStr, HttpUrl, root_validator, validator
from chalicelib.utils.TimeUTC import TimeUTC
@@ -107,7 +107,11 @@ class JiraGithubSchema(BaseModel):
provider: str = Field(...)
username: str = Field(...)
token: str = Field(...)
- url: str = Field(...)
+ url: HttpUrl = Field(...)
+
+ @validator('url')
+ def transform_url(cls, v: HttpUrl):
+        return HttpUrl.build(scheme=v.scheme, host=v.host, port=v.port)
class CreateEditWebhookSchema(BaseModel):
@@ -435,6 +439,7 @@ class SearchEventOperator(str, Enum):
_on = "on"
_on_any = "onAny"
_is_not = "isNot"
+ _is_undefined = "isUndefined"
_not_on = "notOn"
_contains = "contains"
_not_contains = "notContains"
diff --git a/backend/pkg/db/cache/messages_common.go b/backend/pkg/db/cache/messages_common.go
index 65c8bf4e1..3983982fe 100644
--- a/backend/pkg/db/cache/messages_common.go
+++ b/backend/pkg/db/cache/messages_common.go
@@ -65,7 +65,8 @@ func (c *PGCache) InsertMetadata(sessionID uint64, metadata *Metadata) error {
keyNo := project.GetMetadataNo(metadata.Key)
if keyNo == 0 {
- // insert project metadata
+ // TODO: insert project metadata
+ return nil
}
if err := c.Conn.InsertMetadata(sessionID, keyNo, metadata.Value); err != nil {
return err
diff --git a/backend/pkg/db/cache/messages_web.go b/backend/pkg/db/cache/messages_web.go
index 21b3ac866..4aa4dfa7b 100644
--- a/backend/pkg/db/cache/messages_web.go
+++ b/backend/pkg/db/cache/messages_web.go
@@ -29,7 +29,7 @@ func (c *PGCache) InsertWebSessionStart(sessionID uint64, s *SessionStart) error
UserDeviceType: s.UserDeviceType,
UserDeviceMemorySize: s.UserDeviceMemorySize,
UserDeviceHeapSize: s.UserDeviceHeapSize,
- UserID: &s.UserID,
+ UserID: &s.UserID,
}
if err := c.Conn.InsertSessionStart(sessionID, c.sessions[sessionID]); err != nil {
c.sessions[sessionID] = nil
diff --git a/backend/pkg/db/postgres/connector.go b/backend/pkg/db/postgres/connector.go
index cfa8f28f8..9e4e82633 100644
--- a/backend/pkg/db/postgres/connector.go
+++ b/backend/pkg/db/postgres/connector.go
@@ -15,7 +15,8 @@ func getTimeoutContext() context.Context {
}
type Conn struct {
- c *pgxpool.Pool // TODO: conditional usage of Pool/Conn (use interface?)
+ c *pgxpool.Pool // TODO: conditional usage of Pool/Conn (use interface?)
+ batches map[uint64]*pgx.Batch
}
func NewConn(url string) *Conn {
@@ -24,7 +25,8 @@ func NewConn(url string) *Conn {
log.Println(err)
log.Fatalln("pgxpool.Connect Error")
}
- return &Conn{c}
+ batches := make(map[uint64]*pgx.Batch)
+ return &Conn{c, batches}
}
func (conn *Conn) Close() error {
@@ -32,6 +34,31 @@ func (conn *Conn) Close() error {
return nil
}
+func (conn *Conn) batchQueue(sessionID uint64, sql string, args ...interface{}) error {
+ batch, ok := conn.batches[sessionID]
+ if !ok {
+ conn.batches[sessionID] = &pgx.Batch{}
+ batch = conn.batches[sessionID]
+ }
+ batch.Queue(sql, args...)
+ return nil
+}
+
+func (conn *Conn) CommitBatches() {
+ for _, b := range conn.batches {
+ br := conn.c.SendBatch(getTimeoutContext(), b)
+ l := b.Len()
+ for i := 0; i < l; i++ {
+ if ct, err := br.Exec(); err != nil {
+ // TODO: ct info
+ log.Printf("Error in PG batch (command tag %v): %v \n", ct.String(), err)
+ }
+ }
+ br.Close() // returns err
+ }
+ conn.batches = make(map[uint64]*pgx.Batch)
+}
+
func (conn *Conn) query(sql string, args ...interface{}) (pgx.Rows, error) {
return conn.c.Query(getTimeoutContext(), sql, args...)
}
@@ -56,7 +83,7 @@ func (conn *Conn) begin() (_Tx, error) {
func (tx _Tx) exec(sql string, args ...interface{}) error {
_, err := tx.Exec(context.Background(), sql, args...)
- return err;
+ return err
}
func (tx _Tx) rollback() error {
@@ -66,5 +93,3 @@ func (tx _Tx) rollback() error {
func (tx _Tx) commit() error {
return tx.Commit(context.Background())
}
-
-
diff --git a/backend/pkg/db/postgres/messages_common.go b/backend/pkg/db/postgres/messages_common.go
index df539e05c..1f4f781ce 100644
--- a/backend/pkg/db/postgres/messages_common.go
+++ b/backend/pkg/db/postgres/messages_common.go
@@ -1,13 +1,13 @@
package postgres
import (
+ "fmt"
"log"
"strings"
- "fmt"
+ "openreplay/backend/pkg/db/types"
"openreplay/backend/pkg/hashid"
"openreplay/backend/pkg/messages"
- "openreplay/backend/pkg/db/types"
)
func getAutocompleteType(baseType string, platform string) string {
@@ -22,7 +22,7 @@ func (conn *Conn) insertAutocompleteValue(sessionID uint64, tp string, value str
if len(value) == 0 {
return
}
- if err := conn.exec(`
+ if err := conn.batchQueue(sessionID, `
INSERT INTO autocomplete (
value,
type,
@@ -31,7 +31,7 @@ func (conn *Conn) insertAutocompleteValue(sessionID uint64, tp string, value str
$1, $2, project_id
FROM sessions
WHERE session_id = $3
- ) ON CONFLICT DO NOTHING`,
+ ) ON CONFLICT DO NOTHING`,
value, tp, sessionID,
); err != nil {
log.Printf("Insert autocomplete error: %v", err)
@@ -59,16 +59,16 @@ func (conn *Conn) InsertSessionStart(sessionID uint64, s *types.Session) error {
NULLIF($14, ''), NULLIF($15, ''), NULLIF($16, ''), NULLIF($17, 0), NULLIF($18, 0::bigint),
NULLIF($19, '')
)`,
- sessionID, s.ProjectID, s.Timestamp,
+ sessionID, s.ProjectID, s.Timestamp,
s.UserUUID, s.UserDevice, s.UserDeviceType, s.UserCountry,
s.UserOS, s.UserOSVersion,
- s.RevID,
+ s.RevID,
s.TrackerVersion, s.Timestamp/1000,
s.Platform,
s.UserAgent, s.UserBrowser, s.UserBrowserVersion, s.UserDeviceMemorySize, s.UserDeviceHeapSize,
s.UserID,
); err != nil {
- return err;
+ return err
}
conn.insertAutocompleteValue(sessionID, getAutocompleteType("USEROS", s.Platform), s.UserOS)
conn.insertAutocompleteValue(sessionID, getAutocompleteType("USERDEVICE", s.Platform), s.UserDevice)
@@ -81,7 +81,7 @@ func (conn *Conn) InsertSessionStart(sessionID uint64, s *types.Session) error {
func (conn *Conn) InsertSessionEnd(sessionID uint64, timestamp uint64) (uint64, error) {
// Search acceleration
- if err := conn.exec(`
+ if err := conn.batchQueue(sessionID, `
UPDATE sessions
SET issue_types=(SELECT
CASE WHEN errors_count > 0 THEN
@@ -96,7 +96,7 @@ func (conn *Conn) InsertSessionEnd(sessionID uint64, timestamp uint64) (uint64,
`,
sessionID,
); err != nil {
- log.Printf("Error while updating issue_types %v", sessionID)
+ log.Printf("Error while updating issue_types: %v. SessionID: %v", err, sessionID)
}
var dur uint64
@@ -113,33 +113,33 @@ func (conn *Conn) InsertSessionEnd(sessionID uint64, timestamp uint64) (uint64,
}
func (conn *Conn) InsertRequest(sessionID uint64, timestamp uint64, index uint64, url string, duration uint64, success bool) error {
- return conn.exec(`
+ return conn.batchQueue(sessionID, `
INSERT INTO events_common.requests (
session_id, timestamp, seq_index, url, duration, success
) VALUES (
$1, $2, $3, $4, $5, $6
)`,
- sessionID, timestamp,
+ sessionID, timestamp,
getSqIdx(index),
url, duration, success,
)
}
func (conn *Conn) InsertCustomEvent(sessionID uint64, timestamp uint64, index uint64, name string, payload string) error {
- return conn.exec(`
+ return conn.batchQueue(sessionID, `
INSERT INTO events_common.customs (
session_id, timestamp, seq_index, name, payload
) VALUES (
$1, $2, $3, $4, $5
)`,
- sessionID, timestamp,
- getSqIdx(index),
+ sessionID, timestamp,
+ getSqIdx(index),
name, payload,
)
}
func (conn *Conn) InsertUserID(sessionID uint64, userID string) error {
- return conn.exec(`
+ return conn.batchQueue(sessionID, `
UPDATE sessions SET user_id = $1
WHERE session_id = $2`,
userID, sessionID,
@@ -147,16 +147,15 @@ func (conn *Conn) InsertUserID(sessionID uint64, userID string) error {
}
func (conn *Conn) InsertUserAnonymousID(sessionID uint64, userAnonymousID string) error {
- return conn.exec(`
+ return conn.batchQueue(sessionID, `
UPDATE sessions SET user_anonymous_id = $1
WHERE session_id = $2`,
userAnonymousID, sessionID,
)
}
-
func (conn *Conn) InsertMetadata(sessionID uint64, keyNo uint, value string) error {
- return conn.exec(fmt.Sprintf(`
+ return conn.batchQueue(sessionID, fmt.Sprintf(`
UPDATE sessions SET metadata_%v = $1
WHERE session_id = $2`, keyNo),
value, sessionID,
@@ -173,11 +172,11 @@ func (conn *Conn) InsertIssueEvent(sessionID uint64, projectID uint32, e *messag
issueID := hashid.IssueID(projectID, e)
// TEMP. TODO: nullable & json message field type
- payload := &e.Payload;
+ payload := &e.Payload
if *payload == "" || *payload == "{}" {
payload = nil
}
- context := &e.Context;
+ context := &e.Context
if *context == "" || *context == "{}" {
context = nil
}
@@ -189,7 +188,7 @@ func (conn *Conn) InsertIssueEvent(sessionID uint64, projectID uint32, e *messag
project_id, $2, $3, $4, CAST($5 AS jsonb)
FROM sessions
WHERE session_id = $1
- )ON CONFLICT DO NOTHING`,
+ )ON CONFLICT DO NOTHING`,
sessionID, issueID, e.Type, e.ContextString, context,
); err != nil {
return err
@@ -199,8 +198,8 @@ func (conn *Conn) InsertIssueEvent(sessionID uint64, projectID uint32, e *messag
session_id, issue_id, timestamp, seq_index, payload
) VALUES (
$1, $2, $3, $4, CAST($5 AS jsonb)
- )`,
- sessionID, issueID, e.Timestamp,
+ )`,
+ sessionID, issueID, e.Timestamp,
getSqIdx(e.MessageID),
payload,
); err != nil {
@@ -228,5 +227,3 @@ func (conn *Conn) InsertIssueEvent(sessionID uint64, projectID uint32, e *messag
}
return tx.commit()
}
-
-
diff --git a/backend/pkg/db/postgres/messages_web.go b/backend/pkg/db/postgres/messages_web.go
index f7354252b..64d7ba561 100644
--- a/backend/pkg/db/postgres/messages_web.go
+++ b/backend/pkg/db/postgres/messages_web.go
@@ -68,16 +68,19 @@ func (conn *Conn) InsertWebPageEvent(sessionID uint64, e *PageEvent) error {
if err := tx.exec(`
INSERT INTO events.pages (
session_id, message_id, timestamp, referrer, base_referrer, host, path, base_path,
- dom_content_loaded_time, load_time, response_end, first_paint_time, first_contentful_paint_time, speed_index, visually_complete, time_to_interactive,
+ dom_content_loaded_time, load_time, response_end, first_paint_time, first_contentful_paint_time,
+ speed_index, visually_complete, time_to_interactive,
response_time, dom_building_time
) VALUES (
$1, $2, $3, $4, $5, $6, $7, $8,
- NULLIF($9, 0), NULLIF($10, 0), NULLIF($11, 0), NULLIF($12, 0), NULLIF($13, 0), NULLIF($14, 0), NULLIF($15, 0), NULLIF($16, 0),
+ NULLIF($9, 0), NULLIF($10, 0), NULLIF($11, 0), NULLIF($12, 0), NULLIF($13, 0),
+ NULLIF($14, 0), NULLIF($15, 0), NULLIF($16, 0),
NULLIF($17, 0), NULLIF($18, 0)
)
`,
sessionID, e.MessageID, e.Timestamp, e.Referrer, url.DiscardURLQuery(e.Referrer), host, path, url.DiscardURLQuery(path),
- e.DomContentLoadedEventEnd, e.LoadEventEnd, e.ResponseEnd, e.FirstPaint, e.FirstContentfulPaint, e.SpeedIndex, e.VisuallyComplete, e.TimeToInteractive,
+ e.DomContentLoadedEventEnd, e.LoadEventEnd, e.ResponseEnd, e.FirstPaint, e.FirstContentfulPaint,
+ e.SpeedIndex, e.VisuallyComplete, e.TimeToInteractive,
calcResponseTime(e), calcDomBuildingTime(e),
); err != nil {
return err
diff --git a/backend/pkg/db/postgres/messages_web_stats.go b/backend/pkg/db/postgres/messages_web_stats.go
index 9e3b5bc77..933442b0b 100644
--- a/backend/pkg/db/postgres/messages_web_stats.go
+++ b/backend/pkg/db/postgres/messages_web_stats.go
@@ -1,21 +1,18 @@
-package postgres
+package postgres
import (
- "openreplay/backend/pkg/url"
. "openreplay/backend/pkg/messages"
+ "openreplay/backend/pkg/url"
)
-
-
func (conn *Conn) InsertWebStatsLongtask(sessionID uint64, l *LongTask) error {
- return nil // Do we even use them?
- // conn.exec(``);
+ return nil // Do we even use them?
+ // conn.exec(``);
}
-
func (conn *Conn) InsertWebStatsPerformance(sessionID uint64, p *PerformanceTrackAggr) error {
- timestamp := (p.TimestampEnd + p.TimestampStart) /2
- return conn.exec(`
+ timestamp := (p.TimestampEnd + p.TimestampStart) / 2
+ return conn.batchQueue(sessionID, `
INSERT INTO events.performance (
session_id, timestamp, message_id,
min_fps, avg_fps, max_fps,
@@ -34,7 +31,7 @@ func (conn *Conn) InsertWebStatsPerformance(sessionID uint64, p *PerformanceTrac
p.MinCPU, p.AvgCPU, p.MinCPU,
p.MinTotalJSHeapSize, p.AvgTotalJSHeapSize, p.MaxTotalJSHeapSize,
p.MinUsedJSHeapSize, p.AvgUsedJSHeapSize, p.MaxUsedJSHeapSize,
- );
+ )
}
func (conn *Conn) InsertWebStatsResourceEvent(sessionID uint64, e *ResourceEvent) error {
@@ -42,7 +39,7 @@ func (conn *Conn) InsertWebStatsResourceEvent(sessionID uint64, e *ResourceEvent
if err != nil {
return err
}
- return conn.exec(`
+ return conn.batchQueue(sessionID, `
INSERT INTO events.resources (
session_id, timestamp, message_id,
type,
@@ -58,11 +55,11 @@ func (conn *Conn) InsertWebStatsResourceEvent(sessionID uint64, e *ResourceEvent
NULLIF($10, '')::events.resource_method,
NULLIF($11, 0), NULLIF($12, 0), NULLIF($13, 0), NULLIF($14, 0), NULLIF($15, 0)
)`,
- sessionID, e.Timestamp, e.MessageID,
+ sessionID, e.Timestamp, e.MessageID,
e.Type,
e.URL, host, url.DiscardURLQuery(e.URL),
- e.Success, e.Status,
+ e.Success, e.Status,
url.EnsureMethod(e.Method),
e.Duration, e.TTFB, e.HeaderSize, e.EncodedBodySize, e.DecodedBodySize,
)
-}
\ No newline at end of file
+}
diff --git a/backend/services/db/main.go b/backend/services/db/main.go
index b60f7e7db..d6190a4f0 100644
--- a/backend/services/db/main.go
+++ b/backend/services/db/main.go
@@ -11,11 +11,11 @@ import (
"openreplay/backend/pkg/db/cache"
"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/env"
+ logger "openreplay/backend/pkg/log"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue"
"openreplay/backend/pkg/queue/types"
"openreplay/backend/services/db/heuristics"
- logger "openreplay/backend/pkg/log"
)
var pg *cache.PGCache
@@ -29,7 +29,6 @@ func main() {
heurFinder := heuristics.NewHandler()
-
statsLogger := logger.NewQueueStats(env.Int("LOG_QUEUE_STATS_INTERVAL_SEC"))
consumer := queue.NewMessageConsumer(
@@ -91,6 +90,7 @@ func main() {
consumer.Close()
os.Exit(0)
case <-tick:
+ pg.CommitBatches()
if err := commitStats(); err != nil {
log.Printf("Error on stats commit: %v", err)
}
diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py
index 2d4effc0e..c8c1c7669 100644
--- a/ee/api/chalicelib/core/users.py
+++ b/ee/api/chalicelib/core/users.py
@@ -21,7 +21,10 @@ def create_new_member(tenant_id, email, invitation_token, admin, name, owner=Fal
query = cur.mogrify(f"""\
WITH u AS (
INSERT INTO public.users (tenant_id, email, role, name, data, role_id)
- VALUES (%(tenantId)s, %(email)s, %(role)s, %(name)s, %(data)s, %(role_id)s)
+ VALUES (%(tenant_id)s, %(email)s, %(role)s, %(name)s, %(data)s,
+ (SELECT COALESCE((SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND role_id = %(role_id)s),
+ (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name = 'Member' LIMIT 1),
+ (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name != 'Owner' LIMIT 1))))
RETURNING tenant_id,user_id,email,role,name,appearance, role_id
),
au AS (INSERT INTO public.basic_authentication (user_id, generated_password, invitation_token, invited_at)
@@ -42,8 +45,8 @@ def create_new_member(tenant_id, email, invitation_token, admin, name, owner=Fal
roles.name AS role_name,
roles.permissions,
TRUE AS has_password
- FROM au,u LEFT JOIN roles USING(tenant_id) WHERE roles.role_id IS NULL OR roles.role_id = %(role_id)s;""",
- {"tenantId": tenant_id, "email": email,
+ FROM au,u LEFT JOIN roles USING(tenant_id) WHERE roles.role_id IS NULL OR roles.role_id = (SELECT u.role_id FROM u);""",
+ {"tenant_id": tenant_id, "email": email,
"role": "owner" if owner else "admin" if admin else "member", "name": name,
"data": json.dumps({"lastAnnouncementView": TimeUTC.now()}),
"invitation_token": invitation_token, "role_id": role_id})
@@ -63,7 +66,9 @@ def restore_member(tenant_id, user_id, email, invitation_token, admin, name, own
created_at = timezone('utc'::text, now()),
tenant_id= %(tenant_id)s,
api_key= generate_api_key(20),
- role_id= %(role_id)s
+ role_id= (SELECT COALESCE((SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND role_id = %(role_id)s),
+ (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name = 'Member' LIMIT 1),
+ (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name != 'Owner' LIMIT 1)))
WHERE user_id=%(user_id)s
RETURNING user_id AS id,
email,
@@ -145,6 +150,10 @@ def update(tenant_id, user_id, changes):
if key == "appearance":
sub_query_users.append(f"appearance = %(appearance)s::jsonb")
changes["appearance"] = json.dumps(changes[key])
+ elif helper.key_to_snake_case(key) == "role_id":
+ sub_query_users.append("""role_id=(SELECT COALESCE((SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND role_id = %(role_id)s),
+ (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name = 'Member' LIMIT 1),
+ (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name != 'Owner' LIMIT 1)))""")
else:
sub_query_users.append(f"{helper.key_to_snake_case(key)} = %({key})s")
@@ -280,11 +289,11 @@ def get(user_id, tenant_id):
LEFT JOIN public.roles USING (role_id)
WHERE
users.user_id = %(userId)s
- AND users.tenant_id = %(tenantId)s
+ AND users.tenant_id = %(tenant_id)s
AND users.deleted_at IS NULL
- AND (roles.role_id IS NULL OR roles.deleted_at IS NULL AND roles.tenant_id = %(tenantId)s)
+ AND (roles.role_id IS NULL OR roles.deleted_at IS NULL AND roles.tenant_id = %(tenant_id)s)
LIMIT 1;""",
- {"userId": user_id, "tenantId": tenant_id})
+ {"userId": user_id, "tenant_id": tenant_id})
)
r = cur.fetchone()
return helper.dict_to_camel_case(r, ignore_keys=["appearance"])
@@ -418,9 +427,9 @@ def get_members(tenant_id):
FROM public.users
LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
LEFT JOIN public.roles USING (role_id)
- WHERE users.tenant_id = %(tenantId)s AND users.deleted_at IS NULL
+ WHERE users.tenant_id = %(tenant_id)s AND users.deleted_at IS NULL
ORDER BY name, id""",
- {"tenantId": tenant_id})
+ {"tenant_id": tenant_id})
)
r = cur.fetchall()
if len(r):
@@ -534,8 +543,8 @@ def count_members(tenant_id):
cur.mogrify(
"""SELECT
COUNT(user_id)
- FROM public.users WHERE tenant_id = %(tenantId)s AND deleted_at IS NULL;""",
- {"tenantId": tenant_id})
+ FROM public.users WHERE tenant_id = %(tenant_id)s AND deleted_at IS NULL;""",
+ {"tenant_id": tenant_id})
)
r = cur.fetchone()
return r["count"]
@@ -598,8 +607,8 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
- f"SELECT user_id AS id,jwt_iat, changed_at FROM public.users INNER JOIN public.basic_authentication USING(user_id) WHERE user_id = %(userId)s AND tenant_id = %(tenantId)s AND deleted_at IS NULL LIMIT 1;",
- {"userId": user_id, "tenantId": tenant_id})
+ f"SELECT user_id AS id,jwt_iat, changed_at FROM public.users INNER JOIN public.basic_authentication USING(user_id) WHERE user_id = %(userId)s AND tenant_id = %(tenant_id)s AND deleted_at IS NULL LIMIT 1;",
+ {"userId": user_id, "tenant_id": tenant_id})
)
r = cur.fetchone()
return r is not None \
@@ -716,7 +725,10 @@ def create_sso_user(tenant_id, email, admin, name, origin, role_id, internal_id=
query = cur.mogrify(f"""\
WITH u AS (
INSERT INTO public.users (tenant_id, email, role, name, data, origin, internal_id, role_id)
- VALUES (%(tenantId)s, %(email)s, %(role)s, %(name)s, %(data)s, %(origin)s, %(internal_id)s, %(role_id)s)
+ VALUES (%(tenant_id)s, %(email)s, %(role)s, %(name)s, %(data)s, %(origin)s, %(internal_id)s,
+ (SELECT COALESCE((SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND role_id = %(role_id)s),
+ (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name = 'Member' LIMIT 1),
+ (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name != 'Owner' LIMIT 1))))
RETURNING *
),
au AS (
@@ -734,7 +746,7 @@ def create_sso_user(tenant_id, email, admin, name, origin, role_id, internal_id=
u.appearance,
origin
FROM u;""",
- {"tenantId": tenant_id, "email": email, "internal_id": internal_id,
+ {"tenant_id": tenant_id, "email": email, "internal_id": internal_id,
"role": "admin" if admin else "member", "name": name, "origin": origin,
"role_id": role_id, "data": json.dumps({"lastAnnouncementView": TimeUTC.now()})})
cur.execute(
@@ -748,13 +760,15 @@ def restore_sso_user(user_id, tenant_id, email, admin, name, origin, role_id, in
query = cur.mogrify(f"""\
WITH u AS (
UPDATE public.users
- SET tenant_id= %(tenantId)s,
+ SET tenant_id= %(tenant_id)s,
role= %(role)s,
name= %(name)s,
data= %(data)s,
origin= %(origin)s,
internal_id= %(internal_id)s,
- role_id= %(role_id)s,
+ role_id= (SELECT COALESCE((SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND role_id = %(role_id)s),
+ (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name = 'Member' LIMIT 1),
+ (SELECT role_id FROM roles WHERE tenant_id = %(tenant_id)s AND name != 'Owner' LIMIT 1))),
deleted_at= NULL,
created_at= default,
api_key= default,
@@ -787,7 +801,7 @@ def restore_sso_user(user_id, tenant_id, email, admin, name, origin, role_id, in
u.appearance,
origin
FROM u;""",
- {"tenantId": tenant_id, "email": email, "internal_id": internal_id,
+ {"tenant_id": tenant_id, "email": email, "internal_id": internal_id,
"role": "admin" if admin else "member", "name": name, "origin": origin,
"role_id": role_id, "data": json.dumps({"lastAnnouncementView": TimeUTC.now()}),
"user_id": user_id})
diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt
index 82329a1f2..84a372567 100644
--- a/ee/api/requirements.txt
+++ b/ee/api/requirements.txt
@@ -8,9 +8,9 @@ jira==2.0.0
clickhouse-driver==0.2.2
python3-saml==1.12.0
-fastapi==0.70.1
+fastapi==0.74.1
python-multipart==0.0.5
-uvicorn[standard]==0.16.0
-python-decouple==3.5
+uvicorn[standard]==0.17.5
+python-decouple==3.6
pydantic[email]==1.8.2
apscheduler==3.8.1
\ No newline at end of file
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.5.1/1.5.1.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.5.1/1.5.1.sql
new file mode 100644
index 000000000..48a326e9c
--- /dev/null
+++ b/ee/scripts/helm/db/init_dbs/postgresql/1.5.1/1.5.1.sql
@@ -0,0 +1,8 @@
+BEGIN;
+CREATE OR REPLACE FUNCTION openreplay_version()
+ RETURNS text AS
+$$
+SELECT 'v1.5.1-ee'
+$$ LANGUAGE sql IMMUTABLE;
+
+COMMIT;
\ No newline at end of file
diff --git a/frontend/app/Router.js b/frontend/app/Router.js
index c3a1721a6..0c0e7433a 100644
--- a/frontend/app/Router.js
+++ b/frontend/app/Router.js
@@ -11,6 +11,7 @@ import UpdatePassword from 'Components/UpdatePassword/UpdatePassword';
import ClientPure from 'Components/Client/Client';
import OnboardingPure from 'Components/Onboarding/Onboarding';
import SessionPure from 'Components/Session/Session';
+import AssistPure from 'Components/Assist';
import BugFinderPure from 'Components/BugFinder/BugFinder';
import DashboardPure from 'Components/Dashboard/Dashboard';
import ErrorsPure from 'Components/Errors/Errors';
@@ -18,6 +19,7 @@ import Header from 'Components/Header/Header';
// import ResultsModal from 'Shared/Results/ResultsModal';
import FunnelDetails from 'Components/Funnels/FunnelDetails';
import FunnelIssueDetails from 'Components/Funnels/FunnelIssueDetails';
+import { fetchList as fetchIntegrationVariables } from 'Duck/customField';
import APIClient from './api_client';
import * as routes from './routes';
@@ -29,6 +31,7 @@ import { setSessionPath } from 'Duck/sessions';
const BugFinder = withSiteIdUpdater(BugFinderPure);
const Dashboard = withSiteIdUpdater(DashboardPure);
const Session = withSiteIdUpdater(SessionPure);
+const Assist = withSiteIdUpdater(AssistPure);
const Client = withSiteIdUpdater(ClientPure);
const Onboarding = withSiteIdUpdater(OnboardingPure);
const Errors = withSiteIdUpdater(ErrorsPure);
@@ -39,6 +42,7 @@ const withObTab = routes.withObTab;
const DASHBOARD_PATH = routes.dashboard();
const SESSIONS_PATH = routes.sessions();
+const ASSIST_PATH = routes.assist();
const ERRORS_PATH = routes.errors();
const ERROR_PATH = routes.error();
const FUNNEL_PATH = routes.funnel();
@@ -74,7 +78,7 @@ const ONBOARDING_REDIRECT_PATH = routes.onboarding(OB_DEFAULT_TAB);
onboarding: state.getIn([ 'user', 'onboarding' ])
};
}, {
- fetchUserInfo, fetchTenants, setSessionPath
+ fetchUserInfo, fetchTenants, setSessionPath, fetchIntegrationVariables
})
class Router extends React.Component {
state = {
@@ -83,7 +87,11 @@ class Router extends React.Component {
constructor(props) {
super(props);
if (props.isLoggedIn) {
- Promise.all([props.fetchUserInfo()])
+ Promise.all([
+ props.fetchUserInfo().then(() => {
+ props.fetchIntegrationVariables()
+ }),
+ ])
// .then(() => this.onLoginLogout());
}
props.fetchTenants();
@@ -145,6 +153,7 @@ class Router extends React.Component {
}
+
diff --git a/frontend/app/components/Assist/Assist.tsx b/frontend/app/components/Assist/Assist.tsx
index 74f2095f8..77730f7b1 100644
--- a/frontend/app/components/Assist/Assist.tsx
+++ b/frontend/app/components/Assist/Assist.tsx
@@ -1,11 +1,25 @@
import React from 'react';
-import ChatWindow from './ChatWindow';
+import LiveSessionList from 'Shared/LiveSessionList';
+import LiveSessionSearch from 'Shared/LiveSessionSearch';
+import cn from 'classnames'
+import withPageTitle from 'HOCs/withPageTitle';
+import withPermissions from 'HOCs/withPermissions'
-
-export default function Assist() {
+// @withPageTitle("Assist - OpenReplay")
+function Assist() {
return (
-
- {/*
*/}
+
)
}
+
+export default withPageTitle("Assist - OpenReplay")(withPermissions(['ASSIST_LIVE', 'SESSION_REPLAY'])(Assist));
diff --git a/frontend/app/components/Assist/ChatControls/ChatControls.css b/frontend/app/components/Assist/ChatControls/ChatControls.css
index 7ec77f758..b5a03ed10 100644
--- a/frontend/app/components/Assist/ChatControls/ChatControls.css
+++ b/frontend/app/components/Assist/ChatControls/ChatControls.css
@@ -15,7 +15,7 @@
&.disabled {
/* background-color: red; */
& svg {
- fill: red;
+ fill: $red;
}
}
}
diff --git a/frontend/app/components/Assist/ChatControls/ChatControls.tsx b/frontend/app/components/Assist/ChatControls/ChatControls.tsx
index 61803bc2f..fd0430282 100644
--- a/frontend/app/components/Assist/ChatControls/ChatControls.tsx
+++ b/frontend/app/components/Assist/ChatControls/ChatControls.tsx
@@ -28,17 +28,17 @@ function ChatControls({ stream, endCall, videoEnabled, setVideoEnabled } : Props
return (
-
-