main branch pull and resolved conflicts

Commit: af03b9929a
45 changed files with 441 additions and 312 deletions

LICENSE (13)
@@ -1,12 +1,21 @@
Copyright (c) 2022 Asayer, Inc.

OpenReplay monorepo uses multiple licenses. Portions of this software are licensed as follows:

- All content that resides under the "ee/" directory of this repository, is licensed under the license defined in "ee/LICENSE".
- Content outside of the above mentioned directories or restrictions above is available under the "Elastic License 2.0 (ELv2)" license as defined below.
- Some directories have a specific LICENSE file and are licensed under the "MIT" license, as defined below.
- Content outside of the above mentioned directories or restrictions defaults to the "Elastic License 2.0 (ELv2)" license, as defined below.

Reach out (license@openreplay.com) if you have any questions regarding licenses.

------------------------------------------------------------------------------------
MIT License

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

------------------------------------------------------------------------------------
Elastic License 2.0 (ELv2)
@@ -89,7 +89,7 @@ Check out our [roadmap](https://www.notion.so/openreplay/Roadmap-889d2c3d968b478

## License

This repo is under the Elastic License 2.0 (ELv2), with the exception of the `ee` directory.
This monorepo uses several licenses. See [LICENSE](/LICENSE) for more details.

## Contributors
@@ -19,7 +19,7 @@ RUN cd /work_tmp && npm install

WORKDIR /work
COPY . .
RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/.
RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/. && chmod 644 /mappings.wasm

RUN adduser -u 1001 openreplay -D
USER 1001
@@ -4,7 +4,7 @@ LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
RUN apk add --no-cache build-base tini
ARG envarg
ENV APP_NAME=alerts \
    pg_minconn=2 \
    pg_minconn=1 \
    pg_maxconn=10 \
    ENTERPRISE_BUILD=${envarg}
@@ -138,7 +138,10 @@ def send_by_email(notification, destination):


def send_by_email_batch(notifications_list):
    if not helper.has_smtp():
        logging.info("no SMTP configuration for email notifications")
    if notifications_list is None or len(notifications_list) == 0:
        logging.info("no email notifications")
        return
    for n in notifications_list:
        send_by_email(notification=n.get("notification"), destination=n.get("destination"))
@@ -43,16 +43,24 @@ def __create(tenant_id, name):

def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False):
    with pg_client.PostgresClient() as cur:
        cur.execute(f"""\
                SELECT
                    s.project_id, s.name, s.project_key, s.save_request_payloads
                    {',s.gdpr' if gdpr else ''}
                    {',COALESCE((SELECT TRUE FROM public.sessions WHERE sessions.project_id = s.project_id LIMIT 1), FALSE) AS recorded' if recorded else ''}
                    {',stack_integrations.count>0 AS stack_integrations' if stack_integrations else ''}
                FROM public.projects AS s
                    {'LEFT JOIN LATERAL (SELECT COUNT(*) AS count FROM public.integrations WHERE s.project_id = integrations.project_id LIMIT 1) AS stack_integrations ON TRUE' if stack_integrations else ''}
                WHERE s.deleted_at IS NULL
                ORDER BY s.project_id;""")
        recorded_q = ""
        if recorded:
            recorded_q = """, COALESCE((SELECT TRUE
                                        FROM public.sessions
                                        WHERE sessions.project_id = s.project_id
                                          AND sessions.start_ts >= (EXTRACT(EPOCH FROM s.created_at) * 1000 - 24 * 60 * 60 * 1000)
                                          AND sessions.start_ts <= %(now)s
                                        LIMIT 1), FALSE) AS recorded"""
        query = cur.mogrify(f"""SELECT
                    s.project_id, s.name, s.project_key, s.save_request_payloads
                    {',s.gdpr' if gdpr else ''}
                    {recorded_q}
                    {',stack_integrations.count>0 AS stack_integrations' if stack_integrations else ''}
                FROM public.projects AS s
                    {'LEFT JOIN LATERAL (SELECT COUNT(*) AS count FROM public.integrations WHERE s.project_id = integrations.project_id LIMIT 1) AS stack_integrations ON TRUE' if stack_integrations else ''}
                WHERE s.deleted_at IS NULL
                ORDER BY s.project_id;""", {"now": TimeUTC.now()})
        cur.execute(query)
        rows = cur.fetchall()
        if recording_state:
            project_ids = [f'({r["project_id"]})' for r in rows]
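The hunk above swaps a one-shot f-string execute() for a composed query: optional SQL fragments are still interpolated with f-strings, but runtime values such as the current timestamp are bound through cur.mogrify() as %(now)s. A minimal sketch of that composition pattern, assuming psycopg2 semantics; build_projects_query, its column list and the millisecond "now" are illustrative only, not the OpenReplay implementation:

import time


def build_projects_query(gdpr: bool, recorded: bool):
    # optional fragment, only added when the caller asks for the "recorded" flag
    recorded_q = ""
    if recorded:
        recorded_q = """, COALESCE((SELECT TRUE
                                    FROM public.sessions
                                    WHERE sessions.project_id = s.project_id
                                      AND sessions.start_ts <= %(now)s
                                    LIMIT 1), FALSE) AS recorded"""
    query = f"""SELECT s.project_id, s.name
                       {',s.gdpr' if gdpr else ''}
                       {recorded_q}
                FROM public.projects AS s
                WHERE s.deleted_at IS NULL
                ORDER BY s.project_id;"""
    # values never go through the f-string; they are bound as parameters
    params = {"now": int(time.time() * 1000)}
    # with a live psycopg2 cursor: cur.mogrify(query, params) renders the bound SQL,
    # cur.execute(query, params) runs it
    return query, params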
@@ -712,13 +712,13 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
                                         event.value, value_key=e_k))
        elif event_type == events.event_type.ERROR.ui_type:
            event_from = event_from % f"{events.event_type.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)"
            event.source = tuple(event.source)
            event.source = list(set(event.source))
            if not is_any and event.value not in [None, "*", ""]:
                event_where.append(
                    _multiple_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)",
                                         event.value, value_key=e_k))
            if event.source[0] not in [None, "*", ""]:
                event_where.append(_multiple_conditions(f"main1.source = %({s_k})s", event.value, value_key=s_k))
                event_where.append(_multiple_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k))

        # ----- IOS

@@ -877,7 +877,8 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
            apply = True
        elif f.type == schemas.FetchFilterType._duration:
            event_where.append(
                _multiple_conditions(f"main.duration {f.operator} %({e_k_f})s::integer", f.value, value_key=e_k_f))
                _multiple_conditions(f"main.duration {f.operator} %({e_k_f})s::integer", f.value,
                                     value_key=e_k_f))
            apply = True
        elif f.type == schemas.FetchFilterType._request_body:
            event_where.append(

@@ -885,7 +886,8 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
            apply = True
        elif f.type == schemas.FetchFilterType._response_body:
            event_where.append(
                _multiple_conditions(f"main.response_body {op} %({e_k_f})s::text", f.value, value_key=e_k_f))
                _multiple_conditions(f"main.response_body {op} %({e_k_f})s::text", f.value,
                                     value_key=e_k_f))
            apply = True
        else:
            print(f"undefined FETCH filter: {f.type}")
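For orientation, the _multiple_conditions helper used throughout these hunks is internal to the OpenReplay API; the sketch below is a hypothetical stand-in that captures the general idea (one condition template repeated per value, each value bound under its own numbered key, copies joined with OR). The real implementation may differ.

def multiple_conditions(condition: str, values, value_key: str = "value"):
    # hypothetical helper, for illustration only
    conditions = []
    params = {}
    for i, v in enumerate(values):
        key = f"{value_key}_{i}"
        conditions.append(condition.replace(f"%({value_key})s", f"%({key})s"))
        params[key] = v
    return "(" + " OR ".join(conditions) + ")", params


# Example: a duration filter with two candidate values
sql, params = multiple_conditions("main.duration > %(e_value)s::integer", [1000, 5000], value_key="e_value")
# sql    -> "(main.duration > %(e_value_0)s::integer OR main.duration > %(e_value_1)s::integer)"
# params -> {"e_value_0": 1000, "e_value_1": 5000}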
@@ -1,13 +1,15 @@
import base64
import logging
import re
from email.header import Header
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

from chalicelib.utils import helper, smtp
from decouple import config

from chalicelib.utils import smtp


def __get_subject(subject):
    return subject

@@ -64,11 +66,11 @@ def send_html(BODY_HTML, SUBJECT, recipient, bcc=None):
    if bcc is not None and len(bcc) > 0:
        r += [bcc]
    try:
        print(f"Email sending to: {r}")
        logging.info(f"Email sending to: {r}")
        s.sendmail(msg['FROM'], r, msg.as_string().encode('ascii'))
    except Exception as e:
        print("!!! Email error!")
        print(e)
        logging.error("!!! Email error!")
        logging.error(e)


def send_text(recipients, text, subject):

@@ -82,8 +84,8 @@ def send_text(recipients, text, subject):
    try:
        s.sendmail(msg['FROM'], recipients, msg.as_string().encode('ascii'))
    except Exception as e:
        print("!! Text-email failed: " + subject),
        print(e)
        logging.error("!! Text-email failed: " + subject),
        logging.error(e)


def __escape_text_html(text):
@@ -1,5 +1,5 @@
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.email_handler import __get_html_from_file, send_html, __escape_text_html
from chalicelib.utils.email_handler import __get_html_from_file, send_html


def send_team_invitation(recipient, client_id, sender_name, invitation_link):
@@ -1,3 +1,4 @@
import logging
import time
from threading import Semaphore

@@ -6,6 +7,9 @@ import psycopg2.extras
from decouple import config
from psycopg2 import pool

logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
logging.getLogger('apscheduler').setLevel(config("LOGLEVEL", default=logging.INFO))

_PG_CONFIG = {"host": config("pg_host"),
              "database": config("pg_dbname"),
              "user": config("pg_user"),

@@ -44,31 +48,34 @@ RETRY = 0


def make_pool():
    if not config('PG_POOL', cast=bool, default=True):
        return
    global postgreSQL_pool
    global RETRY
    if postgreSQL_pool is not None:
        try:
            postgreSQL_pool.closeall()
        except (Exception, psycopg2.DatabaseError) as error:
            print("Error while closing all connexions to PostgreSQL", error)
            logging.error("Error while closing all connexions to PostgreSQL", error)
    try:
        postgreSQL_pool = ORThreadedConnectionPool(config("pg_minconn", cast=int, default=20),
                                                   config("pg_maxconn", cast=int, default=80),
                                                   **PG_CONFIG)
        if (postgreSQL_pool):
            print("Connection pool created successfully")
            logging.info("Connection pool created successfully")
    except (Exception, psycopg2.DatabaseError) as error:
        print("Error while connecting to PostgreSQL", error)
        logging.error("Error while connecting to PostgreSQL", error)
        if RETRY < RETRY_MAX:
            RETRY += 1
            print(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
            logging.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
            time.sleep(RETRY_INTERVAL)
            make_pool()
        else:
            raise error


make_pool()
if config('PG_POOL', cast=bool, default=True):
    make_pool()


class PostgresClient:

@@ -87,8 +94,14 @@ class PostgresClient:
        elif long_query:
            long_config = dict(_PG_CONFIG)
            long_config["application_name"] += "-LONG"
            long_config["options"] = f"-c statement_timeout={config('pg_long_timeout', cast=int, default=5 * 60) * 1000}"
            long_config["options"] = f"-c statement_timeout=" \
                                     f"{config('pg_long_timeout', cast=int, default=5 * 60) * 1000}"
            self.connection = psycopg2.connect(**long_config)
        elif not config('PG_POOL', cast=bool, default=True):
            single_config = dict(_PG_CONFIG)
            single_config["application_name"] += "-NOPOOL"
            single_config["options"] = f"-c statement_timeout={config('pg_timeout', cast=int, default=3 * 60) * 1000}"
            self.connection = psycopg2.connect(**single_config)
        else:
            self.connection = postgreSQL_pool.getconn()

@@ -104,14 +117,19 @@ class PostgresClient:
            if self.long_query or self.unlimited_query:
                self.connection.close()
        except Exception as error:
            print("Error while committing/closing PG-connection", error)
            if str(error) == "connection already closed" and not self.long_query and not self.unlimited_query:
                print("Recreating the connexion pool")
            logging.error("Error while committing/closing PG-connection", error)
            if str(error) == "connection already closed" \
                    and not self.long_query \
                    and not self.unlimited_query \
                    and config('PG_POOL', cast=bool, default=True):
                logging.info("Recreating the connexion pool")
                make_pool()
            else:
                raise error
        finally:
            if not self.long_query:
            if config('PG_POOL', cast=bool, default=True) \
                    and not self.long_query \
                    and not self.unlimited_query:
                postgreSQL_pool.putconn(self.connection)
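The hunks above gate pool creation behind a PG_POOL flag and fall back to one-off psycopg2 connections when pooling is disabled (the crons image sets PG_POOL=false). A simplified sketch of that toggle, assuming psycopg2 and python-decouple; the module layout, defaults and setting names below are illustrative rather than the exact OpenReplay code:

import psycopg2
from decouple import config
from psycopg2 import pool

_pool = None


def init():
    # build a shared pool only when PG_POOL is enabled
    global _pool
    if config("PG_POOL", cast=bool, default=True):
        _pool = pool.ThreadedConnectionPool(config("pg_minconn", cast=int, default=2),
                                            config("pg_maxconn", cast=int, default=10),
                                            host=config("pg_host"),
                                            dbname=config("pg_dbname"),
                                            user=config("pg_user"),
                                            password=config("pg_password"))


def get_connection():
    # pooled connection when available, otherwise a direct connection per call
    if _pool is not None:
        return _pool.getconn()
    return psycopg2.connect(host=config("pg_host"),
                            dbname=config("pg_dbname"),
                            user=config("pg_user"),
                            password=config("pg_password"))


def release(conn):
    # return to the pool, or close the ad-hoc connection
    if _pool is not None:
        _pool.putconn(conn)
    else:
        conn.close()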
@@ -1,10 +1,14 @@
import logging
import smtplib
from smtplib import SMTPAuthenticationError

from decouple import config
from starlette.exceptions import HTTPException


class EmptySMTP:
    def sendmail(self, from_addr, to_addrs, msg, mail_options=(), rcpt_options=()):
        print("!! CANNOT SEND EMAIL, NO VALID SMTP CONFIGURATION FOUND")
        logging.error("!! CANNOT SEND EMAIL, NO VALID SMTP CONFIGURATION FOUND")


class SMTPClient:

@@ -30,7 +34,11 @@ class SMTPClient:
            self.server.starttls()
            # stmplib docs recommend calling ehlo() before & after starttls()
            self.server.ehlo()
            self.server.login(user=config("EMAIL_USER"), password=config("EMAIL_PASSWORD"))
            if len(config("EMAIL_USER", default="")) > 0 and len(config("EMAIL_PASSWORD", default="")) > 0:
                try:
                    self.server.login(user=config("EMAIL_USER"), password=config("EMAIL_PASSWORD"))
                except SMTPAuthenticationError:
                    raise HTTPException(401, "SMTP Authentication Error")
        return self.server

    def __exit__(self, *args):
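The change above only attempts SMTP authentication when both credentials are configured, and surfaces a failed login as an explicit error instead of crashing later. A hedged sketch of the same behaviour with plain smtplib; the host, port and credential parameters are placeholders:

import smtplib
from smtplib import SMTPAuthenticationError


def open_smtp(host: str, port: int, user: str = "", password: str = "", use_tls: bool = True):
    server = smtplib.SMTP(host=host, port=port)
    server.ehlo()
    if use_tls:
        server.starttls()
        # smtplib docs recommend calling ehlo() before and after starttls()
        server.ehlo()
    # only log in when credentials are actually configured, so
    # unauthenticated relays keep working unchanged
    if user and password:
        try:
            server.login(user=user, password=password)
        except SMTPAuthenticationError:
            server.quit()
            raise
    return server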
@@ -40,6 +40,7 @@ pg_minconn=20
pg_maxconn=50
PG_RETRY_MAX=50
PG_RETRY_INTERVAL=2
PG_POOL=true
put_S3_TTL=20
sentryURL=
sessions_bucket=mobs
@@ -231,7 +231,7 @@ def delete_sentry(projectId: int, context: schemas.CurrentContext = Depends(OR_c


@app.get('/{projectId}/integrations/sentry/events/{eventId}', tags=["integrations"])
def proxy_sentry(projectId: int, eventId: int, context: schemas.CurrentContext = Depends(OR_context)):
def proxy_sentry(projectId: int, eventId: str, context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": log_tool_sentry.proxy_get(tenant_id=context.tenant_id, project_id=projectId, event_id=eventId)}

@@ -1148,14 +1148,6 @@ def generate_new_user_token(context: schemas.CurrentContext = Depends(OR_context
    return {"data": users.generate_new_api_key(user_id=context.user_id)}


@app.post('/account', tags=["account"])
@app.put('/account', tags=["account"])
def edit_account(data: schemas.EditUserSchema = Body(...),
                 context: schemas.CurrentContext = Depends(OR_context)):
    return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data,
                      editor_id=context.user_id)


@app.post('/account/password', tags=["account"])
@app.put('/account/password', tags=["account"])
def change_client_password(data: schemas.EditUserPasswordSchema = Body(...),

@@ -43,6 +43,14 @@ def get_account(context: schemas.CurrentContext = Depends(OR_context)):
    }


@app.post('/account', tags=["account"])
@app.put('/account', tags=["account"])
def edit_account(data: schemas.EditUserSchema = Body(...),
                 context: schemas.CurrentContext = Depends(OR_context)):
    return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data,
                      editor_id=context.user_id)


@app.get('/projects/limit', tags=['projects'])
def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": {
@@ -562,6 +562,8 @@ class _SessionSearchEventRaw(__MixedSearchFilter):
                assert len(values["source"]) > 0 and isinstance(values["source"][0], int), \
                    f"source of type int if required for {PerformanceEventType.time_between_events}"
            else:
                assert "source" in values, f"source is required for {values.get('type')}"
                assert isinstance(values["source"], list), f"source of type list is required for {values.get('type')}"
                for c in values["source"]:
                    assert isinstance(c, int), f"source value should be of type int for {values.get('type')}"
        elif values.get("type") == EventType.error and values.get("source") is None:
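These assertions appear to run inside a schema validator, so a malformed "source" is rejected before any query is built. A stand-alone illustration of that pattern using pydantic v1's root_validator; the PerformanceEvent model below is an assumed stand-in, not the actual _SessionSearchEventRaw schema:

from typing import Optional

from pydantic import BaseModel, root_validator


class PerformanceEvent(BaseModel):
    type: str
    source: Optional[list] = None

    @root_validator
    def validate_source(cls, values):
        # mirror the shape checks from the hunk above: source must be a list of ints
        if values.get("source") is not None:
            assert isinstance(values["source"], list), \
                f"source of type list is required for {values.get('type')}"
            for c in values["source"]:
                assert isinstance(c, int), \
                    f"source value should be of type int for {values.get('type')}"
        return values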
@@ -17,7 +17,7 @@ RUN cd /work_tmp && npm install

WORKDIR /work
COPY . .
RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/.
RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/. && chmod 644 /mappings.wasm

RUN adduser -u 1001 openreplay -D
USER 1001
@@ -4,7 +4,7 @@ LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
RUN apk add --no-cache build-base tini
ARG envarg
ENV APP_NAME=alerts \
    pg_minconn=2 \
    pg_minconn=1 \
    pg_maxconn=10 \
    ENTERPRISE_BUILD=${envarg}
@@ -7,7 +7,8 @@ ENV APP_NAME=crons \
    pg_minconn=2 \
    pg_maxconn=10 \
    ENTERPRISE_BUILD=${envarg} \
    ACTION=""
    ACTION="" \
    PG_POOL=false

WORKDIR /work_tmp
COPY requirements-crons.txt /work_tmp/requirements.txt
@@ -52,30 +52,28 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
                            AND users.tenant_id = %(tenant_id)s
                            AND (roles.all_projects OR roles_projects.project_id = s.project_id)
                        ) AS role_project ON (TRUE)"""
        pre_select = ""
        recorded_q = ""
        if recorded:
            pre_select = """WITH recorded_p AS (SELECT DISTINCT projects.project_id
                            FROM projects INNER JOIN sessions USING (project_id)
                            WHERE tenant_id =%(tenant_id)s
                              AND deleted_at IS NULL
                              AND duration > 0)"""
        cur.execute(
            cur.mogrify(f"""\
                {pre_select}
            recorded_q = """, COALESCE((SELECT TRUE
                                        FROM public.sessions
                                        WHERE sessions.project_id = s.project_id
                                          AND sessions.start_ts >= (EXTRACT(EPOCH FROM s.created_at) * 1000 - 24 * 60 * 60 * 1000)
                                          AND sessions.start_ts <= %(now)s
                                        LIMIT 1), FALSE) AS recorded"""
        query = cur.mogrify(f"""\
                SELECT
                    s.project_id, s.name, s.project_key, s.save_request_payloads
                    {',s.gdpr' if gdpr else ''}
                    {',EXISTS(SELECT 1 FROM recorded_p WHERE recorded_p.project_id = s.project_id) AS recorded' if recorded else ''}
                    {recorded_q}
                    {',stack_integrations.count>0 AS stack_integrations' if stack_integrations else ''}
                FROM public.projects AS s
                    {'LEFT JOIN recorded_p USING (project_id)' if recorded else ''}
                    {'LEFT JOIN LATERAL (SELECT COUNT(*) AS count FROM public.integrations WHERE s.project_id = integrations.project_id LIMIT 1) AS stack_integrations ON TRUE' if stack_integrations else ''}
                {role_query if user_id is not None else ""}
                WHERE s.tenant_id =%(tenant_id)s
                  AND s.deleted_at IS NULL
                ORDER BY s.project_id;""",
            {"tenant_id": tenant_id, "user_id": user_id})
        )
                {"tenant_id": tenant_id, "user_id": user_id, "now": TimeUTC.now()})
        cur.execute(query)
        rows = cur.fetchall()
        if recording_state:
            project_ids = [f'({r["project_id"]})' for r in rows]
@@ -49,6 +49,7 @@ pg_minconn=20
pg_maxconn=50
PG_RETRY_MAX=50
PG_RETRY_INTERVAL=2
PG_POOL=true
put_S3_TTL=20
sentryURL=
sessions_bucket=mobs
@@ -46,6 +46,14 @@ def get_account(context: schemas.CurrentContext = Depends(OR_context)):
    }


@app.post('/account', tags=["account"])
@app.put('/account', tags=["account"])
def edit_account(data: schemas_ee.EditUserSchema = Body(...),
                 context: schemas.CurrentContext = Depends(OR_context)):
    return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data,
                      editor_id=context.user_id)


@app.get('/projects/limit', tags=['projects'])
def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)):
    return {"data": {
@@ -2,14 +2,12 @@ import React from 'react';
import cn from 'classnames';
import { connect } from 'react-redux';
import withPageTitle from 'HOCs/withPageTitle';
import {
  fetchFavoriteList as fetchFavoriteSessionList
} from 'Duck/sessions';
import { fetchFavoriteList as fetchFavoriteSessionList } from 'Duck/sessions';
import { applyFilter, clearEvents, addAttribute } from 'Duck/filters';
import { KEYS } from 'Types/filter/customFilter';
import SessionList from './SessionList';
import stl from './bugFinder.module.css';
import withLocationHandlers from "HOCs/withLocationHandlers";
import withLocationHandlers from 'HOCs/withLocationHandlers';
import { fetch as fetchFilterVariables } from 'Duck/sources';
import { fetchSources } from 'Duck/customField';
import { setActiveTab } from 'Duck/search';

@@ -21,113 +19,113 @@ import { clearSearch, fetchSessions, addFilterByKeyAndValue } from 'Duck/search'
import { FilterKey } from 'Types/filter/filterType';

const weakEqual = (val1, val2) => {
  if (!!val1 === false && !!val2 === false) return true;
  if (!val1 !== !val2) return false;
  return `${ val1 }` === `${ val2 }`;
}
  if (!!val1 === false && !!val2 === false) return true;
  if (!val1 !== !val2) return false;
  return `${val1}` === `${val2}`;
};

const allowedQueryKeys = [
  'userOs',
  'userId',
  'userBrowser',
  'userDevice',
  'userCountry',
  'startDate',
  'endDate',
  'minDuration',
  'maxDuration',
  'referrer',
  'sort',
  'order',
  'userOs',
  'userId',
  'userBrowser',
  'userDevice',
  'userCountry',
  'startDate',
  'endDate',
  'minDuration',
  'maxDuration',
  'referrer',
  'sort',
  'order',
];

@withLocationHandlers()
@connect(state => ({
  filter: state.getIn([ 'filters', 'appliedFilter' ]),
  variables: state.getIn([ 'customFields', 'list' ]),
  sources: state.getIn([ 'customFields', 'sources' ]),
  filterValues: state.get('filterValues'),
  favoriteList: state.getIn([ 'sessions', 'favoriteList' ]),
  currentProjectId: state.getIn([ 'site', 'siteId' ]),
  sites: state.getIn([ 'site', 'list' ]),
  watchdogs: state.getIn(['watchdogs', 'list']),
  activeFlow: state.getIn([ 'filters', 'activeFlow' ]),
  sessions: state.getIn([ 'sessions', 'list' ]),
}), {
  fetchFavoriteSessionList,
  applyFilter,
  addAttribute,
  fetchFilterVariables,
  fetchSources,
  clearEvents,
  setActiveTab,
  clearSearch,
  fetchSessions,
  addFilterByKeyAndValue,
})
@withPageTitle("Sessions - OpenReplay")
@connect(
  (state) => ({
    filter: state.getIn(['filters', 'appliedFilter']),
    variables: state.getIn(['customFields', 'list']),
    sources: state.getIn(['customFields', 'sources']),
    filterValues: state.get('filterValues'),
    favoriteList: state.getIn(['sessions', 'favoriteList']),
    currentProjectId: state.getIn(['site', 'siteId']),
    sites: state.getIn(['site', 'list']),
    watchdogs: state.getIn(['watchdogs', 'list']),
    activeFlow: state.getIn(['filters', 'activeFlow']),
    sessions: state.getIn(['sessions', 'list']),
  }),
  {
    fetchFavoriteSessionList,
    applyFilter,
    addAttribute,
    fetchFilterVariables,
    fetchSources,
    clearEvents,
    setActiveTab,
    clearSearch,
    fetchSessions,
    addFilterByKeyAndValue,
  }
)
@withPageTitle('Sessions - OpenReplay')
export default class BugFinder extends React.PureComponent {
  state = {showRehydratePanel: false}
  constructor(props) {
    super(props);
  state = { showRehydratePanel: false };
  constructor(props) {
    super(props);

    // TODO should cache the response
    // props.fetchSources().then(() => {
    // defaultFilters[6] = {
    // category: 'Collaboration',
    // type: 'CUSTOM',
    // keys: this.props.sources.filter(({type}) => type === 'collaborationTool').map(({ label, key }) => ({ type: 'CUSTOM', source: key, label: label, key, icon: 'integrations/' + key, isFilter: false })).toJS()
    // };
    // defaultFilters[7] = {
    // category: 'Logging Tools',
    // type: 'ERROR',
    // keys: this.props.sources.filter(({type}) => type === 'logTool').map(({ label, key }) => ({ type: 'ERROR', source: key, label: label, key, icon: 'integrations/' + key, isFilter: false })).toJS()
    // };
    // });
    if (props.sessions.size === 0) {
      props.fetchSessions();
    // TODO should cache the response
    // props.fetchSources().then(() => {
    // defaultFilters[6] = {
    // category: 'Collaboration',
    // type: 'CUSTOM',
    // keys: this.props.sources.filter(({type}) => type === 'collaborationTool').map(({ label, key }) => ({ type: 'CUSTOM', source: key, label: label, key, icon: 'integrations/' + key, isFilter: false })).toJS()
    // };
    // defaultFilters[7] = {
    // category: 'Logging Tools',
    // type: 'ERROR',
    // keys: this.props.sources.filter(({type}) => type === 'logTool').map(({ label, key }) => ({ type: 'ERROR', source: key, label: label, key, icon: 'integrations/' + key, isFilter: false })).toJS()
    // };
    // });
    // if (props.sessions.size === 0) {
    // props.fetchSessions();
    // }

    const queryFilter = this.props.query.all(allowedQueryKeys);
    if (queryFilter.hasOwnProperty('userId')) {
      props.addFilterByKeyAndValue(FilterKey.USERID, queryFilter.userId);
    } else {
      if (props.sessions.size === 0) {
        props.fetchSessions();
      }
    }
  }

    const queryFilter = this.props.query.all(allowedQueryKeys);
    if (queryFilter.hasOwnProperty('userId')) {
      props.addFilterByKeyAndValue(FilterKey.USERID, queryFilter.userId);
    } else {
      if (props.sessions.size === 0) {
        props.fetchSessions();
      }
    }
  }
  toggleRehydratePanel = () => {
    this.setState({ showRehydratePanel: !this.state.showRehydratePanel });
  };

  toggleRehydratePanel = () => {
    this.setState({ showRehydratePanel: !this.state.showRehydratePanel })
  }
  setActiveTab = (tab) => {
    this.props.setActiveTab(tab);
  };

  setActiveTab = tab => {
    this.props.setActiveTab(tab);
  }
  render() {
    const { showRehydratePanel } = this.state;

  render() {
    const { showRehydratePanel } = this.state;

    return (
      <div className="page-margin container-90 flex relative">
        <div className="flex-1 flex">
          <div className="side-menu">
            <SessionsMenu
              onMenuItemClick={this.setActiveTab}
              toggleRehydratePanel={ this.toggleRehydratePanel }
            />
          </div>
          <div className={cn("side-menu-margined", stl.searchWrapper) }>
            <NoSessionsMessage />
            <div className="mb-5">
              <MainSearchBar />
              <SessionSearch />
    return (
      <div className="page-margin container-90 flex relative">
        <div className="flex-1 flex">
          <div className="side-menu">
            <SessionsMenu onMenuItemClick={this.setActiveTab} toggleRehydratePanel={this.toggleRehydratePanel} />
          </div>
          <div className={cn('side-menu-margined', stl.searchWrapper)}>
            <NoSessionsMessage />
            <div className="mb-5">
              <MainSearchBar />
              <SessionSearch />
            </div>
            <SessionList onMenuItemClick={this.setActiveTab} />
          </div>
        </div>
      </div>
            <SessionList onMenuItemClick={this.setActiveTab} />
          </div>
        </div>
      </div>
    );
  }
    );
  }
}
@@ -26,6 +26,7 @@ function SessionListHeader({ activeTab, count, applyFilter, filter }) {
  }, [label]);

  const { startDate, endDate, rangeValue } = filter;
  console.log('startDate', startDate);
  const period = new Record({ start: startDate, end: endDate, rangeName: rangeValue, timezoneOffset: getTimeZoneOffset() });

  const onDateChange = (e) => {

@@ -40,7 +41,7 @@ function SessionListHeader({ activeTab, count, applyFilter, filter }) {
      const dateValues = period.toJSON();
      dateValues.startDate = moment(dateValues.startDate).startOf('day').utcOffset(getTimeZoneOffset(), true).valueOf();
      dateValues.endDate = moment(dateValues.endDate).endOf('day').utcOffset(getTimeZoneOffset(), true).valueOf();
      applyFilter(dateValues);
      // applyFilter(dateValues);
    }
  }, [label]);
@@ -102,7 +102,7 @@ function UserForm(props: Props) {
      <Form.Field>
        <label htmlFor="role">{ 'Role' }</label>
        <Select
          placeholder="Selct Role"
          placeholder="Select Role"
          selection
          options={ roles }
          name="roleId"
@@ -4,7 +4,7 @@ import { NoContent } from 'UI';
import { Styles } from '../../common';
import {
  BarChart, Bar, CartesianGrid, Tooltip,
  LineChart, Line, Legend, ResponsiveContainer,
  Legend, ResponsiveContainer,
  XAxis, YAxis
} from 'recharts';

@@ -13,7 +13,8 @@ interface Props {
  metric?: any
}
function ErrorsByOrigin(props: Props) {
  const { data, metric } = props;
  const { metric } = props;

  return (
    <NoContent
      size="small"
@@ -79,8 +79,8 @@ function WidgetChart(props: Props) {
  const debounceRequest: any = React.useCallback(debounce(fetchMetricChartData, 500), []);
  useEffect(() => {
    if (prevMetricRef.current && prevMetricRef.current.name !== metric.name) {
      prevMetricRef.current = metric;
      return
      prevMetricRef.current = metric;
      return
    };
    prevMetricRef.current = metric;
    const timestmaps = drillDownPeriod.toTimestamps();

@@ -106,10 +106,11 @@ function WidgetChart(props: Props) {
  }

  if (metricType === 'predefined') {
    const defaultMetric = metric.data.chart.length === 0 ? metricWithData : metric
    if (isOverviewWidget) {
      return <CustomMetricOverviewChart data={data} />
    }
    return <WidgetPredefinedChart isTemplate={isTemplate} metric={metric} data={data} predefinedKey={metric.predefinedKey} />
    return <WidgetPredefinedChart isTemplate={isTemplate} metric={defaultMetric} data={data} predefinedKey={metric.predefinedKey} />
  }

  if (metricType === 'timeseries') {
@@ -80,6 +80,7 @@ export default class Fetch extends React.PureComponent {
  render() {
    const { listNow } = this.props;
    const { current, currentIndex, showFetchDetails, filteredList } = this.state;
    const hasErrors = filteredList.some((r) => r.status >= 400);
    return (
      <React.Fragment>
        <SlideModal

@@ -140,7 +141,7 @@ export default class Fetch extends React.PureComponent {
          show={filteredList.length === 0}
        >
          {/* <NoContent size="small" show={filteredList.length === 0}> */}
          <TimeTable rows={filteredList} onRowClick={this.onRowClick} hoverable navigation activeIndex={listNow.length - 1}>
          <TimeTable rows={filteredList} onRowClick={this.onRowClick} hoverable navigation={hasErrors} activeIndex={listNow.length - 1}>
            {[
              {
                label: 'Status',
@@ -118,7 +118,9 @@ export default class TimeTable extends React.PureComponent<Props, State> {
  autoScroll = true;

  componentDidMount() {
    this.scroller.current.scrollToRow(this.props.activeIndex);
    if (this.scroller.current) {
      this.scroller.current.scrollToRow(this.props.activeIndex);
    }
  }

  componentDidUpdate(prevProps: any, prevState: any) {

@@ -135,7 +137,7 @@ export default class TimeTable extends React.PureComponent<Props, State> {
        ...computeTimeLine(this.props.rows, this.state.firstVisibleRowIndex, this.visibleCount),
      });
    }
    if (this.props.activeIndex >= 0 && prevProps.activeIndex !== this.props.activeIndex) {
    if (this.props.activeIndex >= 0 && prevProps.activeIndex !== this.props.activeIndex && this.scroller.current) {
      this.scroller.current.scrollToRow(this.props.activeIndex);
    }
  }

@@ -227,8 +229,24 @@ export default class TimeTable extends React.PureComponent<Props, State> {
    <div className={cn(className, 'relative')}>
      {navigation && (
        <div className={cn(autoscrollStl.navButtons, 'flex items-center')}>
          <Button variant="text-primary" icon="chevron-up" onClick={this.onPrevClick} />
          <Button variant="text-primary" icon="chevron-down" onClick={this.onNextClick} />
          <Button
            variant="text-primary"
            icon="chevron-up"
            tooltip={{
              title: 'Previous Error',
              delay: 0,
            }}
            onClick={this.onPrevClick}
          />
          <Button
            variant="text-primary"
            icon="chevron-down"
            tooltip={{
              title: 'Next Error',
              delay: 0,
            }}
            onClick={this.onNextClick}
          />
          {/* <IconButton
            size="small"
            icon="chevron-up"
@@ -29,11 +29,11 @@ function FilterItem(props: Props) {
  };

  const onOperatorChange = (e: any, { name, value }: any) => {
    props.onUpdate({ ...filter, operator: value.value });
    props.onUpdate({ ...filter, operator: value });
  };

  const onSourceOperatorChange = (e: any, { name, value }: any) => {
    props.onUpdate({ ...filter, sourceOperator: value.value });
    props.onUpdate({ ...filter, sourceOperator: value });
  };

  const onUpdateSubFilter = (subFilter: any, subFilterIndex: any) => {

@@ -73,7 +73,7 @@ function FilterItem(props: Props) {
      )}

      {/* Filter values */}
      {!isSubFilter && (
      {!isSubFilter && filter.operatorOptions && (
        <>
          <FilterOperator
            options={filter.operatorOptions}
@@ -79,7 +79,7 @@ function FilterOperator(props: Props) {
        placeholder="Select"
        isDisabled={isDisabled}
        value={value ? options.find((i: any) => i.value === value) : null}
        onChange={({ value }: any) => onChange(null, { name: 'operator', value })}
        onChange={({ value }: any) => onChange(null, { name: 'operator', value: value.value })}
      />
    </div>
  );
@@ -53,7 +53,7 @@ function LiveSessionList(props: Props) {
  useEffect(() => {
    if (metaListLoading) return;
    const _filter = { ...filter };
    if (sortOptions[1]) {
    if (sortOptions[1] && !filter.sort) {
      _filter.sort = sortOptions[1].value;
    }
    props.applyFilter(_filter);
@@ -96,7 +96,7 @@ function SessionSearch(props: Props) {

export default connect(
  (state: any) => ({
    saveRequestPayloads: state.getIn(['site', 'active', 'saveRequestPayloads']),
    saveRequestPayloads: state.getIn(['site', 'instance', 'saveRequestPayloads']),
    appliedFilter: state.getIn(['search', 'instance']),
  }),
  { edit, addFilter }
@@ -1,76 +1,84 @@
import React, { useEffect, useState } from 'react';
import { Icon, Toggler, Button, Input, Loader } from 'UI';
import { Icon, Toggler, Button, Input, Loader, Popup } from 'UI';
import { useStore } from 'App/mstore';
import { observer } from 'mobx-react-lite';
import { connect } from 'react-redux';
import cn from 'classnames';

function CaptureRate() {
function CaptureRate({ isAdmin = false }) {
  const { settingsStore } = useStore();
  const [changed, setChanged] = useState(false);
  const [sessionSettings] = useState(settingsStore.sessionSettings)
  const [loading] = useState(settingsStore.loadingCaptureRate)
  const [sessionSettings] = useState(settingsStore.sessionSettings);
  const [loading] = useState(settingsStore.loadingCaptureRate);

  const captureRate = sessionSettings.captureRate;
  const setCaptureRate = sessionSettings.changeCaptureRate
  const captureAll = sessionSettings.captureAll
  const setCaptureAll = sessionSettings.changeCaptureAll
  const setCaptureRate = sessionSettings.changeCaptureRate;
  const captureAll = sessionSettings.captureAll;
  const setCaptureAll = sessionSettings.changeCaptureAll;

  useEffect(() => {
    settingsStore.fetchCaptureRate()
  }, [])
    settingsStore.fetchCaptureRate();
  }, []);

  const changeCaptureRate = (input: string) => {
    setChanged(true);
    setCaptureRate(input);
  }
  };

  const toggleRate = () => {
    const newValue = !captureAll;
    setChanged(true)
    if (newValue === true) {
    setChanged(true);
    if (newValue === true) {
      const updateObj = {
        rate:"100",
        rate: '100',
        captureAll: true,
      }
      settingsStore.saveCaptureRate(updateObj)
      };
      settingsStore.saveCaptureRate(updateObj);
    } else {
      setCaptureAll(newValue);
    }
  }
  };

  return (
    <Loader loading={loading}>
      <h3 className="text-lg">Recordings</h3>
      <div className="my-1">The percentage of session you want to capture</div>
      <div className="mt-2 mb-4 mr-1 flex items-center">
        <Toggler
          checked={captureAll}
          name="test"
          onChange={toggleRate}
        />
        <span className="ml-2" style={{ color: captureAll ? '#000000' : '#999' }}>100%</span>
      </div>
      {!captureAll && (
        <div className="flex items-center">
          <div className="relative">
            <Input
              type="number"
              onChange={(e: React.ChangeEvent<HTMLInputElement>) => changeCaptureRate(e.target.value)}
              value={captureRate.toString()}
              style={{ height: '38px', width: '100px'}}
              disabled={captureAll}
              min={0}
              max={100}
            />
            <Icon className="absolute right-0 mr-6 top-0 bottom-0 m-auto" name="percent" color="gray-medium" size="18" />
          </div>
      <Popup content="You don't have permission to change." disabled={isAdmin} delay={0}>
        <div className={cn('mt-2 mb-4 mr-1 flex items-center', { disabled: !isAdmin })}>
          <Toggler checked={captureAll} name="test" onChange={toggleRate} />
          <span className="ml-2" style={{ color: captureAll ? '#000000' : '#999' }}>
            100%
          </span>
        </div>
      </Popup>
      {!captureAll && (
        <div className="flex items-center">
          <Popup content="You don't have permission to change." disabled={isAdmin} delay={0}>
            <div className={cn("relative", { 'disabled' : !isAdmin })}>
              <Input
                type="number"
                onChange={(e: React.ChangeEvent<HTMLInputElement>) => changeCaptureRate(e.target.value)}
                value={captureRate.toString()}
                style={{ height: '38px', width: '100px' }}
                disabled={captureAll}
                min={0}
                max={100}
              />
              <Icon className="absolute right-0 mr-6 top-0 bottom-0 m-auto" name="percent" color="gray-medium" size="18" />
            </div>
          </Popup>
          <span className="mx-3">of the sessions</span>
          <Button
            disabled={!changed}
            variant="outline"
            onClick={() => settingsStore.saveCaptureRate({
              rate: captureRate,
              captureAll,
            }).finally(() => setChanged(false))}
            onClick={() =>
              settingsStore
                .saveCaptureRate({
                  rate: captureRate,
                  captureAll,
                })
                .finally(() => setChanged(false))
            }
          >
            Update
          </Button>

@@ -80,4 +88,6 @@ function CaptureRate() {
  );
}

export default observer(CaptureRate);
export default connect((state: any) => ({
  isAdmin: state.getIn(['user', 'account', 'admin']) || state.getIn(['user', 'account', 'superAdmin']),
}))(observer(CaptureRate));
@@ -1,6 +1,6 @@
import React from 'react';
import cn from 'classnames';
import { CircularLoader, Icon } from 'UI';
import { CircularLoader, Icon, Popup } from 'UI';

interface Props {
  className?: string;

@@ -12,6 +12,7 @@ interface Props {
  loading?: boolean;
  icon?: string;
  rounded?: boolean;
  tooltip?: any;
  [x: string]: any;
}
export default (props: Props) => {

@@ -25,6 +26,7 @@ export default (props: Props) => {
    children,
    loading = false,
    rounded = false,
    tooltip = null,
    ...rest
  } = props;

@@ -70,7 +72,7 @@ export default (props: Props) => {
    classes = classes.map((c) => c.replace('rounded', 'rounded-full h-10 w-10 justify-center'));
  }

  return (
  const render = () => (
    <button {...rest} type={type} className={cn(classes, className)}>
      {icon && <Icon className={cn({ 'mr-2': children })} name={icon} color={iconColor} size="16" />}
      {loading && (

@@ -81,4 +83,6 @@ export default (props: Props) => {
      <div className={cn({ 'opacity-0': loading }, 'flex items-center')}>{children}</div>
    </button>
  );

  return tooltip ? <Popup content={tooltip.title} {...tooltip}>{render()}</Popup> : render();
};
@@ -20,7 +20,7 @@ const FETCH_SESSION_LIST = fetchListType(`${name}/FETCH_SESSION_LIST`);

const initialState = Map({
  list: List(),
  instance: new Filter({ filters: [], sort: 'timestamp' }),
  instance: new Filter({ filters: [], sort: '' }),
  filterSearchList: {},
  currentPage: 1,
});

@@ -86,6 +86,8 @@ function reducer(state = initialState, action = {}) {
      return state.set('filterSearchList', groupedList);
    case APPLY_SAVED_SEARCH:
      return state.set('savedSearch', action.filter);
    case CLEAR_SEARCH:
      return state.set('savedSearch', new SavedFilter({}));
    case EDIT_SAVED_SEARCH:
      return state.mergeIn(['savedSearch'], action.instance);
    case UPDATE_CURRENT_PAGE:

@@ -286,8 +288,9 @@ export function fetchFilterSearch(params) {
}

export const clearSearch = () => (dispatch, getState) => {
  dispatch(applySavedSearch(new SavedFilter({})));
  dispatch(edit(new Filter({ filters: [] })));
  const filter = getState().getIn(['search', 'instance']);
  // dispatch(applySavedSearch(new SavedFilter({})));
  dispatch(edit(new Filter({ startDate: filter.startDate, endDate: filter.endDate, rangeValue: filter.rangeValue, filters: [] })));
  return dispatch({
    type: CLEAR_SEARCH,
  });
@@ -19,7 +19,7 @@ import Session from "./types/session";
import Error from "./types/error";
import { FilterKey } from "Types/filter/filterType";

export interface IDashboardSotre {
export interface IDashboardStore {
  dashboards: IDashboard[];
  selectedDashboard: IDashboard | null;
  dashboardInstance: IDashboard;

@@ -91,12 +91,12 @@ export interface IDashboardSotre {
  ): Promise<any>;
  setPeriod(period: any): void;
}
export default class DashboardStore implements IDashboardSotre {
export default class DashboardStore implements IDashboardStore {
  siteId: any = null;
  // Dashbaord / Widgets
  dashboards: Dashboard[] = [];
  selectedDashboard: Dashboard | null = null;
  dashboardInstance: IDashboard = new Dashboard();
  dashboardInstance: Dashboard = new Dashboard();
  selectedWidgets: IWidget[] = [];
  currentWidget: Widget = new Widget();
  widgetCategories: any[] = [];

@@ -226,7 +226,7 @@ export default class DashboardStore implements IDashboardSotre {
  }

  fetch(dashboardId: string): Promise<any> {
    this.fetchingDashboard = true;
    this.setFetchingDashboard(true);
    return dashboardService
      .getDashboard(dashboardId)
      .then((response) => {

@@ -235,10 +235,14 @@ export default class DashboardStore implements IDashboardSotre {
        });
      })
      .finally(() => {
        this.fetchingDashboard = false;
        this.setFetchingDashboard(false);
      });
  }

  setFetchingDashboard(value: boolean) {
    this.fetchingDashboard = value;
  }

  save(dashboard: IDashboard): Promise<any> {
    this.isSaving = true;
    const isCreating = !dashboard.dashboardId;
@@ -1,5 +1,5 @@
import React from 'react';
import DashboardStore, { IDashboardSotre } from './dashboardStore';
import DashboardStore, { IDashboardStore } from './dashboardStore';
import MetricStore, { IMetricStore } from './metricStore';
import UserStore from './userStore';
import RoleStore from './roleStore';

@@ -12,7 +12,7 @@ import NotificationStore from './notificationStore';
import ErrorStore from './errorStore';

export class RootStore {
  dashboardStore: IDashboardSotre;
  dashboardStore: IDashboardStore;
  metricStore: IMetricStore;
  funnelStore: FunnelStore;
  settingsStore: SettingsStore;
@@ -18,6 +18,7 @@ export default class NotificationStore {
      fetchNotifications: action,
      ignoreAllNotifications: action,
      ignoreNotification: action,
      setNotificationsCount: action,
    });
  }

@@ -74,15 +75,19 @@ export default class NotificationStore {
    });
  }

  setNotificationsCount(count: number) {
    this.notificationsCount = count;
  }

  fetchNotificationsCount(): Promise<any> {
    return new Promise((resolve, reject) => {
      userService.getNotificationsCount()
        .then((response: any) => {
          this.notificationsCount = response.count;
          this.setNotificationsCount(response.count);
          resolve(response);
        }).catch((error: any) => {
          reject(error);
        });
    });
  }
}
}
}
@@ -1,21 +1,25 @@
import { makeAutoObservable, runInAction, observable, action, reaction } from "mobx"
import { FilterKey, FilterType } from 'Types/filter/filterType'
import { filtersMap } from 'Types/filter/newFilter'
import { makeAutoObservable, runInAction, observable, action, reaction } from 'mobx';
import { FilterKey, FilterType, FilterCategory } from 'Types/filter/filterType';
import { filtersMap } from 'Types/filter/newFilter';

export default class FilterItem {
  type: string = ''
  key: string = ''
  label: string = ''
  value: any = [""]
  isEvent: boolean = false
  operator: string = ''
  source: string = ''
  filters: FilterItem[] = []
  operatorOptions: any[] = []
  options: any[] = []
  isActive: boolean = true
  completed: number = 0
  dropped: number = 0
  type: string = '';
  category: FilterCategory = FilterCategory.METADATA;
  key: string = '';
  label: string = '';
  value: any = [''];
  isEvent: boolean = false;
  operator: string = '';
  hasSource: boolean = false;
  source: string = '';
  sourceOperator: string = '';
  sourceOperatorOptions: any = [];
  filters: FilterItem[] = [];
  operatorOptions: any[] = [];
  options: any[] = [];
  isActive: boolean = true;
  completed: number = 0;
  dropped: number = 0;

  constructor(data: any = {}) {
    makeAutoObservable(this, {

@@ -26,9 +30,11 @@ export default class FilterItem {
      source: observable,
      filters: observable,
      isActive: observable,
      sourceOperator: observable,
      category: observable,

      merge: action
    })
      merge: action,
    });

    if (Array.isArray(data.filters)) {
      data.filters = data.filters.map(function (i) {

@@ -36,55 +42,64 @@ export default class FilterItem {
      });
    }

    this.merge(data)
    this.merge(data);
  }

  updateKey(key: string, value: any) {
    this[key] = value
    this[key] = value;
  }

  merge(data) {
    Object.keys(data).forEach(key => {
      this[key] = data[key]
    })
  merge(data: any) {
    Object.keys(data).forEach((key) => {
      this[key] = data[key];
    });
  }

  fromJson(json, mainFilterKey = '') {
    let _filter = filtersMap[json.type] || {}
  fromJson(json: any, mainFilterKey = '') {
    const isMetadata = json.type === FilterKey.METADATA;
    let _filter: any = (isMetadata ? filtersMap[json.source] : filtersMap[json.type]) || {};

    if (mainFilterKey) {
      const mainFilter = filtersMap[mainFilterKey];
      const subFilterMap = {}
      mainFilter.filters.forEach(option => {
        subFilterMap[option.key] = option
      })
      _filter = subFilterMap[json.type]
      const subFilterMap = {};
      mainFilter.filters.forEach((option: any) => {
        subFilterMap[option.key] = option;
      });
      _filter = subFilterMap[json.type];
    }
    this.type = _filter.type
    this.key = _filter.key
    this.label = _filter.label
    this.operatorOptions = _filter.operatorOptions
    this.options = _filter.options
    this.isEvent = _filter.isEvent
    this.type = _filter.type;
    this.key = _filter.key;
    this.label = _filter.label;
    this.operatorOptions = _filter.operatorOptions;
    this.hasSource = _filter.hasSource;
    this.category = _filter.category;
    this.sourceOperatorOptions = _filter.sourceOperatorOptions;
    this.options = _filter.options;
    this.isEvent = _filter.isEvent;

    this.value = json.value.length === 0 || !json.value ? [""] : json.value,
    this.operator = json.operator

    this.filters = _filter.type === FilterType.SUB_FILTERS && json.filters ? json.filters.map(i => new FilterItem().fromJson(i, json.type)) : []
    (this.value = json.value.length === 0 || !json.value ? [''] : json.value), (this.operator = json.operator);
    this.source = json.source;
    this.sourceOperator = json.sourceOperator;

    this.completed = json.completed
    this.dropped = json.dropped
    return this
    this.filters =
      _filter.type === FilterType.SUB_FILTERS && json.filters ? json.filters.map((i: any) => new FilterItem().fromJson(i, json.type)) : [];

    this.completed = json.completed;
    this.dropped = json.dropped;
    return this;
  }

  toJson() {
  toJson(): any {
    const isMetadata = this.category === FilterCategory.METADATA;
    const json = {
      type: this.key,
      type: isMetadata ? FilterKey.METADATA : this.key,
      isEvent: this.isEvent,
      value: this.value,
      operator: this.operator,
      source: this.source,
      filters: Array.isArray(this.filters) ? this.filters.map(i => i.toJson()) : [],
    }
    return json
      source: isMetadata ? this.key : this.source,
      sourceOperator: this.sourceOperator,
      filters: Array.isArray(this.filters) ? this.filters.map((i) => i.toJson()) : [],
    };
    return json;
  }
}
}
@@ -23,6 +23,7 @@ export default class UserStore {
      updateUser: action,
      updateKey: action,
      initUser: action,
      setLimits: action,
    })
  }

@@ -30,7 +31,7 @@ export default class UserStore {
    return new Promise((resolve, reject) => {
      userService.getLimits()
        .then((response: any) => {
          this.limits = response;
          this.setLimits(response);
          resolve(response);
        }).catch((error: any) => {
          reject(error);

@@ -38,6 +39,10 @@ export default class UserStore {
      });
    });
  }

  setLimits(limits: any) {
    this.limits = limits;
  }

  initUser(user?: any ): Promise<void> {
    return new Promise((resolve, reject) => {
      if (user) {

@@ -175,4 +180,4 @@ export default class UserStore {

    return promise;
  }
}
}
@@ -12,11 +12,7 @@ COPY nginx.conf /usr/local/openresty${RESTY_DEB_FLAVOR}/nginx/conf/nginx.conf
COPY default.conf /etc/nginx/conf.d/default.conf
COPY compression.conf /etc/nginx/conf.d/compression.conf
COPY location.list /etc/nginx/conf.d/location.list
RUN chmod 0644 /usr/local/openresty${RESTY_DEB_FLAVOR}/nginx/conf/nginx.conf
RUN chmod 0644 /usr/local/openresty${RESTY_DEB_FLAVOR}/nginx/conf/nginx.conf

RUN chown -R 1001 /var/run/openresty /usr/local/openresty

# copy nginx-selfsigned.crt /tmp/root.cert
# RUN cat /tmp/root.cert >> /etc/ssl/certs/ca-certificates.crt

USER 1001
@@ -122,14 +122,13 @@ healthCheck:
  timeoutSeconds: 10


persistence:
  # Spec of spec.template.spec.containers[*].volumeMounts
  mounts:
    - mountPath: /mnt/efs
      name: datadir
  # Spec of spec.template.spec.volumes
  volumes:
    - hostPath:
        path: /openreplay/storage/nfs
        type: DirectoryOrCreate
      name: datadir
persistence: {}
# # Spec of spec.template.spec.containers[*].volumeMounts
# mounts:
#   - name: kafka-ssl
#     mountPath: /opt/kafka/ssl
# # Spec of spec.template.spec.volumes
# volumes:
#   - name: kafka-ssl
#     secret:
#       secretName: kafka-ssl
@@ -230,7 +230,7 @@ export default class Assist {
        endAgentCall(id)
      })

      socket.on('_agent_name', (id, name) => {
      socket.on('_agent_name', (id, name) => {
        callingAgents.set(id, name)
        updateCallerNames()
      })

@@ -285,7 +285,7 @@ export default class Assist {
      if (callConfirmAnswer) { // Already asking
        return callConfirmAnswer
      }
      callConfirmWindow = new ConfirmWindow(callConfirmDefault(this.options.callConfirm || {
      callConfirmWindow = new ConfirmWindow(callConfirmDefault(this.options.callConfirm || {
        text: this.options.confirmText,
        style: this.options.confirmStyle,
      })) // TODO: reuse ?

@@ -345,7 +345,7 @@ export default class Assist {
        // Request local stream for the new connection
        try {
          // lStreams are reusable so fare we don't delete them in the `endAgentCall`
          if (!lStreams[call.peer]) {
          if (!lStreams[call.peer]) {
            app.debug.log('starting new stream for', call.peer)
            lStreams[call.peer] = await RequestLocalStream()
          }

@@ -367,8 +367,8 @@ export default class Assist {
          annot.mount()
        }
        // have to be updated
        callUI.setLocalStreams(Object.values(lStreams))

        callUI.setLocalStreams(Object.values(lStreams))

        call.on('error', e => {
          app.debug.warn('Call error:', e)
          initiateCallEnd()
@@ -174,6 +174,13 @@ const getValue = function (obj, key) {
    return undefined;
}
const sortPaginate = function (list, filters) {
    if (typeof (list) === "object" && !Array.isArray(list)) {
        for (const [key, value] of Object.entries(list)) {
            list[key] = sortPaginate(value, filters);
        }
        return list
    }

    const total = list.length;
    list.sort((a, b) => {
        const tA = getValue(a, "timestamp");