diff --git a/.github/workflows/frontend-ee.yaml b/.github/workflows/frontend-ee.yaml
deleted file mode 100644
index f4de13db1..000000000
--- a/.github/workflows/frontend-ee.yaml
+++ /dev/null
@@ -1,51 +0,0 @@
-name: S3 Deploy EE
-on:
- push:
- branches:
- - dev
- paths:
- - ee/frontend/**
-
-jobs:
- build:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v2
-
- - name: Cache node modules
- uses: actions/cache@v1
- with:
- path: node_modules
- key: ${{ runner.OS }}-build-${{ hashFiles('**/package-lock.json') }}
- restore-keys: |
- ${{ runner.OS }}-build-
- ${{ runner.OS }}-
-
- - uses: azure/k8s-set-context@v1
- with:
- method: kubeconfig
- kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
- id: setcontext
- - name: Install
- run: npm install
-
- - name: Build and deploy
- run: |
- cd frontend
- bash build.sh
- cp -arl public frontend
- minio_pod=$(kubectl get po -n db -l app.kubernetes.io/name=minio -n db --output custom-columns=name:.metadata.name | tail -n+2)
- echo $minio_pod
- echo copying frontend to container.
- kubectl -n db cp frontend $minio_pod:/data/
- rm -rf frontend
-
- # - name: Debug Job
- # if: ${{ failure() }}
- # uses: mxschmitt/action-tmate@v3
- # env:
- # AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
- # AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- # AWS_REGION: eu-central-1
- # AWS_S3_BUCKET_NAME: ${{ secrets.AWS_S3_BUCKET_NAME }}
diff --git a/.github/workflows/frontend.yaml b/.github/workflows/frontend.yaml
index 84af48e6a..8c5038f5f 100644
--- a/.github/workflows/frontend.yaml
+++ b/.github/workflows/frontend.yaml
@@ -1,4 +1,4 @@
-name: S3 Deploy
+name: Frontend FOSS Deployment
on:
push:
branches:
@@ -27,8 +27,8 @@ jobs:
method: kubeconfig
kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext
- - name: Install
- run: npm install
+# - name: Install
+# run: npm install
- name: Build and deploy
run: |
diff --git a/api/.env.default b/api/.env.default
index 8f4ddc01c..6ae959a7d 100644
--- a/api/.env.default
+++ b/api/.env.default
@@ -28,7 +28,8 @@ jwt_algorithm=HS512
jwt_exp_delta_seconds=2592000
jwt_issuer=openreplay-default-foss
jwt_secret="SET A RANDOM STRING HERE"
-peers=http://utilities-openreplay.app.svc.cluster.local:9000/assist/%s/peers
+peersList=http://utilities-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-list
+peers=http://utilities-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-live
pg_dbname=postgres
pg_host=postgresql.db.svc.cluster.local
pg_password=asayerPostgres
diff --git a/api/chalicelib/core/alerts.py b/api/chalicelib/core/alerts.py
index 6fe799c19..e5316ba06 100644
--- a/api/chalicelib/core/alerts.py
+++ b/api/chalicelib/core/alerts.py
@@ -18,7 +18,7 @@ def get(id):
{"id": id})
)
a = helper.dict_to_camel_case(cur.fetchone())
- return __process_circular(a)
+ return helper.custom_alert_to_front(__process_circular(a))
def get_all(project_id):
@@ -31,8 +31,8 @@ def get_all(project_id):
{"project_id": project_id})
cur.execute(query=query)
all = helper.list_to_camel_case(cur.fetchall())
- for a in all:
- a = __process_circular(a)
+ for i in range(len(all)):
+ all[i] = helper.custom_alert_to_front(__process_circular(all[i]))
return all
@@ -58,7 +58,7 @@ def create(project_id, data: schemas.AlertSchema):
{"project_id": project_id, **data})
)
a = helper.dict_to_camel_case(cur.fetchone())
- return {"data": helper.dict_to_camel_case(__process_circular(a))}
+ return {"data": helper.custom_alert_to_front(helper.dict_to_camel_case(__process_circular(a)))}
def update(id, data: schemas.AlertSchema):
@@ -81,7 +81,7 @@ def update(id, data: schemas.AlertSchema):
{"id": id, **data})
cur.execute(query=query)
a = helper.dict_to_camel_case(cur.fetchone())
- return {"data": __process_circular(a)}
+ return {"data": helper.custom_alert_to_front(__process_circular(a))}
def process_notifications(data):
@@ -166,5 +166,5 @@ def get_predefined_values():
"unit": "count" if v.endswith(".count") else "ms",
"predefined": True,
"metricId": None,
- "seriesId": None} for v in values]
+ "seriesId": None} for v in values if v != schemas.AlertColumn.custom]
return values
diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py
index cd76d0be4..aee8a97f5 100644
--- a/api/chalicelib/core/assist.py
+++ b/api/chalicelib/core/assist.py
@@ -1,10 +1,8 @@
-import schemas
-from chalicelib.utils import pg_client, helper
-from chalicelib.core import projects, sessions, sessions_metas
import requests
from decouple import config
-from chalicelib.core import projects, sessions, sessions_metas
+import schemas
+from chalicelib.core import projects, sessions
from chalicelib.utils import pg_client, helper
SESSION_PROJECTION_COLS = """s.project_id,
@@ -66,10 +64,33 @@ def get_live_sessions(project_id, filters=None):
return helper.list_to_camel_case(results)
+def get_live_sessions_ws(project_id):
+ project_key = projects.get_project_key(project_id)
+ connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}")
+ if connected_peers.status_code != 200:
+ print("!! issue with the peer-server")
+ print(connected_peers.text)
+ return []
+ live_peers = connected_peers.json().get("data", [])
+ for s in live_peers:
+ s["live"] = True
+ s["projectId"] = project_id
+ live_peers = sorted(live_peers, key=lambda l: l.get("timestamp", 0), reverse=True)
+ return live_peers
+
+
+def get_live_session_by_id(project_id, session_id):
+ all_live = get_live_sessions_ws(project_id)
+ for l in all_live:
+ if str(l.get("sessionID")) == str(session_id):
+ return l
+ return None
+
+
def is_live(project_id, session_id, project_key=None):
if project_key is None:
project_key = projects.get_project_key(project_id)
- connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}")
+ connected_peers = requests.get(config("peersList") % config("S3_KEY") + f"/{project_key}")
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py
index 10e86024c..eb918b222 100644
--- a/api/chalicelib/core/custom_metrics.py
+++ b/api/chalicelib/core/custom_metrics.py
@@ -8,7 +8,7 @@ from chalicelib.utils.TimeUTC import TimeUTC
def try_live(project_id, data: schemas.TryCustomMetricsSchema):
results = []
- for s in data.series:
+ for i, s in enumerate(data.series):
s.filter.startDate = data.startDate
s.filter.endDate = data.endDate
results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
@@ -21,16 +21,53 @@ def try_live(project_id, data: schemas.TryCustomMetricsSchema):
r["previousCount"] = sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
view_type=data.viewType)
r["countProgress"] = helper.__progress(old_val=r["previousCount"], new_val=r["count"])
+ r["seriesName"] = s.name if s.name else i + 1
+ r["seriesId"] = s.series_id if s.series_id else None
results[-1] = r
return results
+def merged_live(project_id, data: schemas.TryCustomMetricsSchema):
+ series_charts = try_live(project_id=project_id, data=data)
+ if data.viewType == schemas.MetricViewType.progress:
+ return series_charts
+ results = [{}] * len(series_charts[0])
+ for i in range(len(results)):
+ for j, series_chart in enumerate(series_charts):
+ results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"],
+ data.series[j].name if data.series[j].name else j + 1: series_chart[i]["count"]}
+ return results
+
+
def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
if metric is None:
return None
metric: schemas.TryCustomMetricsSchema = schemas.TryCustomMetricsSchema.parse_obj({**data.dict(), **metric})
- return try_live(project_id=project_id, data=metric)
+ series_charts = try_live(project_id=project_id, data=metric)
+ if data.viewType == schemas.MetricViewType.progress:
+ return series_charts
+ results = [{}] * len(series_charts[0])
+ for i in range(len(results)):
+ for j, series_chart in enumerate(series_charts):
+ results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"],
+ metric.series[j].name: series_chart[i]["count"]}
+ return results
+
+
+def get_sessions(project_id, user_id, metric_id, data: schemas.CustomMetricRawPayloadSchema):
+ metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False)
+ if metric is None:
+ return None
+ metric: schemas.TryCustomMetricsSchema = schemas.TryCustomMetricsSchema.parse_obj({**data.dict(), **metric})
+ results = []
+ for s in metric.series:
+ s.filter.startDate = data.startDate
+ s.filter.endDate = data.endDate
+ results.append({"seriesId": s.series_id, "seriesName": s.name,
+ **sessions.search2_pg(data=s.filter, project_id=project_id, user_id=user_id)})
+
+ return results
def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema):
@@ -89,6 +126,7 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
if s.series_id is None:
n_series.append({"i": i, "s": s})
prefix = "n_"
+ s.index = i
else:
u_series.append({"i": i, "s": s})
u_series_ids.append(s.series_id)
@@ -230,3 +268,16 @@ def get_series_for_alert(project_id, user_id):
)
rows = cur.fetchall()
return helper.list_to_camel_case(rows)
+
+
+def change_state(project_id, metric_id, user_id, status):
+ with pg_client.PostgresClient() as cur:
+ cur.execute(
+ cur.mogrify("""\
+ UPDATE public.metrics
+ SET active = %(status)s
+ WHERE metric_id = %(metric_id)s
+ AND (user_id = %(user_id)s OR is_public);""",
+ {"metric_id": metric_id, "status": status, "user_id": user_id})
+ )
+ return get(metric_id=metric_id, project_id=project_id, user_id=user_id)
diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py
index 0a330d625..35baea430 100644
--- a/api/chalicelib/core/events.py
+++ b/api/chalicelib/core/events.py
@@ -245,7 +245,7 @@ class event_type:
STATEACTION = Event(ui_type=schemas.EventType.state_action, table="events.state_actions", column="name")
ERROR = Event(ui_type=schemas.EventType.error, table="events.errors",
column=None) # column=None because errors are searched by name or message
- METADATA = Event(ui_type=schemas.EventType.metadata, table="public.sessions", column=None)
+ METADATA = Event(ui_type=schemas.FilterType.metadata, table="public.sessions", column=None)
# IOS
CLICK_IOS = Event(ui_type=schemas.EventType.click_ios, table="events_ios.clicks", column="label")
INPUT_IOS = Event(ui_type=schemas.EventType.input_ios, table="events_ios.inputs", column="label")
diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py
index c33bed586..6e4553134 100644
--- a/api/chalicelib/core/funnels.py
+++ b/api/chalicelib/core/funnels.py
@@ -177,7 +177,8 @@ def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_dat
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id)
- insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
+ if len(insights) > 0:
+ insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
return {"data": {"stages": helper.list_to_camel_case(insights),
"totalDropDueToIssues": total_drop_due_to_issues}}
diff --git a/api/chalicelib/core/log_tool_elasticsearch.py b/api/chalicelib/core/log_tool_elasticsearch.py
index 5fdfccf24..f82dd57c1 100644
--- a/api/chalicelib/core/log_tool_elasticsearch.py
+++ b/api/chalicelib/core/log_tool_elasticsearch.py
@@ -53,11 +53,11 @@ def add_edit(tenant_id, project_id, data):
else:
return add(tenant_id=tenant_id,
project_id=project_id,
- host=data["host"], api_key=data["apiKeyId"], api_key_id=data["apiKey"], indexes=data["indexes"],
+ host=data["host"], api_key=data["apiKey"], api_key_id=data["apiKeyId"], indexes=data["indexes"],
port=data["port"])
-def __get_es_client(host, port, api_key_id, api_key, use_ssl=False, timeout=29):
+def __get_es_client(host, port, api_key_id, api_key, use_ssl=False, timeout=15):
host = host.replace("http://", "").replace("https://", "")
try:
args = {
diff --git a/api/chalicelib/core/saved_search.py b/api/chalicelib/core/saved_search.py
index 732fc1596..d1e8fe15f 100644
--- a/api/chalicelib/core/saved_search.py
+++ b/api/chalicelib/core/saved_search.py
@@ -77,6 +77,8 @@ def get_all(project_id, user_id, details=False):
for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
if details:
+ if isinstance(row["filter"], list) and len(row["filter"]) == 0:
+ row["filter"] = {}
row["filter"] = helper.old_search_payload_to_flat(row["filter"])
return rows
diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py
index 8eb58a29d..164cf366e 100644
--- a/api/chalicelib/core/sessions.py
+++ b/api/chalicelib/core/sessions.py
@@ -7,7 +7,7 @@ SESSION_PROJECTION_COLS = """s.project_id,
s.session_id::text AS session_id,
s.user_uuid,
s.user_id,
-s.user_agent,
+-- s.user_agent,
s.user_os,
s.user_browser,
s.user_device,
@@ -101,7 +101,8 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
project_key=data["projectKey"])
return data
- return None
+ else:
+ return assist.get_live_session_by_id(project_id=project_id, session_id=session_id)
def __get_sql_operator(op: schemas.SearchEventOperator):
@@ -150,9 +151,10 @@ def _multiple_conditions(condition, values, value_key="value", is_not=False):
def _multiple_values(values, value_key="value"):
query_values = {}
- for i in range(len(values)):
- k = f"{value_key}_{i}"
- query_values[k] = values[i]
+ if values is not None and isinstance(values, list):
+ for i in range(len(values)):
+ k = f"{value_key}_{i}"
+ query_values[k] = values[i]
return query_values
@@ -183,10 +185,24 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f
main_query = cur.mogrify(f"""SELECT COUNT(DISTINCT s.session_id) AS count_sessions,
COUNT(DISTINCT s.user_uuid) AS count_users
{query_part};""", full_args)
+ elif data.group_by_user:
+ main_query = cur.mogrify(f"""SELECT COUNT(*) AS count, jsonb_agg(users_sessions) FILTER ( WHERE rn <= 200 ) AS sessions
+ FROM (SELECT user_id,
+ count(full_sessions) AS user_sessions_count,
+ jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session,
+ ROW_NUMBER() OVER (ORDER BY count(full_sessions) DESC) AS rn
+ FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY start_ts DESC) AS rn
+ FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
+ {query_part}
+ ORDER BY s.session_id desc) AS filtred_sessions
+ ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions
+ GROUP BY user_id
+ ORDER BY user_sessions_count DESC) AS users_sessions;""",
+ full_args)
else:
main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, COALESCE(JSONB_AGG(full_sessions) FILTER (WHERE rn <= 200), '[]'::JSONB) AS sessions
- FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY favorite DESC, issue_score DESC, session_id desc, start_ts desc) AS rn FROM
- (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
+ FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY favorite DESC, issue_score DESC, session_id desc, start_ts desc) AS rn
+ FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
{query_part}
ORDER BY s.session_id desc) AS filtred_sessions
ORDER BY favorite DESC, issue_score DESC, {sort} {data.order}) AS full_sessions;""",
@@ -221,7 +237,7 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f
if errors_only:
return sessions
- if data.sort is not None and data.sort != "session_id":
+ if not data.group_by_user and data.sort is not None and data.sort != "session_id":
sessions = sorted(sessions, key=lambda s: s[helper.key_to_snake_case(data.sort)],
reverse=data.order.upper() == "DESC")
return {
@@ -233,8 +249,8 @@ def search2_pg(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, f
@dev.timed
def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int,
view_type: schemas.MetricViewType):
- step_size = metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate,
- density=density, factor=1)
+ step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate,
+ density=density, factor=1, decimal=True))
full_args, query_part, sort = search_query_parts(data=data, error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=project_id,
user_id=None)
@@ -249,7 +265,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d
LEFT JOIN LATERAL ( SELECT 1 AS s
FROM full_sessions
WHERE start_ts >= generated_timestamp
- AND start_ts < generated_timestamp + %(step_size)s) AS sessions ON (TRUE)
+ AND start_ts <= generated_timestamp + %(step_size)s) AS sessions ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;""", full_args)
else:
@@ -287,47 +303,57 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
for i, f in enumerate(data.filters):
if not isinstance(f.value, list):
f.value = [f.value]
- if len(f.value) == 0 or f.value[0] is None:
- continue
filter_type = f.type
- # f.value = __get_sql_value_multiple(f.value)
f.value = helper.values_for_operator(value=f.value, op=f.operator)
f_k = f"f_value{i}"
full_args = {**full_args, **_multiple_values(f.value, value_key=f_k)}
op = __get_sql_operator(f.operator) \
if filter_type not in [schemas.FilterType.events_count] else f.operator
is_any = _isAny_opreator(f.operator)
+ if not is_any and len(f.value) == 0:
+ continue
is_not = False
if __is_negation_operator(f.operator):
is_not = True
- # op = __reverse_sql_operator(op)
if filter_type == schemas.FilterType.user_browser:
- # op = __get_sql_operator_multiple(f.operator)
- extra_constraints.append(
- _multiple_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
- ss_constraints.append(
- _multiple_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
+ if is_any:
+ extra_constraints.append('s.user_browser IS NOT NULL')
+ ss_constraints.append('ms.user_browser IS NOT NULL')
+ else:
+ extra_constraints.append(
+ _multiple_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
+ ss_constraints.append(
+ _multiple_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]:
- # op = __get_sql_operator_multiple(f.operator)
- extra_constraints.append(
- _multiple_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
- ss_constraints.append(
- _multiple_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
+ if is_any:
+ extra_constraints.append('s.user_os IS NOT NULL')
+ ss_constraints.append('ms.user_os IS NOT NULL')
+ else:
+ extra_constraints.append(
+ _multiple_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
+ ss_constraints.append(
+ _multiple_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]:
- # op = __get_sql_operator_multiple(f.operator)
- extra_constraints.append(
- _multiple_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
- ss_constraints.append(
- _multiple_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
+ if is_any:
+ extra_constraints.append('s.user_device IS NOT NULL')
+ ss_constraints.append('ms.user_device IS NOT NULL')
+ else:
+ extra_constraints.append(
+ _multiple_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
+ ss_constraints.append(
+ _multiple_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]:
- # op = __get_sql_operator_multiple(f.operator)
- extra_constraints.append(
- _multiple_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
- ss_constraints.append(
- _multiple_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
+ if is_any:
+ extra_constraints.append('s.user_country IS NOT NULL')
+ ss_constraints.append('ms.user_country IS NOT NULL')
+ else:
+ extra_constraints.append(
+ _multiple_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
+ ss_constraints.append(
+ _multiple_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.utm_source]:
if is_any:
@@ -335,9 +361,10 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.utm_source IS NOT NULL')
else:
extra_constraints.append(
- _multiple_conditions(f's.utm_source {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
+ _multiple_conditions(f's.utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
+ value_key=f_k))
ss_constraints.append(
- _multiple_conditions(f'ms.utm_source {op} %({f_k})s', f.value, is_not=is_not,
+ _multiple_conditions(f'ms.utm_source {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_medium]:
if is_any:
@@ -345,9 +372,10 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.utm_medium IS NOT NULL')
else:
extra_constraints.append(
- _multiple_conditions(f's.utm_medium {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k))
+ _multiple_conditions(f's.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
+ value_key=f_k))
ss_constraints.append(
- _multiple_conditions(f'ms.utm_medium {op} %({f_k})s', f.value, is_not=is_not,
+ _multiple_conditions(f'ms.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif filter_type in [schemas.FilterType.utm_campaign]:
if is_any:
@@ -355,10 +383,10 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append('ms.utm_campaign IS NOT NULL')
else:
extra_constraints.append(
- _multiple_conditions(f's.utm_campaign {op} %({f_k})s', f.value, is_not=is_not,
+ _multiple_conditions(f's.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
ss_constraints.append(
- _multiple_conditions(f'ms.utm_campaign {op} %({f_k})s', f.value, is_not=is_not,
+ _multiple_conditions(f'ms.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.duration:
@@ -371,45 +399,60 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
ss_constraints.append("ms.duration <= %(maxDuration)s")
full_args["maxDuration"] = f.value[1]
elif filter_type == schemas.FilterType.referrer:
- # events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"
- # op = __get_sql_operator_multiple(f.operator)
- extra_constraints.append(
- _multiple_conditions(f"p.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
+ if is_any:
+ extra_constraints.append('p.base_referrer IS NOT NULL')
+ else:
+ extra_constraints.append(
+ _multiple_conditions(f"p.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
elif filter_type == events.event_type.METADATA.ui_type:
# get metadata list only if you need it
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
- # op = __get_sql_operator(f.operator)
- if f.key in meta_keys.keys():
- extra_constraints.append(
- _multiple_conditions(f"s.{metadata.index_to_colname(meta_keys[f.key])} {op} %({f_k})s",
- f.value, is_not=is_not, value_key=f_k))
- ss_constraints.append(
- _multiple_conditions(f"ms.{metadata.index_to_colname(meta_keys[f.key])} {op} %({f_k})s",
- f.value, is_not=is_not, value_key=f_k))
+ if f.source in meta_keys.keys():
+ if is_any:
+ extra_constraints.append(f"s.{metadata.index_to_colname(meta_keys[f.source])} IS NOT NULL")
+ ss_constraints.append(f"ms.{metadata.index_to_colname(meta_keys[f.source])} IS NOT NULL")
+ else:
+ extra_constraints.append(
+ _multiple_conditions(
+ f"s.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text",
+ f.value, is_not=is_not, value_key=f_k))
+ ss_constraints.append(
+ _multiple_conditions(
+ f"ms.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text",
+ f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
- # op = __get_sql_operator(f.operator)
- extra_constraints.append(
- _multiple_conditions(f"s.user_id {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
- ss_constraints.append(
- _multiple_conditions(f"ms.user_id {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
+ if is_any:
+ extra_constraints.append('s.user_id IS NOT NULL')
+ ss_constraints.append('ms.user_id IS NOT NULL')
+ else:
+ extra_constraints.append(
+ _multiple_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
+ ss_constraints.append(
+ _multiple_conditions(f"ms.user_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
elif filter_type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_ios]:
- # op = __get_sql_operator(f.operator)
- extra_constraints.append(
- _multiple_conditions(f"s.user_anonymous_id {op} %({f_k})s", f.value, is_not=is_not,
- value_key=f_k))
- ss_constraints.append(
- _multiple_conditions(f"ms.user_anonymous_id {op} %({f_k})s", f.value, is_not=is_not,
- value_key=f_k))
+ if is_any:
+ extra_constraints.append('s.user_anonymous_id IS NOT NULL')
+ ss_constraints.append('ms.user_anonymous_id IS NOT NULL')
+ else:
+ extra_constraints.append(
+ _multiple_conditions(f"s.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
+ value_key=f_k))
+ ss_constraints.append(
+ _multiple_conditions(f"ms.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not,
+ value_key=f_k))
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
- # op = __get_sql_operator(f.operator)
- extra_constraints.append(
- _multiple_conditions(f"s.rev_id {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
- ss_constraints.append(
- _multiple_conditions(f"ms.rev_id {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
+ if is_any:
+ extra_constraints.append('s.rev_id IS NOT NULL')
+ ss_constraints.append('ms.rev_id IS NOT NULL')
+ else:
+ extra_constraints.append(
+ _multiple_conditions(f"s.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
+ ss_constraints.append(
+ _multiple_conditions(f"ms.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k))
elif filter_type == schemas.FilterType.platform:
# op = __get_sql_operator(f.operator)
extra_constraints.append(
@@ -419,12 +462,16 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
_multiple_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
value_key=f_k))
elif filter_type == schemas.FilterType.issue:
- extra_constraints.append(
- _multiple_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not,
- value_key=f_k))
- ss_constraints.append(
- _multiple_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not,
- value_key=f_k))
+ if is_any:
+ extra_constraints.append("array_length(s.issue_types, 1) > 0")
+ ss_constraints.append("array_length(ms.issue_types, 1) > 0")
+ else:
+ extra_constraints.append(
+ _multiple_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not,
+ value_key=f_k))
+ ss_constraints.append(
+ _multiple_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not,
+ value_key=f_k))
elif filter_type == schemas.FilterType.events_count:
extra_constraints.append(
_multiple_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not,
@@ -445,6 +492,14 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
is_any = _isAny_opreator(event.operator)
if not isinstance(event.value, list):
event.value = [event.value]
+ if not is_any and len(event.value) == 0 \
+ or event_type in [schemas.PerformanceEventType.location_dom_complete,
+ schemas.PerformanceEventType.location_largest_contentful_paint_time,
+ schemas.PerformanceEventType.location_ttfb,
+ schemas.PerformanceEventType.location_avg_cpu_load,
+ schemas.PerformanceEventType.location_avg_memory_usage
+ ] and (event.source is None or len(event.source) == 0):
+ continue
op = __get_sql_operator(event.operator)
is_not = False
if __is_negation_operator(event.operator):
@@ -462,9 +517,12 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
if data.events_order == schemas.SearchEventOrder._then:
event_where.append(f"event_{event_index - 1}.timestamp <= main.timestamp")
e_k = f"e_value{i}"
+ s_k = e_k + "_source"
if event.type != schemas.PerformanceEventType.time_between_events:
event.value = helper.values_for_operator(value=event.value, op=event.operator)
- full_args = {**full_args, **_multiple_values(event.value, value_key=e_k)}
+ full_args = {**full_args,
+ **_multiple_values(event.value, value_key=e_k),
+ **_multiple_values(event.source, value_key=s_k)}
# if event_type not in list(events.SUPPORTED_TYPES.keys()) \
# or event.value in [None, "", "*"] \
@@ -484,10 +542,10 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
event_where.append(
_multiple_conditions(f"main.{events.event_type.INPUT.column} {op} %({e_k})s", event.value,
value_key=e_k))
- if event.custom is not None and len(event.custom) > 0:
- event_where.append(_multiple_conditions(f"main.value ILIKE %(custom{i})s", event.custom,
+ if event.source is not None and len(event.source) > 0:
+ event_where.append(_multiple_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key=f"custom{i}"))
- full_args = {**full_args, **_multiple_values(event.custom, value_key=f"custom{i}")}
+ full_args = {**full_args, **_multiple_values(event.source, value_key=f"custom{i}")}
elif event_type == events.event_type.LOCATION.ui_type:
event_from = event_from % f"{events.event_type.LOCATION.table} AS main "
@@ -520,18 +578,15 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
_multiple_conditions(f"main.{events.event_type.STATEACTION.column} {op} %({e_k})s",
event.value, value_key=e_k))
elif event_type == events.event_type.ERROR.ui_type:
- # if event.source in [None, "*", ""]:
- # event.source = "js_exception"
event_from = event_from % f"{events.event_type.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)"
- if event.value not in [None, "*", ""]:
- if not is_any:
- event_where.append(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)")
- if event.source not in [None, "*", ""]:
- event_where.append(f"main1.source = %(source)s")
- full_args["source"] = event.source
- elif event.source not in [None, "*", ""]:
- event_where.append(f"main1.source = %(source)s")
- full_args["source"] = event.source
+ event.source = tuple(event.source)
+ if not is_any and event.value not in [None, "*", ""]:
+ event_where.append(
+ _multiple_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)",
+ event.value, value_key=e_k))
+                    if len(event.source) > 0 and event.source[0] not in [None, "*", ""]:
+                        event_where.append(_multiple_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k))
+
# ----- IOS
elif event_type == events.event_type.CLICK_IOS.ui_type:
@@ -547,10 +602,10 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
event_where.append(
_multiple_conditions(f"main.{events.event_type.INPUT_IOS.column} {op} %({e_k})s",
event.value, value_key=e_k))
- if event.custom is not None and len(event.custom) > 0:
- event_where.append(_multiple_conditions(f"main.value ILIKE %(custom{i})s", event.custom,
+ if event.source is not None and len(event.source) > 0:
+ event_where.append(_multiple_conditions(f"main.value ILIKE %(custom{i})s", event.source,
value_key="custom{i}"))
- full_args = {**full_args, **_multiple_values(event.custom, f"custom{i}")}
+ full_args = {**full_args, **_multiple_values(event.source, f"custom{i}")}
elif event_type == events.event_type.VIEW_IOS.ui_type:
event_from = event_from % f"{events.event_type.VIEW_IOS.table} AS main "
if not is_any:
@@ -594,10 +649,10 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
# colname = col["column"]
# tname = "main"
# e_k += "_custom"
- # full_args = {**full_args, **_multiple_values(event.custom, value_key=e_k)}
+ # full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
# event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " +
- # _multiple_conditions(f"{tname}.{colname} {event.customOperator} %({e_k})s",
- # event.custom, value_key=e_k))
+ # _multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
+ # event.source, value_key=e_k))
elif event_type in [schemas.PerformanceEventType.location_dom_complete,
schemas.PerformanceEventType.location_largest_contentful_paint_time,
schemas.PerformanceEventType.location_ttfb,
@@ -618,11 +673,11 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
_multiple_conditions(f"main.{events.event_type.LOCATION.column} {op} %({e_k})s",
event.value, value_key=e_k))
e_k += "_custom"
- full_args = {**full_args, **_multiple_values(event.custom, value_key=e_k)}
+ full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " +
- _multiple_conditions(f"{tname}.{colname} {event.customOperator} %({e_k})s",
- event.custom, value_key=e_k))
+ _multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s",
+ event.source, value_key=e_k))
elif event_type == schemas.PerformanceEventType.time_between_events:
event_from = event_from % f"{getattr(events.event_type, event.value[0].type).table} AS main INNER JOIN {getattr(events.event_type, event.value[1].type).table} AS main2 USING(session_id) "
if not isinstance(event.value[0].value, list):
@@ -653,10 +708,10 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
event.value[1].value, value_key=e_k2))
e_k += "_custom"
- full_args = {**full_args, **_multiple_values(event.custom, value_key=e_k)}
+ full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)}
event_where.append(
- _multiple_conditions(f"main2.timestamp - main.timestamp {event.customOperator} %({e_k})s",
- event.custom, value_key=e_k))
+ _multiple_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator} %({e_k})s",
+ event.source, value_key=e_k))
else:
@@ -678,7 +733,7 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
AND start_ts >= %(startDate)s
AND start_ts <= %(endDate)s
AND duration IS NOT NULL
- ) {"" if or_events else ("AS event_{event_index}" + ("ON(TRUE)" if event_index > 0 else ""))}\
+ ) {"" if or_events else (f"AS event_{event_index}" + ("ON(TRUE)" if event_index > 0 else ""))}\
""")
else:
events_query_from.append(f"""\
@@ -890,7 +945,7 @@ def get_favorite_sessions(project_id, user_id, include_viewed=False):
s.session_id::text AS session_id,
s.user_uuid,
s.user_id,
- s.user_agent,
+ -- s.user_agent,
s.user_os,
s.user_browser,
s.user_device,
@@ -927,7 +982,7 @@ def get_user_sessions(project_id, user_id, start_date, end_date):
s.session_id::text AS session_id,
s.user_uuid,
s.user_id,
- s.user_agent,
+ -- s.user_agent,
s.user_os,
s.user_browser,
s.user_device,
diff --git a/api/chalicelib/core/sessions_favorite_viewed.py b/api/chalicelib/core/sessions_favorite_viewed.py
index 1239156ab..7f503679c 100644
--- a/api/chalicelib/core/sessions_favorite_viewed.py
+++ b/api/chalicelib/core/sessions_favorite_viewed.py
@@ -1,5 +1,5 @@
-from chalicelib.utils import pg_client
from chalicelib.core import sessions
+from chalicelib.utils import pg_client
def add_favorite_session(project_id, user_id, session_id):
@@ -37,7 +37,8 @@ def add_viewed_session(project_id, user_id, session_id):
INSERT INTO public.user_viewed_sessions
(user_id, session_id)
VALUES
- (%(userId)s,%(sessionId)s);""",
+ (%(userId)s,%(sessionId)s)
+ ON CONFLICT DO NOTHING;""",
{"userId": user_id, "sessionId": session_id})
)
@@ -50,8 +51,6 @@ def favorite_session(project_id, user_id, session_id):
def view_session(project_id, user_id, session_id):
- if viewed_session_exists(user_id=user_id, session_id=session_id):
- return None
return add_viewed_session(project_id=project_id, user_id=user_id, session_id=session_id)
@@ -69,21 +68,3 @@ def favorite_session_exists(user_id, session_id):
)
r = cur.fetchone()
return r is not None
-
-
-def viewed_session_exists(user_id, session_id):
- with pg_client.PostgresClient() as cur:
- cur.execute(
- cur.mogrify(
- """SELECT
- session_id
- FROM public.user_viewed_sessions
- WHERE
- user_id = %(userId)s
- AND session_id = %(sessionId)s""",
- {"userId": user_id, "sessionId": session_id})
- )
- r = cur.fetchone()
- if r:
- return True
- return False
diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py
index f2261ce59..67b00c74a 100644
--- a/api/chalicelib/core/significance.py
+++ b/api/chalicelib/core/significance.py
@@ -31,7 +31,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
:param filter_d: dict contains events&filters&...
:return:
"""
- stages: [dict] = filter_d["events"]
+ stages: [dict] = filter_d.get("events", [])
filters: [dict] = filter_d.get("filters", [])
filter_issues = filter_d.get("issueTypes")
if filter_issues is None or len(filter_issues) == 0:
@@ -130,6 +130,8 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
if not isinstance(s["value"], list):
s["value"] = [s["value"]]
is_any = sessions._isAny_opreator(s["operator"])
+ if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0:
+ continue
op = sessions.__get_sql_operator(s["operator"])
event_type = s["type"].upper()
if event_type == events.event_type.CLICK.ui_type:
@@ -581,7 +583,7 @@ def get_top_insights(filter_d, project_id):
@dev.timed
def get_issues_list(filter_d, project_id, first_stage=None, last_stage=None):
output = dict({'critical_issues_count': 0})
- stages = filter_d["events"]
+ stages = filter_d.get("events", [])
# The result of the multi-stage query
rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
# print(json.dumps(rows[0],indent=4))
diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py
index 1461c6e14..b4ac0f869 100644
--- a/api/chalicelib/core/users.py
+++ b/api/chalicelib/core/users.py
@@ -315,6 +315,11 @@ def edit(user_id_to_update, tenant_id, changes, editor_id):
return {"data": user}
+def edit_appearance(user_id, tenant_id, changes):
+ updated_user = update(tenant_id=tenant_id, user_id=user_id, changes=changes)
+ return {"data": updated_user}
+
+
def get_by_email_only(email):
with pg_client.PostgresClient() as cur:
cur.execute(
diff --git a/api/chalicelib/utils/helper.py b/api/chalicelib/utils/helper.py
index 6887fa5da..8e1f5788c 100644
--- a/api/chalicelib/utils/helper.py
+++ b/api/chalicelib/utils/helper.py
@@ -216,7 +216,7 @@ def values_for_operator(value: Union[str, list], op: schemas.SearchEventOperator
return value + '%'
elif op == schemas.SearchEventOperator._ends_with:
return '%' + value
- elif op == schemas.SearchEventOperator._contains:
+ elif op == schemas.SearchEventOperator._contains or op == schemas.SearchEventOperator._not_contains:
return '%' + value + '%'
return value
@@ -377,3 +377,10 @@ def old_search_payload_to_flat(values):
v["isEvent"] = False
values["filters"] = values.pop("events") + values.get("filters", [])
return values
+
+
+def custom_alert_to_front(values):
+ # to support frontend format for payload
+ if values.get("seriesId") is not None and values["query"]["left"] == schemas.AlertColumn.custom:
+ values["query"]["left"] = values["seriesId"]
+ return values
diff --git a/api/routers/core.py b/api/routers/core.py
index bbeb30bcd..df651362d 100644
--- a/api/routers/core.py
+++ b/api/routers/core.py
@@ -34,8 +34,8 @@ def get_session2(projectId: int, sessionId: int, context: schemas.CurrentContext
include_fav_viewed=True, group_metadata=True)
if data is None:
return {"errors": ["session not found"]}
-
- sessions_favorite_viewed.view_session(project_id=projectId, user_id=context.user_id, session_id=sessionId)
+ if not data.get("live"):
+ sessions_favorite_viewed.view_session(project_id=projectId, user_id=context.user_id, session_id=sessionId)
return {
'data': data
}
@@ -99,10 +99,25 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schem
@app.get('/{projectId}/events/search', tags=["events"])
-def events_search(projectId: int, q: str, type: Union[schemas.FilterType, schemas.EventType] = None, key: str = None,
+def events_search(projectId: int, q: str,
+ type: Union[schemas.FilterType, schemas.EventType, schemas.PerformanceEventType] = None,
+ key: str = None,
source: str = None, context: schemas.CurrentContext = Depends(OR_context)):
if len(q) == 0:
return {"data": []}
+ if isinstance(type, schemas.PerformanceEventType):
+ if type in [schemas.PerformanceEventType.location_dom_complete,
+ schemas.PerformanceEventType.location_largest_contentful_paint_time,
+ schemas.PerformanceEventType.location_ttfb,
+ schemas.PerformanceEventType.location_avg_cpu_load,
+ schemas.PerformanceEventType.location_avg_memory_usage
+ ]:
+ type = schemas.EventType.location
+ elif type in [schemas.PerformanceEventType.fetch_failed]:
+ type = schemas.EventType.request
+ else:
+ return {"data": []}
+
result = events.search_pg2(text=q, event_type=type, project_id=projectId, source=source, key=key)
return result
@@ -757,7 +772,7 @@ def get_funnel_issue_sessions(projectId: int, funnelId: int, issueId: str,
@app.get('/{projectId}/funnels/{funnelId}', tags=["funnels"])
def get_funnel(projectId: int, funnelId: int, context: schemas.CurrentContext = Depends(OR_context)):
- data = funnels.get(funnel_id=funnelId, project_id=projectId, user_id=context.user_id)
+ data = funnels.get(funnel_id=funnelId, project_id=projectId, user_id=context.user_id, flatten=False)
if data is None:
return {"errors": ["funnel not found"]}
return {"data": data}
@@ -815,14 +830,14 @@ def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)):
@app.get('/{projectId}/assist/sessions', tags=["assist"])
def sessions_live(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
- data = assist.get_live_sessions(projectId)
+ data = assist.get_live_sessions_ws(projectId)
return {'data': data}
@app.post('/{projectId}/assist/sessions', tags=["assist"])
def sessions_live_search(projectId: int, data: schemas.AssistSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
- data = assist.get_live_sessions(projectId, filters=data.filters)
+ data = assist.get_live_sessions_ws(projectId)
return {'data': data}
@@ -1054,6 +1069,13 @@ def edit_account(data: schemas.EditUserSchema = Body(...),
editor_id=context.user_id)
+@app.post('/account/appearance', tags=["account"])
+@app.put('/account/appearance', tags=["account"])
+def edit_account_appearance(data: schemas.EditUserAppearanceSchema = Body(...),
+ context: schemas.CurrentContext = Depends(OR_context)):
+ return users.edit_appearance(tenant_id=context.tenant_id, user_id=context.user_id, changes=data.dict())
+
+
@app.post('/account/password', tags=["account"])
@app.put('/account/password', tags=["account"])
def change_client_password(data: schemas.EditUserPasswordSchema = Body(...),
@@ -1067,7 +1089,15 @@ def change_client_password(data: schemas.EditUserPasswordSchema = Body(...),
@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"])
def try_custom_metric(projectId: int, data: schemas.TryCustomMetricsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
- return {"data": custom_metrics.try_live(project_id=projectId, data=data)}
+ return {"data": custom_metrics.merged_live
+ (project_id=projectId, data=data)}
+
+
+@app.post('/{projectId}/custom_metrics/sessions', tags=["customMetrics"])
+def get_custom_metric_sessions(projectId: int, data: schemas.CustomMetricRawPayloadSchema2 = Body(...),
+ context: schemas.CurrentContext = Depends(OR_context)):
+ return {"data": custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, metric_id=data.metric_id,
+ data=data)}
@app.post('/{projectId}/custom_metrics/chart', tags=["customMetrics"])
@@ -1095,6 +1125,13 @@ def get_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentCo
return {"data": custom_metrics.get(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}
+@app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"])
+def get_custom_metric_sessions(projectId: int, metric_id: int, data: schemas.CustomMetricRawPayloadSchema = Body(...),
+ context: schemas.CurrentContext = Depends(OR_context)):
+ return {"data": custom_metrics.get_sessions(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
+ data=data)}
+
+
@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"])
def get_custom_metric_chart(projectId: int, metric_id: int, data: schemas.CustomMetricChartPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
@@ -1110,6 +1147,16 @@ def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCus
"data": custom_metrics.update(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data)}
+@app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
+@app.put('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"])
+def update_custom_metric_state(projectId: int, metric_id: int,
+ data: schemas.UpdateCustomMetricsStatusSchema = Body(...),
+ context: schemas.CurrentContext = Depends(OR_context)):
+ return {
+ "data": custom_metrics.change_state(project_id=projectId, user_id=context.user_id, metric_id=metric_id,
+ status=data.active)}
+
+
@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"])
def delete_custom_metric(projectId: int, metric_id: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": custom_metrics.delete(project_id=projectId, user_id=context.user_id, metric_id=metric_id)}
@@ -1124,7 +1171,7 @@ def add_saved_search(projectId: int, data: schemas.SavedSearchSchema = Body(...)
@app.get('/{projectId}/saved_search', tags=["savedSearch"])
def get_saved_searches(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
- return {"data": saved_search.get_all(project_id=projectId, user_id=context.user_id)}
+ return {"data": saved_search.get_all(project_id=projectId, user_id=context.user_id, details=True)}
@app.get('/{projectId}/saved_search/{search_id}', tags=["savedSearch"])
@@ -1142,3 +1189,11 @@ def update_saved_search(projectId: int, search_id: int, data: schemas.SavedSearc
@app.delete('/{projectId}/saved_search/{search_id}', tags=["savedSearch"])
def delete_saved_search(projectId: int, search_id: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": saved_search.delete(project_id=projectId, user_id=context.user_id, search_id=search_id)}
+
+
+@public_app.get('/', tags=["health"])
+@public_app.post('/', tags=["health"])
+@public_app.put('/', tags=["health"])
+@public_app.delete('/', tags=["health"])
+def health_check():
+ return {"data": f"live {config('version_number', default='')}"}
diff --git a/api/schemas.py b/api/schemas.py
index d66b92444..56eb24cf7 100644
--- a/api/schemas.py
+++ b/api/schemas.py
@@ -36,6 +36,10 @@ class EditUserSchema(BaseModel):
appearance: Optional[dict] = Field({})
+class EditUserAppearanceSchema(BaseModel):
+ appearance: dict = Field(...)
+
+
class ForgetPasswordPayloadSchema(_Grecaptcha):
email: str = Field(...)
@@ -312,7 +316,7 @@ class MathOperator(str, Enum):
class _AlertQuerySchema(BaseModel):
- left: AlertColumn = Field(...)
+ left: Union[AlertColumn, int] = Field(...)
right: float = Field(...)
# operator: Literal["<", ">", "<=", ">="] = Field(...)
operator: MathOperator = Field(...)
@@ -331,6 +335,14 @@ class AlertSchema(BaseModel):
query: _AlertQuerySchema = Field(...)
series_id: Optional[int] = Field(None)
+ @root_validator(pre=True)
+ def transform_alert(cls, values):
+ if values.get("seriesId") is None and isinstance(values["query"]["left"], int):
+ values["seriesId"] = values["query"]["left"]
+ values["query"]["left"] = AlertColumn.custom
+
+ return values
+
@root_validator
def alert_validator(cls, values):
if values.get("query") is not None and values["query"].left == AlertColumn.custom:
@@ -371,7 +383,6 @@ class EventType(str, Enum):
graphql = "GRAPHQL"
state_action = "STATEACTION"
error = "ERROR"
- metadata = "METADATA"
click_ios = "CLICK_IOS"
input_ios = "INPUT_IOS"
view_ios = "VIEW_IOS"
@@ -461,37 +472,48 @@ class IssueType(str, Enum):
class __MixedSearchFilter(BaseModel):
is_event: bool = Field(...)
+ @root_validator(pre=True)
+ def remove_duplicate_values(cls, values):
+ if values.get("value") is not None:
+ if len(values["value"]) > 0 and isinstance(values["value"][0], int):
+ return values
+ values["value"] = list(set(values["value"]))
+ return values
+
class Config:
alias_generator = attribute_to_camel_case
class _SessionSearchEventRaw(__MixedSearchFilter):
- is_event: bool = Field(True, const=True)
- custom: Optional[List[Union[int, str]]] = Field(None, min_items=1)
- customOperator: Optional[MathOperator] = Field(None)
- key: Optional[str] = Field(None)
- value: Union[str, List[str]] = Field(...)
+ is_event: bool = Field(default=True, const=True)
+ value: List[str] = Field(...)
type: Union[EventType, PerformanceEventType] = Field(...)
operator: SearchEventOperator = Field(...)
- source: Optional[ErrorSource] = Field(default=ErrorSource.js_exception)
+ source: Optional[List[Union[ErrorSource, int, str]]] = Field(None)
+ sourceOperator: Optional[MathOperator] = Field(None)
@root_validator
def event_validator(cls, values):
if isinstance(values.get("type"), PerformanceEventType):
if values.get("type") == PerformanceEventType.fetch_failed:
return values
- assert values.get("custom") is not None, "custom should not be null for PerformanceEventType"
- assert values.get("customOperator") is not None \
- , "customOperator should not be null for PerformanceEventType"
+ # assert values.get("source") is not None, "source should not be null for PerformanceEventType"
+ # assert isinstance(values["source"], list) and len(values["source"]) > 0, \
+ # "source should not be empty for PerformanceEventType"
+ assert values.get("sourceOperator") is not None, \
+ "sourceOperator should not be null for PerformanceEventType"
if values["type"] == PerformanceEventType.time_between_events:
assert len(values.get("value", [])) == 2, \
f"must provide 2 Events as value for {PerformanceEventType.time_between_events}"
assert isinstance(values["value"][0], _SessionSearchEventRaw) \
- and isinstance(values["value"][1], _SessionSearchEventRaw) \
- , f"event should be of type _SessionSearchEventRaw for {PerformanceEventType.time_between_events}"
+ and isinstance(values["value"][1], _SessionSearchEventRaw), \
+ f"event should be of type _SessionSearchEventRaw for {PerformanceEventType.time_between_events}"
else:
- for c in values["custom"]:
- assert isinstance(c, int), f"custom value should be of type int for {values.get('type')}"
+ for c in values["source"]:
+ assert isinstance(c, int), f"source value should be of type int for {values.get('type')}"
+ elif values.get("type") == EventType.error and values.get("source") is None:
+ values["source"] = [ErrorSource.js_exception]
+
return values
@@ -501,17 +523,18 @@ class _SessionSearchEventSchema(_SessionSearchEventRaw):
class _SessionSearchFilterSchema(__MixedSearchFilter):
is_event: bool = Field(False, const=False)
- custom: Optional[List[str]] = Field(None)
- key: Optional[str] = Field(None)
value: Union[Optional[Union[IssueType, PlatformType, int, str]],
Optional[List[Union[IssueType, PlatformType, int, str]]]] = Field(...)
type: FilterType = Field(...)
operator: Union[SearchEventOperator, MathOperator] = Field(...)
- source: Optional[ErrorSource] = Field(default=ErrorSource.js_exception)
+ source: Optional[Union[ErrorSource, str]] = Field(default=ErrorSource.js_exception)
@root_validator
def filter_validator(cls, values):
- if values.get("type") == FilterType.issue:
+ if values.get("type") == FilterType.metadata:
+ assert values.get("source") is not None and len(values["source"]) > 0, \
+ "must specify a valid 'source' for metadata filter"
+ elif values.get("type") == FilterType.issue:
for v in values.get("value"):
assert isinstance(v, IssueType), f"value should be of type IssueType for {values.get('type')} filter"
elif values.get("type") == FilterType.platform:
@@ -532,14 +555,12 @@ class _SessionSearchFilterSchema(__MixedSearchFilter):
class SessionsSearchPayloadSchema(BaseModel):
events: List[_SessionSearchEventSchema] = Field([])
filters: List[_SessionSearchFilterSchema] = Field([])
- # custom:dict=Field(...)
- # rangeValue:str=Field(...)
startDate: int = Field(None)
endDate: int = Field(None)
- sort: str = Field(...)
+ sort: str = Field(default="startTs")
order: str = Field(default="DESC")
- # platform: Optional[PlatformType] = Field(None)
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then)
+ group_by_user: bool = Field(default=False)
class Config:
alias_generator = attribute_to_camel_case
@@ -561,9 +582,10 @@ class FlatSessionsSearchPayloadSchema(SessionsSearchPayloadSchema):
n_filters = []
n_events = []
for v in values.get("filters", []):
- if v["isEvent"]:
+ if v.get("isEvent"):
n_events.append(v)
else:
+ v["isEvent"] = False
n_filters.append(v)
values["events"] = n_events
values["filters"] = n_filters
@@ -581,6 +603,14 @@ class FunnelSearchPayloadSchema(FlatSessionsSearchPayloadSchema):
range_value: Optional[str] = Field(None)
sort: Optional[str] = Field(None)
order: Optional[str] = Field(None)
+ events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True)
+ group_by_user: Optional[bool] = Field(default=False, const=True)
+
+ @root_validator(pre=True)
+ def enforce_default_values(cls, values):
+ values["eventsOrder"] = SearchEventOrder._then
+ values["groupByUser"] = False
+ return values
class Config:
alias_generator = attribute_to_camel_case
@@ -605,6 +635,8 @@ class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema):
# class FunnelInsightsPayloadSchema(SessionsSearchPayloadSchema):
sort: Optional[str] = Field(None)
order: Optional[str] = Field(None)
+ events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True)
+ group_by_user: Optional[bool] = Field(default=False, const=True)
class MetricPayloadSchema(BaseModel):
@@ -638,18 +670,23 @@ class CustomMetricSeriesFilterSchema(FlatSessionsSearchPayloadSchema):
endDate: Optional[int] = Field(None)
sort: Optional[str] = Field(None)
order: Optional[str] = Field(None)
+ group_by_user: Optional[bool] = Field(default=False, const=True)
class CustomMetricCreateSeriesSchema(BaseModel):
+ series_id: Optional[int] = Field(None)
name: Optional[str] = Field(None)
index: Optional[int] = Field(None)
filter: Optional[CustomMetricSeriesFilterSchema] = Field([])
+ class Config:
+ alias_generator = attribute_to_camel_case
+
class CreateCustomMetricsSchema(BaseModel):
name: str = Field(...)
series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1)
- is_public: Optional[bool] = Field(False)
+ is_public: Optional[bool] = Field(True)
class Config:
alias_generator = attribute_to_camel_case
@@ -660,15 +697,24 @@ class MetricViewType(str, Enum):
progress = "progress"
-class CustomMetricChartPayloadSchema(BaseModel):
+class CustomMetricRawPayloadSchema(BaseModel):
+ startDate: int = Field(TimeUTC.now(-7))
+ endDate: int = Field(TimeUTC.now())
+
+ class Config:
+ alias_generator = attribute_to_camel_case
+
+
+class CustomMetricRawPayloadSchema2(CustomMetricRawPayloadSchema):
+ metric_id: int = Field(...)
+
+
+class CustomMetricChartPayloadSchema(CustomMetricRawPayloadSchema):
startDate: int = Field(TimeUTC.now(-7))
endDate: int = Field(TimeUTC.now())
density: int = Field(7)
viewType: MetricViewType = Field(MetricViewType.line_chart)
- class Config:
- alias_generator = attribute_to_camel_case
-
class CustomMetricChartPayloadSchema2(CustomMetricChartPayloadSchema):
metric_id: int = Field(...)
@@ -689,5 +735,9 @@ class UpdateCustomMetricsSchema(CreateCustomMetricsSchema):
series: List[CustomMetricUpdateSeriesSchema] = Field(..., min_items=1)
+class UpdateCustomMetricsStatusSchema(BaseModel):
+ active: bool = Field(...)
+
+
class SavedSearchSchema(FunnelSchema):
- pass
+ filter: FlatSessionsSearchPayloadSchema = Field([])
diff --git a/backend/pkg/db/cache/messages_common.go b/backend/pkg/db/cache/messages_common.go
index dcf860835..65c8bf4e1 100644
--- a/backend/pkg/db/cache/messages_common.go
+++ b/backend/pkg/db/cache/messages_common.go
@@ -1,11 +1,11 @@
package cache
-import (
+import (
. "openreplay/backend/pkg/messages"
-// . "openreplay/backend/pkg/db/types"
+ // . "openreplay/backend/pkg/db/types"
)
-func (c *PGCache) insertSessionEnd(sessionID uint64, timestamp uint64 ) error {
+func (c *PGCache) insertSessionEnd(sessionID uint64, timestamp uint64) error {
//duration, err := c.Conn.InsertSessionEnd(sessionID, timestamp)
_, err := c.Conn.InsertSessionEnd(sessionID, timestamp)
if err != nil {
@@ -20,7 +20,6 @@ func (c *PGCache) insertSessionEnd(sessionID uint64, timestamp uint64 ) error {
return nil
}
-
func (c *PGCache) InsertIssueEvent(sessionID uint64, crash *IssueEvent) error {
session, err := c.GetSession(sessionID)
if err != nil {
@@ -29,7 +28,6 @@ func (c *PGCache) InsertIssueEvent(sessionID uint64, crash *IssueEvent) error {
return c.Conn.InsertIssueEvent(sessionID, session.ProjectID, crash)
}
-
func (c *PGCache) InsertUserID(sessionID uint64, userID *IOSUserID) error {
if err := c.Conn.InsertIOSUserID(sessionID, userID); err != nil {
return err
@@ -38,7 +36,7 @@ func (c *PGCache) InsertUserID(sessionID uint64, userID *IOSUserID) error {
if err != nil {
return err
}
- session.UserID = userID.Value
+ session.UserID = &userID.Value
return nil
}
@@ -69,11 +67,9 @@ func (c *PGCache) InsertMetadata(sessionID uint64, metadata *Metadata) error {
if keyNo == 0 {
// insert project metadata
}
-
if err := c.Conn.InsertMetadata(sessionID, keyNo, metadata.Value); err != nil {
return err
}
-
session.SetMetadata(keyNo, metadata.Value)
return nil
}
diff --git a/backend/pkg/db/cache/messages_web.go b/backend/pkg/db/cache/messages_web.go
index b259e49da..21b3ac866 100644
--- a/backend/pkg/db/cache/messages_web.go
+++ b/backend/pkg/db/cache/messages_web.go
@@ -1,42 +1,41 @@
package cache
-import (
+import (
"errors"
- . "openreplay/backend/pkg/messages"
. "openreplay/backend/pkg/db/types"
+ . "openreplay/backend/pkg/messages"
)
-
func (c *PGCache) InsertWebSessionStart(sessionID uint64, s *SessionStart) error {
- if c.sessions[ sessionID ] != nil {
+ if c.sessions[sessionID] != nil {
return errors.New("This session already in cache!")
}
- c.sessions[ sessionID ] = &Session{
- SessionID: sessionID,
- Platform: "web",
- Timestamp: s.Timestamp,
- ProjectID: uint32(s.ProjectID),
+ c.sessions[sessionID] = &Session{
+ SessionID: sessionID,
+ Platform: "web",
+ Timestamp: s.Timestamp,
+ ProjectID: uint32(s.ProjectID),
TrackerVersion: s.TrackerVersion,
- RevID: s.RevID,
- UserUUID: s.UserUUID,
- UserOS: s.UserOS,
- UserOSVersion: s.UserOSVersion,
- UserDevice: s.UserDevice,
- UserCountry: s.UserCountry,
+ RevID: s.RevID,
+ UserUUID: s.UserUUID,
+ UserOS: s.UserOS,
+ UserOSVersion: s.UserOSVersion,
+ UserDevice: s.UserDevice,
+ UserCountry: s.UserCountry,
// web properties (TODO: unite different platform types)
- UserAgent: s.UserAgent,
- UserBrowser: s.UserBrowser,
- UserBrowserVersion: s.UserBrowserVersion,
- UserDeviceType: s.UserDeviceType,
+ UserAgent: s.UserAgent,
+ UserBrowser: s.UserBrowser,
+ UserBrowserVersion: s.UserBrowserVersion,
+ UserDeviceType: s.UserDeviceType,
UserDeviceMemorySize: s.UserDeviceMemorySize,
- UserDeviceHeapSize: s.UserDeviceHeapSize,
- UserID: s.UserID,
+ UserDeviceHeapSize: s.UserDeviceHeapSize,
+ UserID: &s.UserID,
}
- if err := c.Conn.InsertSessionStart(sessionID, c.sessions[ sessionID ]); err != nil {
- c.sessions[ sessionID ] = nil
+ if err := c.Conn.InsertSessionStart(sessionID, c.sessions[sessionID]); err != nil {
+ c.sessions[sessionID] = nil
return err
}
- return nil;
+ return nil
}
func (c *PGCache) InsertWebSessionEnd(sessionID uint64, e *SessionEnd) error {
@@ -54,4 +53,3 @@ func (c *PGCache) InsertWebErrorEvent(sessionID uint64, e *ErrorEvent) error {
session.ErrorsCount += 1
return nil
}
-
diff --git a/backend/pkg/db/postgres/connector.go b/backend/pkg/db/postgres/connector.go
index 37e09f812..cfa8f28f8 100644
--- a/backend/pkg/db/postgres/connector.go
+++ b/backend/pkg/db/postgres/connector.go
@@ -3,11 +3,17 @@ package postgres
import (
"context"
"log"
+ "time"
"github.com/jackc/pgx/v4"
"github.com/jackc/pgx/v4/pgxpool"
)
+func getTimeoutContext() context.Context {
+ ctx, _ := context.WithTimeout(context.Background(), time.Duration(time.Second*10))
+ return ctx
+}
+
type Conn struct {
c *pgxpool.Pool // TODO: conditional usage of Pool/Conn (use interface?)
}
@@ -15,7 +21,8 @@ type Conn struct {
func NewConn(url string) *Conn {
c, err := pgxpool.Connect(context.Background(), url)
if err != nil {
- log.Fatalln(err)
+ log.Println(err)
+ log.Fatalln("pgxpool.Connect Error")
}
return &Conn{c}
}
@@ -26,15 +33,15 @@ func (conn *Conn) Close() error {
}
func (conn *Conn) query(sql string, args ...interface{}) (pgx.Rows, error) {
- return conn.c.Query(context.Background(), sql, args...)
+ return conn.c.Query(getTimeoutContext(), sql, args...)
}
func (conn *Conn) queryRow(sql string, args ...interface{}) pgx.Row {
- return conn.c.QueryRow(context.Background(), sql, args...)
+ return conn.c.QueryRow(getTimeoutContext(), sql, args...)
}
func (conn *Conn) exec(sql string, args ...interface{}) error {
- _, err := conn.c.Exec(context.Background(), sql, args...)
+ _, err := conn.c.Exec(getTimeoutContext(), sql, args...)
return err
}
diff --git a/backend/pkg/db/postgres/messages_web.go b/backend/pkg/db/postgres/messages_web.go
index 6e2045d99..f7354252b 100644
--- a/backend/pkg/db/postgres/messages_web.go
+++ b/backend/pkg/db/postgres/messages_web.go
@@ -1,11 +1,11 @@
package postgres
import (
- "math"
+ "math"
"openreplay/backend/pkg/hashid"
- "openreplay/backend/pkg/url"
. "openreplay/backend/pkg/messages"
+ "openreplay/backend/pkg/url"
)
// TODO: change messages and replace everywhere to e.Index
@@ -172,11 +172,12 @@ func (conn *Conn) InsertWebErrorEvent(sessionID uint64, projectID uint32, e *Err
}
defer tx.rollback()
errorID := hashid.WebErrorID(projectID, e)
+
if err = tx.exec(`
INSERT INTO errors
(error_id, project_id, source, name, message, payload)
VALUES
- ($1, $2, $3, $4, $5, $6)
+ ($1, $2, $3, $4, $5, $6::jsonb)
ON CONFLICT DO NOTHING`,
errorID, projectID, e.Source, e.Name, e.Message, e.Payload,
); err != nil {
diff --git a/backend/pkg/db/postgres/session.go b/backend/pkg/db/postgres/session.go
index c2e731088..7148d9871 100644
--- a/backend/pkg/db/postgres/session.go
+++ b/backend/pkg/db/postgres/session.go
@@ -1,11 +1,12 @@
package postgres
//import . "openreplay/backend/pkg/messages"
-import . "openreplay/backend/pkg/db/types"
+import . "openreplay/backend/pkg/db/types"
+
//import "log"
func (conn *Conn) GetSession(sessionID uint64) (*Session, error) {
- s := &Session{ SessionID: sessionID }
+ s := &Session{SessionID: sessionID}
var revID, userOSVersion *string
if err := conn.queryRow(`
SELECT platform,
@@ -21,13 +22,13 @@ func (conn *Conn) GetSession(sessionID uint64) (*Session, error) {
`,
sessionID,
).Scan(&s.Platform,
- &s.Duration, &s.ProjectID, &s.Timestamp,
- &s.UserUUID, &s.UserOS, &userOSVersion,
- &s.UserDevice, &s.UserDeviceType, &s.UserCountry,
- &revID, &s.TrackerVersion,
- &s.UserID, &s.UserAnonymousID,
- &s.Metadata1, &s.Metadata2, &s.Metadata3, &s.Metadata4, &s.Metadata5,
- &s.Metadata6, &s.Metadata7, &s.Metadata8, &s.Metadata9, &s.Metadata10); err != nil {
+ &s.Duration, &s.ProjectID, &s.Timestamp,
+ &s.UserUUID, &s.UserOS, &userOSVersion,
+ &s.UserDevice, &s.UserDeviceType, &s.UserCountry,
+ &revID, &s.TrackerVersion,
+ &s.UserID, &s.UserAnonymousID,
+ &s.Metadata1, &s.Metadata2, &s.Metadata3, &s.Metadata4, &s.Metadata5,
+ &s.Metadata6, &s.Metadata7, &s.Metadata8, &s.Metadata9, &s.Metadata10); err != nil {
return nil, err
}
if userOSVersion != nil { // TODO: choose format, make f
@@ -35,7 +36,7 @@ func (conn *Conn) GetSession(sessionID uint64) (*Session, error) {
}
if revID != nil {
s.RevID = *revID
- }
+ }
return s, nil
}
@@ -103,4 +104,4 @@ func (conn *Conn) GetSession(sessionID uint64) (*Session, error) {
// }
// }
// return list
-// }
\ No newline at end of file
+// }
diff --git a/backend/pkg/db/types/session.go b/backend/pkg/db/types/session.go
index d354b0cd2..92607514d 100644
--- a/backend/pkg/db/types/session.go
+++ b/backend/pkg/db/types/session.go
@@ -1,46 +1,47 @@
package types
type Session struct {
- SessionID uint64
- Timestamp uint64
- ProjectID uint32
+ SessionID uint64
+ Timestamp uint64
+ ProjectID uint32
TrackerVersion string
- RevID string
- UserUUID string
- UserOS string
- UserOSVersion string
- UserDevice string
- UserCountry string
+ RevID string
+ UserUUID string
+ UserOS string
+ UserOSVersion string
+ UserDevice string
+ UserCountry string
- Duration *uint64
- PagesCount int
- EventsCount int
- ErrorsCount int
- UserID string // pointer??
+ Duration *uint64
+ PagesCount int
+ EventsCount int
+ ErrorsCount int
+
+ UserID *string // pointer??
UserAnonymousID *string
- Metadata1 *string
- Metadata2 *string
- Metadata3 *string
- Metadata4 *string
- Metadata5 *string
- Metadata6 *string
- Metadata7 *string
- Metadata8 *string
- Metadata9 *string
- Metadata10 *string
+ Metadata1 *string
+ Metadata2 *string
+ Metadata3 *string
+ Metadata4 *string
+ Metadata5 *string
+ Metadata6 *string
+ Metadata7 *string
+ Metadata8 *string
+ Metadata9 *string
+ Metadata10 *string
Platform string
// Only-web properties
- UserAgent string
- UserBrowser string
- UserBrowserVersion string
- UserDeviceType string
+ UserAgent string
+ UserBrowser string
+ UserBrowserVersion string
+ UserDeviceType string
UserDeviceMemorySize uint64
- UserDeviceHeapSize uint64
+ UserDeviceHeapSize uint64
}
func (s *Session) SetMetadata(keyNo uint, value string) {
- switch (keyNo) {
+ switch keyNo {
case 1:
s.Metadata1 = &value
case 2:
@@ -62,4 +63,4 @@ func (s *Session) SetMetadata(keyNo uint, value string) {
case 10:
s.Metadata10 = &value
}
-}
\ No newline at end of file
+}
diff --git a/backend/pkg/env/aws.go b/backend/pkg/env/aws.go
index dd277f2b1..6573c8551 100644
--- a/backend/pkg/env/aws.go
+++ b/backend/pkg/env/aws.go
@@ -23,7 +23,8 @@ func AWSSessionOnRegion(region string) *_session.Session {
}
aws_session, err := _session.NewSession(config)
if err != nil {
- log.Fatalf("AWS session error: %v\n", err)
+ log.Printf("AWS session error: %v\n", err)
+ log.Fatal("AWS session error")
}
return aws_session
}
diff --git a/backend/pkg/intervals/intervals.go b/backend/pkg/intervals/intervals.go
index 5cc603ad6..0380f68f9 100644
--- a/backend/pkg/intervals/intervals.go
+++ b/backend/pkg/intervals/intervals.go
@@ -2,7 +2,7 @@ package intervals
const EVENTS_COMMIT_INTERVAL = 30 * 1000
const HEARTBEAT_INTERVAL = 2 * 60 * 1000
-const INTEGRATIONS_REQUEST_INTERVAL = 2 * 60 * 1000
+const INTEGRATIONS_REQUEST_INTERVAL = 1 * 60 * 1000
const EVENTS_PAGE_EVENT_TIMEOUT = 2 * 60 * 1000
const EVENTS_INPUT_EVENT_TIMEOUT = 2 * 60 * 1000
const EVENTS_PERFORMANCE_AGGREGATION_TIMEOUT = 2 * 60 * 1000
diff --git a/backend/services/db/main.go b/backend/services/db/main.go
index a14aa7648..5d2759c90 100644
--- a/backend/services/db/main.go
+++ b/backend/services/db/main.go
@@ -8,14 +8,14 @@ import (
"os/signal"
"syscall"
+ "openreplay/backend/pkg/db/cache"
+ "openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/env"
+ "openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue"
"openreplay/backend/pkg/queue/types"
- "openreplay/backend/pkg/messages"
- "openreplay/backend/pkg/db/postgres"
- "openreplay/backend/pkg/db/cache"
"openreplay/backend/services/db/heuristics"
-)
+)
var pg *cache.PGCache
@@ -23,62 +23,62 @@ func main() {
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
initStats()
- pg = cache.NewPGCache(postgres.NewConn(env.String("POSTGRES_STRING")), 1000 * 60 * 20)
+ pg = cache.NewPGCache(postgres.NewConn(env.String("POSTGRES_STRING")), 1000*60*20)
defer pg.Close()
heurFinder := heuristics.NewHandler()
consumer := queue.NewMessageConsumer(
env.String("GROUP_DB"),
- []string{
+ []string{
env.String("TOPIC_RAW_IOS"),
env.String("TOPIC_TRIGGER"),
- },
- func(sessionID uint64, msg messages.Message, _ *types.Meta) {
- if err := insertMessage(sessionID, msg); err != nil {
- if !postgres.IsPkeyViolation(err) {
- log.Printf("Message Insertion Error %v, SessionID: %v, Message: %v", err,sessionID, msg)
- }
- return
- }
+ },
+ func(sessionID uint64, msg messages.Message, _ *types.Meta) {
+ if err := insertMessage(sessionID, msg); err != nil {
+ if !postgres.IsPkeyViolation(err) {
+ log.Printf("Message Insertion Error %v, SessionID: %v, Message: %v", err, sessionID, msg)
+ }
+ return
+ }
- session, err := pg.GetSession(sessionID)
+ session, err := pg.GetSession(sessionID)
if err != nil {
// Might happen due to the assets-related message TODO: log only if session is necessary for this kind of message
log.Printf("Error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, sessionID, msg)
- return;
+ return
}
- err = insertStats(session, msg)
- if err != nil {
- log.Printf("Stats Insertion Error %v; Session: %v, Message: %v", err, session, msg)
- }
+ err = insertStats(session, msg)
+ if err != nil {
+ log.Printf("Stats Insertion Error %v; Session: %v, Message: %v", err, session, msg)
+ }
heurFinder.HandleMessage(session, msg)
heurFinder.IterateSessionReadyMessages(sessionID, func(msg messages.Message) {
// TODO: DRY code (carefully with the return statement logic)
if err := insertMessage(sessionID, msg); err != nil {
- if !postgres.IsPkeyViolation(err) {
- log.Printf("Message Insertion Error %v; Session: %v, Message %v", err, session, msg)
- }
- return
- }
+ if !postgres.IsPkeyViolation(err) {
+ log.Printf("Message Insertion Error %v; Session: %v, Message %v", err, session, msg)
+ }
+ return
+ }
- err = insertStats(session, msg)
- if err != nil {
- log.Printf("Stats Insertion Error %v; Session: %v, Message %v", err, session, msg)
- }
+ err = insertStats(session, msg)
+ if err != nil {
+ log.Printf("Stats Insertion Error %v; Session: %v, Message %v", err, session, msg)
+ }
})
},
)
consumer.DisableAutoCommit()
sigchan := make(chan os.Signal, 1)
- signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
+ signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
- tick := time.Tick(15 * time.Second)
+ tick := time.Tick(15 * time.Second)
- log.Printf("Db service started\n")
+ log.Printf("Db service started\n")
for {
select {
case sig := <-sigchan:
@@ -88,11 +88,11 @@ func main() {
case <-tick:
if err := commitStats(); err != nil {
log.Printf("Error on stats commit: %v", err)
- }
+ }
// TODO?: separate stats & regular messages
if err := consumer.Commit(); err != nil {
log.Printf("Error on consumer commit: %v", err)
- }
+ }
default:
err := consumer.ConsumeNext()
if err != nil {
@@ -101,4 +101,4 @@ func main() {
}
}
-}
\ No newline at end of file
+}
diff --git a/backend/services/http/main.go b/backend/services/http/main.go
index eaede2d4b..8ed8b6d95 100644
--- a/backend/services/http/main.go
+++ b/backend/services/http/main.go
@@ -142,7 +142,8 @@ func main() {
http2.ConfigureServer(server, nil)
go func() {
if err := server.ListenAndServe(); err != nil {
- log.Fatalf("Server error: %v\n", err)
+ log.Printf("Server error: %v\n", err)
+ log.Fatal("Server error")
}
}()
log.Printf("Server successfully started on port %v\n", HTTP_PORT)
diff --git a/backend/services/integrations/integration/client.go b/backend/services/integrations/integration/client.go
index 3786f4324..2abf9913d 100644
--- a/backend/services/integrations/integration/client.go
+++ b/backend/services/integrations/integration/client.go
@@ -1,12 +1,13 @@
package integration
import (
- "sync"
- "fmt"
"encoding/json"
+ "fmt"
+ "log"
+ "sync"
- "openreplay/backend/pkg/messages"
"openreplay/backend/pkg/db/postgres"
+ "openreplay/backend/pkg/messages"
"openreplay/backend/pkg/utime"
)
@@ -135,6 +136,8 @@ func (c *client) Request() {
c.requestData.LastAttemptTimestamp = utime.CurrentTimestamp()
err := c.requester.Request(c)
if err != nil {
+		log.Println("ERROR L139")
+ log.Println(err)
c.handleError(err)
c.requestData.UnsuccessfullAttemptsCount++;
} else {
diff --git a/backend/services/integrations/integration/elasticsearch.go b/backend/services/integrations/integration/elasticsearch.go
index 66389f7b9..14480e0b8 100644
--- a/backend/services/integrations/integration/elasticsearch.go
+++ b/backend/services/integrations/integration/elasticsearch.go
@@ -1,193 +1,222 @@
package integration
import (
- elasticlib "github.com/elastic/go-elasticsearch/v7"
+ "bytes"
"context"
- "time"
+ b64 "encoding/base64"
"encoding/json"
"fmt"
- "bytes"
+ elasticlib "github.com/elastic/go-elasticsearch/v7"
+ "log"
"strconv"
+ "time"
- "openreplay/backend/pkg/utime"
"openreplay/backend/pkg/messages"
+ "openreplay/backend/pkg/utime"
)
-
type elasticsearch struct {
- Host string
- Port json.Number
- ApiKeyId string //`json:"api_key_id"`
- ApiKey string //`json:"api_key"`
- Indexes string
+ Host string
+ Port json.Number
+ ApiKeyId string //`json:"api_key_id"`
+ ApiKey string //`json:"api_key"`
+ Indexes string
}
type elasticsearchLog struct {
Message string
- Time time.Time `json:"utc_time"` // Should be parsed automatically from RFC3339
+ Time time.Time `json:"utc_time"` // Should be parsed automatically from RFC3339
}
-type elasticResponce struct {
- Hits struct {
- //Total struct {
- // Value int
- //}
- Hits []struct {
- Id string `json:"_id"`
- Source json.RawMessage `json:"_source"`
- }
- }
- ScrollId string `json:"_scroll_id"`
-}
-
-
-func (es *elasticsearch) Request(c* client) error {
+func (es *elasticsearch) Request(c *client) error {
address := es.Host + ":" + es.Port.String()
+ apiKey := b64.StdEncoding.EncodeToString([]byte(es.ApiKeyId + ":" + es.ApiKey))
cfg := elasticlib.Config{
- Addresses: []string{
- address,
- },
- Username: es.ApiKeyId,
- Password: es.ApiKey,
+ Addresses: []string{
+ address,
+ },
+ //Username: es.ApiKeyId,
+ //Password: es.ApiKey,
+ APIKey: apiKey,
}
esC, err := elasticlib.NewClient(cfg)
if err != nil {
+ log.Println("Error while creating new ES client")
+ log.Println(err)
return err
}
- // TODO: ping/versions/ client host check
- // res0, err := esC.Info()
- // if err != nil {
- // log.Printf("ELASTIC Error getting info: %s", err)
- // }
- // defer res0.Body.Close()
- // // Check response status
- // if res0.IsError() {
- // log.Printf("ELASTIC Error: %s", res0.String())
- // }
- // log.Printf("ELASTIC Info: %v ", res0.String())
-
gteTs := c.getLastMessageTimestamp() + 1000 // Sec or millisec to add ?
-
+ log.Printf("gteTs: %v ", gteTs)
var buf bytes.Buffer
- query := map[string]interface{}{
- "query": map[string]interface{}{
- "bool": map[string]interface{}{
- "filter": []map[string]interface{}{
- map[string]interface{}{
- "match": map[string]interface{} {
- "message": map[string]interface{}{
- "query": "openReplaySessionToken=", // asayer_session_id=
- },
- },
- },
- map[string]interface{}{
- "range": map[string]interface{} {
- "utc_time": map[string]interface{}{
- "gte": strconv.FormatUint(gteTs, 10),
- "lte": "now",
- },
- },
- },
- map[string]interface{}{
- "term": map[string]interface{}{
- "tags": "error",
- },
- },
- },
- },
- },
- }
- if err := json.NewEncoder(&buf).Encode(query); err != nil {
- return fmt.Errorf("Error encoding the query: %s", err)
- }
+ query := map[string]interface{}{
+ "query": map[string]interface{}{
+ "bool": map[string]interface{}{
+ "filter": []map[string]interface{}{
+ map[string]interface{}{
+ "match": map[string]interface{}{
+ "message": map[string]interface{}{
+ "query": "openReplaySessionToken=", // asayer_session_id=
+ },
+ },
+ },
+ map[string]interface{}{
+ "range": map[string]interface{}{
+ "utc_time": map[string]interface{}{
+ "gte": strconv.FormatUint(gteTs, 10),
+ "lte": "now",
+ },
+ },
+ },
+ map[string]interface{}{
+ "term": map[string]interface{}{
+ "tags": "error",
+ },
+ },
+ },
+ },
+ },
+ }
+
+ if err := json.NewEncoder(&buf).Encode(query); err != nil {
+ return fmt.Errorf("Error encoding the query: %s", err)
+ }
res, err := esC.Search(
- esC.Search.WithContext(context.Background()),
- esC.Search.WithIndex(es.Indexes),
- esC.Search.WithSize(1000),
- esC.Search.WithScroll(time.Minute * 2),
- esC.Search.WithBody(&buf),
- esC.Search.WithSort("timestamp:asc"),
- )
- if err != nil {
- return fmt.Errorf("Error getting response: %s", err)
- }
- defer res.Body.Close()
- if res.IsError() {
- var e map[string]interface{}
- if err := json.NewDecoder(res.Body).Decode(&e); err != nil {
- return fmt.Errorf("Error parsing the response body: %v", err)
- } else {
- return fmt.Errorf("Elasticsearch [%s] %s: %s",
- res.Status(),
- e["error"],//.(map[string]interface{})["type"],
- e["error"],//.(map[string]interface{})["reason"],
- )
- }
- }
+ esC.Search.WithContext(context.Background()),
+ esC.Search.WithIndex(es.Indexes),
+ esC.Search.WithSize(1000),
+ esC.Search.WithScroll(time.Minute*2),
+ esC.Search.WithBody(&buf),
+ esC.Search.WithSort("utc_time:asc"),
+ )
+ if err != nil {
+ return fmt.Errorf("Error getting response: %s", err)
+ }
+ defer res.Body.Close()
+ if res.IsError() {
+ var e map[string]interface{}
+ if err := json.NewDecoder(res.Body).Decode(&e); err != nil {
+ log.Printf("Error parsing the Error response body: %v\n", err)
+ return fmt.Errorf("Error parsing the Error response body: %v", err)
+ } else {
+ log.Printf("Elasticsearch Error [%s] %s: %s\n",
+ res.Status(),
+ e["error"],
+ e["error"],
+ )
+ return fmt.Errorf("Elasticsearch Error [%s] %s: %s",
+ res.Status(),
+ e["error"],
+ e["error"],
+ )
+ }
+ }
- for {
- var esResp elasticResponce
- if err := json.NewDecoder(res.Body).Decode(&esResp); err != nil {
- return fmt.Errorf("Error parsing the response body: %s", err)
- }
- if len(esResp.Hits.Hits) == 0 {
- break
- }
+ for {
+ var esResp map[string]interface{}
+ if err := json.NewDecoder(res.Body).Decode(&esResp); err != nil {
+ return fmt.Errorf("Error parsing the response body: %s", err)
+ // If no error, then convert response to a map[string]interface
+ }
- for _, hit := range esResp.Hits.Hits {
- var esLog elasticsearchLog
- if err = json.Unmarshal(hit.Source, &esLog); err != nil {
- c.errChan <- err
+ if _, ok := esResp["hits"]; !ok {
+ log.Printf("Hits not found in \n%v\n", esResp)
+ break
+ }
+ hits := esResp["hits"].(map[string]interface{})["hits"].([]interface{})
+ if len(hits) == 0 {
+ log.Println("No hits found")
+ break
+ }
+ log.Printf("received %d hits", len(hits))
+ for _, hit := range hits {
+
+ // Parse the attributes/fields of the document
+ doc := hit.(map[string]interface{})
+ source := doc["_source"].(map[string]interface{})
+
+ if _, ok := source["message"]; !ok {
+ log.Printf("message not found in doc \n%v\n", doc)
+ c.errChan <- fmt.Errorf("message not found in doc '%v' ", doc)
continue
}
+
+ if _, ok := source["utc_time"]; !ok {
+ log.Printf("utc_time not found in doc \n%v\n", doc)
+ c.errChan <- fmt.Errorf("utc_time not found in doc '%v' ", doc)
+ continue
+ }
+
+ parsedTime, err := time.Parse(time.RFC3339, source["utc_time"].(string))
+ if err != nil {
+ log.Println("cannot parse time")
+ c.errChan <- fmt.Errorf("cannot parse RFC3339 time of doc '%v' ", doc)
+ continue
+ }
+ esLog := elasticsearchLog{Message: source["message"].(string), Time: parsedTime}
+ docID := doc["_id"]
+
token, err := GetToken(esLog.Message)
if err != nil {
+ log.Printf("Error generating token: %s\n", err)
c.errChan <- err
continue
}
- //parsedTime, err := time.Parse(time.RFC3339, esLog.Timestamp)
- //if err != nil {
- // c.errChan <- err
- // continue
- //}
timestamp := uint64(utime.ToMilliseconds(esLog.Time))
c.setLastMessageTimestamp(timestamp)
+
+ var sessionID uint64
+ sessionID, err = strconv.ParseUint(token, 10, 64)
+ if err != nil {
+				log.Printf("Error converting token to uint64: %s\n", err)
+ sessionID = 0
+ }
+ payload, err := json.Marshal(source)
+ if err != nil {
+ log.Printf("Error converting source to json: %v\n", source)
+ continue
+ }
c.evChan <- &SessionErrorEvent{
//SessionID: sessionID,
- Token: token,
+ SessionID: sessionID,
+ Token: token,
RawErrorEvent: &messages.RawErrorEvent{
- Source: "elasticsearch",
+ Source: "elasticsearch",
Timestamp: timestamp,
- Name: hit.Id, // sure?
- Payload: string(hit.Source),
+ Name: fmt.Sprintf("%v", docID),
+ Payload: string(payload),
},
}
- }
-
- res, err = esC.Scroll(
- esC.Scroll.WithContext(context.Background()),
- esC.Scroll.WithScrollID(esResp.ScrollId),
- esC.Scroll.WithScroll(time.Minute * 2),
- )
- if err != nil {
- return fmt.Errorf("Error getting scroll response: %s", err)
- }
- defer res.Body.Close()
- if res.IsError() {
- var e map[string]interface{}
- if err := json.NewDecoder(res.Body).Decode(&e); err != nil {
- return fmt.Errorf("Error parsing the response body: %v", err)
- } else {
- return fmt.Errorf("Elasticsearch [%s] %s: %s",
- res.Status(),
- e["error"],//.(map[string]interface{})["type"],
- e["error"],//.(map[string]interface{})["reason"],
- )
- }
- }
+ }
+ if _, ok := esResp["_scroll_id"]; !ok {
+ log.Println("_scroll_id not found")
+ break
+ }
+ log.Println("Scrolling...")
+ scrollId := esResp["_scroll_id"]
+ res, err = esC.Scroll(
+ esC.Scroll.WithContext(context.Background()),
+ esC.Scroll.WithScrollID(fmt.Sprintf("%v", scrollId)),
+ esC.Scroll.WithScroll(time.Minute*2),
+ )
+ if err != nil {
+ return fmt.Errorf("Error getting scroll response: %s", err)
+ }
+ defer res.Body.Close()
+ if res.IsError() {
+ var e map[string]interface{}
+ if err := json.NewDecoder(res.Body).Decode(&e); err != nil {
+ return fmt.Errorf("Error parsing the response body: %v", err)
+ } else {
+ return fmt.Errorf("Elasticsearch [%s] %s: %s",
+ res.Status(),
+ e["error"], //.(map[string]interface{})["type"],
+ e["error"], //.(map[string]interface{})["reason"],
+ )
+ }
+ }
}
return nil
-}
\ No newline at end of file
+}
diff --git a/backend/services/integrations/main.go b/backend/services/integrations/main.go
index f664fe862..e93a7a0cd 100644
--- a/backend/services/integrations/main.go
+++ b/backend/services/integrations/main.go
@@ -8,11 +8,11 @@ import (
"os/signal"
"syscall"
+ "openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/env"
"openreplay/backend/pkg/intervals"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue"
- "openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/token"
"openreplay/backend/services/integrations/clientManager"
)
@@ -42,12 +42,13 @@ func main() {
}
})
- producer:= queue.NewProducer()
+ producer := queue.NewProducer()
defer producer.Close(15000)
listener, err := postgres.NewIntegrationsListener(POSTGRES_STRING)
if err != nil {
- log.Fatalf("Postgres listener error: %v\n", err)
+ log.Printf("Postgres listener error: %v\n", err)
+ log.Fatalf("Postgres listener error")
}
defer listener.Close()
@@ -66,10 +67,10 @@ func main() {
pg.Close()
os.Exit(0)
case <-tick:
- // log.Printf("Requesting all...\n")
+ log.Printf("Requesting all...\n")
manager.RequestAll()
case event := <-manager.Events:
- // log.Printf("New integration event: %v\n", *event.RawErrorEvent)
+ log.Printf("New integration event: %+v\n", *event.RawErrorEvent)
sessionID := event.SessionID
if sessionID == 0 {
sessData, err := tokenizer.Parse(event.Token)
@@ -83,13 +84,19 @@ func main() {
producer.Produce(TOPIC_RAW_WEB, sessionID, messages.Encode(event.RawErrorEvent))
case err := <-manager.Errors:
log.Printf("Integration error: %v\n", err)
+ listener.Close()
+ pg.Close()
+ os.Exit(0)
case i := <-manager.RequestDataUpdates:
// log.Printf("Last request integration update: %v || %v\n", i, string(i.RequestData))
if err := pg.UpdateIntegrationRequestData(&i); err != nil {
log.Printf("Postgres Update request_data error: %v\n", err)
}
case err := <-listener.Errors:
- log.Printf("Postgres listen error: %v\n", err)
+ log.Printf("Postgres listen error: %v\n", err)
+ listener.Close()
+ pg.Close()
+ os.Exit(0)
case iPointer := <-listener.Integrations:
log.Printf("Integration update: %v\n", *iPointer)
err := manager.Update(iPointer)
diff --git a/ee/api/.env.default b/ee/api/.env.default
index cec7e59a4..28f46f273 100644
--- a/ee/api/.env.default
+++ b/ee/api/.env.default
@@ -37,7 +37,8 @@ jwt_algorithm=HS512
jwt_exp_delta_seconds=2592000
jwt_issuer=openreplay-default-ee
jwt_secret="SET A RANDOM STRING HERE"
-peers=http://utilities-openreplay.app.svc.cluster.local:9000/assist/%s/peers
+peersList=http://utilities-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-list
+peers=http://utilities-openreplay.app.svc.cluster.local:9001/assist/%s/sockets-live
pg_dbname=postgres
pg_host=postgresql.db.svc.cluster.local
pg_password=asayerPostgres
diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py
index 0519bb143..b25efee7b 100644
--- a/ee/api/chalicelib/core/errors.py
+++ b/ee/api/chalicelib/core/errors.py
@@ -509,7 +509,8 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
FROM errors
WHERE {" AND ".join(ch_sub_query)}
GROUP BY error_id, name, message
- ORDER BY {sort} {order}) AS details INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence
+ ORDER BY {sort} {order}
+ LIMIT 1001) AS details INNER JOIN (SELECT error_id AS error_id, toUnixTimestamp(MAX(datetime))*1000 AS last_occurrence, toUnixTimestamp(MIN(datetime))*1000 AS first_occurrence
FROM errors
GROUP BY error_id) AS time_details
ON details.error_id=time_details.error_id
diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py
index ce5bcca5d..2d4effc0e 100644
--- a/ee/api/chalicelib/core/users.py
+++ b/ee/api/chalicelib/core/users.py
@@ -340,6 +340,11 @@ def edit(user_id_to_update, tenant_id, changes, editor_id):
return {"data": user}
+def edit_appearance(user_id, tenant_id, changes):
+ updated_user = update(tenant_id=tenant_id, user_id=user_id, changes=changes)
+ return {"data": updated_user}
+
+
def get_by_email_only(email):
with pg_client.PostgresClient() as cur:
cur.execute(
diff --git a/ee/backend/pkg/kafka/consumer.go b/ee/backend/pkg/kafka/consumer.go
index 43714e7ed..1483c2ccf 100644
--- a/ee/backend/pkg/kafka/consumer.go
+++ b/ee/backend/pkg/kafka/consumer.go
@@ -23,7 +23,6 @@ type Consumer struct {
pollTimeout uint
lastKafkaEventTs int64
- partitions []kafka.TopicPartition
}
func NewConsumer(group string, topics []string, messageHandler types.MessageHandler) *Consumer {
@@ -72,13 +71,15 @@ func (consumer *Consumer) Commit() error {
return nil
}
-func (consumer *Consumer) CommitBack(gap int64) error {
- if consumer.lastKafkaEventTs == 0 || consumer.partitions == nil {
- return nil
+func (consumer *Consumer) CommitAtTimestamp(commitTs int64) error {
+ assigned, err := consumer.c.Assignment()
+ if err != nil {
+ return err
}
- commitTs := consumer.lastKafkaEventTs - gap
+ logPartitions("Actually assigned:", assigned)
+
var timestamps []kafka.TopicPartition
- for _, p := range consumer.partitions { // p is a copy here sinse partition is not a pointer
+	for _, p := range assigned { // p is a copy here since partition is not a pointer
p.Offset = kafka.Offset(commitTs)
timestamps = append(timestamps, p)
}
@@ -86,13 +87,41 @@ func (consumer *Consumer) CommitBack(gap int64) error {
if err != nil {
return errors.Wrap(err, "Kafka Consumer back commit error")
}
+
+ // Limiting to already committed
+	committed, err := consumer.c.Committed(assigned, 2000) // memorise?
+	if err != nil {
+		return errors.Wrap(err, "Kafka Consumer retrieving committed error")
+	}
+	logPartitions("Actually committed:", committed)
+ for _, offs := range offsets {
+ for _, comm := range committed {
+ if comm.Offset == kafka.OffsetStored ||
+ comm.Offset == kafka.OffsetInvalid ||
+ comm.Offset == kafka.OffsetBeginning ||
+ comm.Offset == kafka.OffsetEnd { continue }
+ if comm.Partition == offs.Partition &&
+ (comm.Topic != nil && offs.Topic != nil && *comm.Topic == *offs.Topic) &&
+ comm.Offset > offs.Offset {
+ offs.Offset = comm.Offset
+ }
+ }
+ }
+
// TODO: check per-partition errors: offsets[i].Error
- // As an option: can store offsets and enable autocommit instead
_, err = consumer.c.CommitOffsets(offsets)
return errors.Wrap(err, "Kafka Consumer back commit error")
}
+func (consumer *Consumer) CommitBack(gap int64) error {
+ if consumer.lastKafkaEventTs == 0 {
+ return nil
+ }
+ commitTs := consumer.lastKafkaEventTs - gap
+ return consumer.CommitAtTimestamp(commitTs)
+}
+
func (consumer *Consumer) ConsumeNext() error {
ev := consumer.c.Poll(int(consumer.pollTimeout))
if ev == nil {
@@ -117,14 +146,15 @@ func (consumer *Consumer) ConsumeNext() error {
Timestamp: ts,
})
consumer.lastKafkaEventTs = ts
- case kafka.AssignedPartitions:
- logPartitions("Kafka Consumer: Partitions Assigned", e.Partitions)
- consumer.partitions = e.Partitions
- consumer.c.Assign(e.Partitions)
- case kafka.RevokedPartitions:
- log.Println("Kafka Cosumer: Partitions Revoked")
- consumer.partitions = nil
- consumer.c.Unassign()
+ // case kafka.AssignedPartitions:
+ // logPartitions("Kafka Consumer: Partitions Assigned", e.Partitions)
+ // consumer.partitions = e.Partitions
+ // consumer.c.Assign(e.Partitions)
+ // log.Printf("Actually partitions assigned!")
+ // case kafka.RevokedPartitions:
+ // log.Println("Kafka Cosumer: Partitions Revoked")
+ // consumer.partitions = nil
+ // consumer.c.Unassign()
case kafka.Error:
if e.Code() == kafka.ErrAllBrokersDown {
os.Exit(1)
diff --git a/ee/scripts/helm/db/init_dbs/clickhouse/1.5.0/1.5.0.sql b/ee/scripts/helm/db/init_dbs/clickhouse/1.5.0/1.5.0.sql
new file mode 100644
index 000000000..e259bdf69
--- /dev/null
+++ b/ee/scripts/helm/db/init_dbs/clickhouse/1.5.0/1.5.0.sql
@@ -0,0 +1,4 @@
+ALTER TABLE sessions
+ ADD COLUMN IF NOT EXISTS utm_source Nullable(String),
+ ADD COLUMN IF NOT EXISTS utm_medium Nullable(String),
+ ADD COLUMN IF NOT EXISTS utm_campaign Nullable(String);
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.5.0/1.5.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.5.0/1.5.0.sql
new file mode 100644
index 000000000..217ae5d6a
--- /dev/null
+++ b/ee/scripts/helm/db/init_dbs/postgresql/1.5.0/1.5.0.sql
@@ -0,0 +1,173 @@
+BEGIN;
+CREATE OR REPLACE FUNCTION openreplay_version()
+ RETURNS text AS
+$$
+SELECT 'v1.5.0-ee'
+$$ LANGUAGE sql IMMUTABLE;
+
+--
+CREATE TABLE IF NOT EXISTS traces
+(
+ user_id integer NULL REFERENCES users (user_id) ON DELETE CASCADE,
+ tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE,
+ created_at bigint NOT NULL DEFAULT (EXTRACT(EPOCH FROM now() at time zone 'utc') * 1000)::bigint,
+ auth text NULL,
+ action text NOT NULL,
+ method text NOT NULL,
+ path_format text NOT NULL,
+ endpoint text NOT NULL,
+ payload jsonb NULL,
+ parameters jsonb NULL,
+ status int NULL
+);
+CREATE INDEX IF NOT EXISTS traces_user_id_idx ON traces (user_id);
+CREATE INDEX IF NOT EXISTS traces_tenant_id_idx ON traces (tenant_id);
+
+CREATE INDEX IF NOT EXISTS user_favorite_sessions_user_id_session_id_idx ON user_favorite_sessions (user_id, session_id);
+
+CREATE INDEX IF NOT EXISTS pages_first_contentful_paint_time_idx ON events.pages (first_contentful_paint_time) WHERE first_contentful_paint_time > 0;
+CREATE INDEX IF NOT EXISTS pages_dom_content_loaded_time_idx ON events.pages (dom_content_loaded_time) WHERE dom_content_loaded_time > 0;
+CREATE INDEX IF NOT EXISTS pages_first_paint_time_idx ON events.pages (first_paint_time) WHERE first_paint_time > 0;
+CREATE INDEX IF NOT EXISTS pages_ttfb_idx ON events.pages (ttfb) WHERE ttfb > 0;
+CREATE INDEX IF NOT EXISTS pages_time_to_interactive_idx ON events.pages (time_to_interactive) WHERE time_to_interactive > 0;
+CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_loadgt0NN_idx ON events.pages (session_id, timestamp) WHERE load_time > 0 AND load_time IS NOT NULL;
+CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_visualgt0nn_idx ON events.pages (session_id, timestamp) WHERE visually_complete > 0 AND visually_complete IS NOT NULL;
+CREATE INDEX IF NOT EXISTS pages_timestamp_metgt0_idx ON events.pages (timestamp) WHERE response_time > 0 OR
+ first_paint_time > 0 OR
+ dom_content_loaded_time > 0 OR
+ ttfb > 0 OR
+ time_to_interactive > 0;
+CREATE INDEX IF NOT EXISTS pages_session_id_speed_indexgt0nn_idx ON events.pages (session_id, speed_index) WHERE speed_index > 0 AND speed_index IS NOT NULL;
+CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_dom_building_timegt0nn_idx ON events.pages (session_id, timestamp, dom_building_time) WHERE dom_building_time > 0 AND dom_building_time IS NOT NULL;
+CREATE INDEX IF NOT EXISTS issues_project_id_idx ON issues (project_id);
+
+CREATE INDEX IF NOT EXISTS errors_project_id_error_id_js_exception_idx ON public.errors (project_id, error_id) WHERE source = 'js_exception';
+CREATE INDEX IF NOT EXISTS errors_project_id_error_id_idx ON public.errors (project_id, error_id);
+CREATE INDEX IF NOT EXISTS errors_project_id_error_id_integration_idx ON public.errors (project_id, error_id) WHERE source != 'js_exception';
+
+CREATE INDEX IF NOT EXISTS sessions_start_ts_idx ON public.sessions (start_ts) WHERE duration > 0;
+CREATE INDEX IF NOT EXISTS sessions_project_id_idx ON public.sessions (project_id) WHERE duration > 0;
+CREATE INDEX IF NOT EXISTS sessions_session_id_project_id_start_ts_idx ON sessions (session_id, project_id, start_ts) WHERE duration > 0;
+
+CREATE INDEX IF NOT EXISTS user_favorite_sessions_user_id_session_id_idx ON user_favorite_sessions (user_id, session_id); -- NOTE(review): duplicate of the identical statement earlier in this file; no-op due to IF NOT EXISTS
+CREATE INDEX IF NOT EXISTS jobs_project_id_idx ON jobs (project_id);
+CREATE INDEX IF NOT EXISTS errors_session_id_timestamp_error_id_idx ON events.errors (session_id, timestamp, error_id);
+CREATE INDEX IF NOT EXISTS errors_error_id_timestamp_idx ON events.errors (error_id, timestamp);
+CREATE INDEX IF NOT EXISTS errors_timestamp_error_id_session_id_idx ON events.errors (timestamp, error_id, session_id);
+CREATE INDEX IF NOT EXISTS errors_error_id_timestamp_session_id_idx ON events.errors (error_id, timestamp, session_id);
+CREATE INDEX IF NOT EXISTS resources_timestamp_idx ON events.resources (timestamp);
+CREATE INDEX IF NOT EXISTS resources_success_idx ON events.resources (success);
+CREATE INDEX IF NOT EXISTS projects_project_key_idx ON public.projects (project_key);
+CREATE INDEX IF NOT EXISTS resources_timestamp_type_durationgt0NN_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL;
+CREATE INDEX IF NOT EXISTS resources_session_id_timestamp_idx ON events.resources (session_id, timestamp);
+CREATE INDEX IF NOT EXISTS resources_session_id_timestamp_type_idx ON events.resources (session_id, timestamp, type);
+CREATE INDEX IF NOT EXISTS resources_timestamp_type_durationgt0NN_noFetch_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL AND type != 'fetch';
+CREATE INDEX IF NOT EXISTS resources_session_id_timestamp_url_host_fail_idx ON events.resources (session_id, timestamp, url_host) WHERE success = FALSE;
+CREATE INDEX IF NOT EXISTS resources_session_id_timestamp_url_host_firstparty_idx ON events.resources (session_id, timestamp, url_host) WHERE type IN ('fetch', 'script');
+CREATE INDEX IF NOT EXISTS resources_session_id_timestamp_duration_durationgt0NN_img_idx ON events.resources (session_id, timestamp, duration) WHERE duration > 0 AND duration IS NOT NULL AND type = 'img';
+CREATE INDEX IF NOT EXISTS resources_timestamp_session_id_idx ON events.resources (timestamp, session_id);
+
+DROP TRIGGER IF EXISTS on_insert_or_update ON projects;
+CREATE TRIGGER on_insert_or_update
+ AFTER INSERT OR UPDATE
+ ON projects
+ FOR EACH ROW
+EXECUTE PROCEDURE notify_project();
+
+UPDATE tenants
+SET name=''
+WHERE name ISNULL;
+ALTER TABLE tenants
+ ALTER COLUMN name SET NOT NULL;
+
+ALTER TABLE sessions
+ ADD COLUMN IF NOT EXISTS utm_source text NULL DEFAULT NULL,
+ ADD COLUMN IF NOT EXISTS utm_medium text NULL DEFAULT NULL,
+ ADD COLUMN IF NOT EXISTS utm_campaign text NULL DEFAULT NULL;
+
+CREATE INDEX IF NOT EXISTS sessions_utm_source_gin_idx ON public.sessions USING GIN (utm_source gin_trgm_ops);
+CREATE INDEX IF NOT EXISTS sessions_utm_medium_gin_idx ON public.sessions USING GIN (utm_medium gin_trgm_ops);
+CREATE INDEX IF NOT EXISTS sessions_utm_campaign_gin_idx ON public.sessions USING GIN (utm_campaign gin_trgm_ops);
+CREATE INDEX IF NOT EXISTS requests_timestamp_session_id_failed_idx ON events_common.requests (timestamp, session_id) WHERE success = FALSE;
+
+DROP INDEX IF EXISTS sessions_project_id_user_browser_idx1;
+DROP INDEX IF EXISTS sessions_project_id_user_country_idx1;
+ALTER INDEX IF EXISTS platform_idx RENAME TO sessions_platform_idx;
+ALTER INDEX IF EXISTS events.resources_duration_idx RENAME TO resources_duration_durationgt0_idx;
+DROP INDEX IF EXISTS projects_project_key_idx1;
+CREATE INDEX IF NOT EXISTS errors_parent_error_id_idx ON errors (parent_error_id);
+
+CREATE INDEX IF NOT EXISTS performance_session_id_idx ON events.performance (session_id);
+CREATE INDEX IF NOT EXISTS performance_timestamp_idx ON events.performance (timestamp);
+CREATE INDEX IF NOT EXISTS performance_session_id_timestamp_idx ON events.performance (session_id, timestamp);
+CREATE INDEX IF NOT EXISTS performance_avg_cpu_gt0_idx ON events.performance (avg_cpu) WHERE avg_cpu > 0;
+CREATE INDEX IF NOT EXISTS performance_avg_used_js_heap_size_gt0_idx ON events.performance (avg_used_js_heap_size) WHERE avg_used_js_heap_size > 0;
+
+CREATE TABLE IF NOT EXISTS metrics
+(
+ metric_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+ project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+ user_id integer REFERENCES users (user_id) ON DELETE SET NULL,
+ name text NOT NULL,
+ is_public boolean NOT NULL DEFAULT FALSE,
+ created_at timestamp default timezone('utc'::text, now()) not null,
+ deleted_at timestamp
+);
+CREATE INDEX IF NOT EXISTS metrics_user_id_is_public_idx ON public.metrics (user_id, is_public);
+CREATE TABLE IF NOT EXISTS metric_series
+(
+ series_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+ metric_id integer REFERENCES metrics (metric_id) ON DELETE CASCADE,
+ index integer NOT NULL,
+ name text NULL,
+ filter jsonb NOT NULL,
+ created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
+ deleted_at timestamp
+);
+CREATE INDEX IF NOT EXISTS metric_series_metric_id_idx ON public.metric_series (metric_id);
+CREATE INDEX IF NOT EXISTS funnels_project_id_idx ON public.funnels (project_id);
+
+
+CREATE TABLE IF NOT EXISTS searches
+(
+ search_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+ project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+ user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
+ name text not null,
+ filter jsonb not null,
+ created_at timestamp default timezone('utc'::text, now()) not null,
+ deleted_at timestamp,
+ is_public boolean NOT NULL DEFAULT False
+);
+
+CREATE INDEX IF NOT EXISTS searches_user_id_is_public_idx ON public.searches (user_id, is_public);
+CREATE INDEX IF NOT EXISTS searches_project_id_idx ON public.searches (project_id);
+CREATE INDEX IF NOT EXISTS alerts_project_id_idx ON alerts (project_id);
+
+ALTER TABLE alerts
+ ADD COLUMN IF NOT EXISTS series_id integer NULL REFERENCES metric_series (series_id) ON DELETE CASCADE;
+
+CREATE INDEX IF NOT EXISTS alerts_series_id_idx ON alerts (series_id);
+UPDATE alerts
+SET options=jsonb_set(options, '{change}', '"change"')
+WHERE detection_method = 'change'
+ AND options -> 'change' ISNULL;
+
+ALTER TABLE roles
+ ADD COLUMN IF NOT EXISTS all_projects bool NOT NULL DEFAULT TRUE;
+
+CREATE TABLE IF NOT EXISTS roles_projects
+(
+ role_id integer NOT NULL REFERENCES roles (role_id) ON DELETE CASCADE,
+ project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+ CONSTRAINT roles_projects_pkey PRIMARY KEY (role_id, project_id)
+);
+CREATE INDEX IF NOT EXISTS roles_projects_role_id_idx ON roles_projects (role_id);
+CREATE INDEX IF NOT EXISTS roles_projects_project_id_idx ON roles_projects (project_id);
+--
+
+ALTER TABLE public.metrics
+ ADD COLUMN IF NOT EXISTS active boolean NOT NULL DEFAULT TRUE;
+CREATE INDEX IF NOT EXISTS resources_timestamp_duration_durationgt0NN_idx ON events.resources (timestamp, duration) WHERE duration > 0 AND duration IS NOT NULL;
+COMMIT;
+ALTER TYPE public.error_source ADD VALUE IF NOT EXISTS 'elasticsearch'; -- cannot add new value inside a transaction block
\ No newline at end of file
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index 35f3a5dd4..ffa83689c 100644
--- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -1,15 +1,15 @@
BEGIN;
--- Schemas and functions definitions:
CREATE SCHEMA IF NOT EXISTS events_common;
CREATE SCHEMA IF NOT EXISTS events;
+CREATE EXTENSION IF NOT EXISTS pg_trgm;
+CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE OR REPLACE FUNCTION openreplay_version()
RETURNS text AS
$$
-SELECT 'v1.4.0-ee'
+SELECT 'v1.5.0-ee'
$$ LANGUAGE sql IMMUTABLE;
--- --- accounts.sql ---
CREATE OR REPLACE FUNCTION generate_api_key(length integer) RETURNS text AS
$$
@@ -29,7 +29,7 @@ begin
end;
$$ LANGUAGE plpgsql;
--- --- events.sql ---
+
CREATE OR REPLACE FUNCTION events.funnel(steps integer[], m integer) RETURNS boolean AS
$$
@@ -54,13 +54,14 @@ BEGIN
END;
$$ LANGUAGE plpgsql IMMUTABLE;
--- --- integrations.sql ---
+
CREATE OR REPLACE FUNCTION notify_integration() RETURNS trigger AS
$$
BEGIN
IF NEW IS NULL THEN
- PERFORM pg_notify('integration', (row_to_json(OLD)::text || '{"options": null, "request_data": null}'::text));
+ PERFORM pg_notify('integration',
+ (row_to_json(OLD)::text || '{"options": null, "request_data": null}'::text));
ELSIF (OLD IS NULL) OR (OLD.options <> NEW.options) THEN
PERFORM pg_notify('integration', row_to_json(NEW)::text);
END IF;
@@ -68,7 +69,7 @@ BEGIN
END;
$$ LANGUAGE plpgsql;
--- --- alerts.sql ---
+
CREATE OR REPLACE FUNCTION notify_alert() RETURNS trigger AS
$$
@@ -85,7 +86,6 @@ BEGIN
END ;
$$ LANGUAGE plpgsql;
--- --- projects.sql ---
CREATE OR REPLACE FUNCTION notify_project() RETURNS trigger AS
$$
@@ -95,28 +95,51 @@ BEGIN
END;
$$ LANGUAGE plpgsql;
--- All tables and types:
+
DO
$$
BEGIN
- IF EXISTS(SELECT
- FROM information_schema.tables
- WHERE table_schema = 'public'
- AND table_name = 'tenants') THEN
- raise notice 'DB exists, skipping creation query';
+ IF (with to_check (name) as (
+ values ('alerts'),
+ ('announcements'),
+ ('assigned_sessions'),
+ ('autocomplete'),
+ ('basic_authentication'),
+ ('errors'),
+ ('funnels'),
+ ('integrations'),
+ ('issues'),
+ ('jira_cloud'),
+ ('jobs'),
+ ('metric_series'),
+ ('metrics'),
+ ('notifications'),
+ ('oauth_authentication'),
+ ('projects'),
+ ('roles'),
+ ('roles_projects'),
+ ('searches'),
+ ('sessions'),
+ ('tenants'),
+ ('traces'),
+ ('user_favorite_errors'),
+ ('user_favorite_sessions'),
+ ('user_viewed_errors'),
+ ('user_viewed_sessions'),
+ ('users'),
+ ('webhooks')
+ )
+ select bool_and(exists(select *
+ from information_schema.tables t
+ where table_schema = 'public'
+ AND table_name = to_check.name)) as all_present
+ from to_check) THEN
+            raise notice 'All public schema tables exist';
ELSE
- raise notice 'Creating DB';
+ raise notice 'Some or all public schema tables are missing, creating missing tables';
--- --- public.sql ---
-
- CREATE EXTENSION IF NOT EXISTS pg_trgm;
- CREATE EXTENSION IF NOT EXISTS pgcrypto;
-
-
--- --- accounts.sql ---
-
- CREATE TABLE tenants
+ CREATE TABLE IF NOT EXISTS tenants
(
tenant_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
user_id text NOT NULL DEFAULT generate_api_key(20),
@@ -135,7 +158,7 @@ $$
);
- CREATE TABLE roles
+ CREATE TABLE IF NOT EXISTS roles
(
role_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE,
@@ -148,9 +171,14 @@ $$
deleted_at timestamp NULL DEFAULT NULL
);
- CREATE TYPE user_role AS ENUM ('owner', 'admin', 'member');
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'user_role') THEN
+ CREATE TYPE user_role AS ENUM ('owner','admin','member');
+ END IF;
- CREATE TABLE users
+
+ CREATE TABLE IF NOT EXISTS users
(
user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE,
@@ -222,16 +250,16 @@ $$
}'::jsonb,
api_key text UNIQUE default generate_api_key(20) not null,
jwt_iat timestamp without time zone NULL DEFAULT NULL,
- data jsonb NOT NULL DEFAULT '{}'::jsonb,
+ data jsonb NOT NULL DEFAULT'{}'::jsonb,
weekly_report boolean NOT NULL DEFAULT TRUE,
origin text NULL DEFAULT NULL,
role_id integer REFERENCES roles (role_id) ON DELETE SET NULL,
internal_id text NULL DEFAULT NULL
);
- CREATE INDEX users_tenant_id_deleted_at_N_idx ON users (tenant_id) WHERE deleted_at ISNULL;
+ CREATE INDEX IF NOT EXISTS users_tenant_id_deleted_at_N_idx ON users (tenant_id) WHERE deleted_at ISNULL;
- CREATE TABLE basic_authentication
+ CREATE TABLE IF NOT EXISTS basic_authentication
(
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
password text DEFAULT NULL,
@@ -244,9 +272,13 @@ $$
UNIQUE (user_id)
);
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'oauth_provider') THEN
+ CREATE TYPE oauth_provider AS ENUM ('jira','github');
+ END IF;
- CREATE TYPE oauth_provider AS ENUM ('jira', 'github');
- CREATE TABLE oauth_authentication
+ CREATE TABLE IF NOT EXISTS oauth_authentication
(
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
provider oauth_provider NOT NULL,
@@ -256,9 +288,7 @@ $$
);
--- --- projects.sql ---
-
- CREATE TABLE projects
+ CREATE TABLE IF NOT EXISTS projects
(
project_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20),
@@ -279,35 +309,41 @@ $$
metadata_8 text DEFAULT NULL,
metadata_9 text DEFAULT NULL,
metadata_10 text DEFAULT NULL,
- gdpr jsonb NOT NULL DEFAULT '{
+ gdpr jsonb NOT NULL DEFAULT'{
"maskEmails": true,
"sampleRate": 33,
"maskNumbers": false,
"defaultInputMode": "plain"
- }'::jsonb -- ??????
+ }'::jsonb
);
- CREATE INDEX projects_project_key_idx ON public.projects (project_key);
+
+ CREATE INDEX IF NOT EXISTS projects_project_key_idx ON public.projects (project_key);
+ DROP TRIGGER IF EXISTS on_insert_or_update ON projects;
CREATE TRIGGER on_insert_or_update
AFTER INSERT OR UPDATE
ON projects
FOR EACH ROW
EXECUTE PROCEDURE notify_project();
- CREATE TABLE roles_projects
+ CREATE TABLE IF NOT EXISTS roles_projects
(
role_id integer NOT NULL REFERENCES roles (role_id) ON DELETE CASCADE,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
CONSTRAINT roles_projects_pkey PRIMARY KEY (role_id, project_id)
);
- CREATE INDEX roles_projects_role_id_idx ON roles_projects (role_id);
- CREATE INDEX roles_projects_project_id_idx ON roles_projects (project_id);
+ CREATE INDEX IF NOT EXISTS roles_projects_role_id_idx ON roles_projects (role_id);
+ CREATE INDEX IF NOT EXISTS roles_projects_project_id_idx ON roles_projects (project_id);
--- --- webhooks.sql ---
- create type webhook_type as enum ('webhook', 'slack', 'email');
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'webhook_type') THEN
+ create type webhook_type as enum ('webhook','slack','email');
+ END IF;
- create table webhooks
+
+ create table IF NOT EXISTS webhooks
(
webhook_id integer generated by default as identity
constraint webhooks_pkey
@@ -325,10 +361,8 @@ $$
name varchar(100)
);
--- --- notifications.sql ---
-
- CREATE TABLE notifications
+ CREATE TABLE IF NOT EXISTS notifications
(
notification_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
tenant_id integer REFERENCES tenants (tenant_id) ON DELETE CASCADE,
@@ -339,25 +373,24 @@ $$
button_url text NULL,
image_url text NULL,
created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
- options jsonb NOT NULL DEFAULT '{}'::jsonb,
+ options jsonb NOT NULL DEFAULT'{}'::jsonb,
CONSTRAINT notification_tenant_xor_user CHECK ( tenant_id NOTNULL AND user_id ISNULL OR
tenant_id ISNULL AND user_id NOTNULL )
);
- CREATE INDEX notifications_user_id_index ON notifications (user_id);
- CREATE INDEX notifications_tenant_id_index ON notifications (tenant_id);
- CREATE INDEX notifications_created_at_index ON notifications (created_at DESC);
- CREATE INDEX notifications_created_at_epoch_idx ON notifications (CAST(EXTRACT(EPOCH FROM created_at) * 1000 AS BIGINT) DESC);
+ CREATE INDEX IF NOT EXISTS notifications_user_id_index ON notifications (user_id);
+ CREATE INDEX IF NOT EXISTS notifications_tenant_id_index ON notifications (tenant_id);
+ CREATE INDEX IF NOT EXISTS notifications_created_at_index ON notifications (created_at DESC);
+ CREATE INDEX IF NOT EXISTS notifications_created_at_epoch_idx ON notifications (CAST(EXTRACT(EPOCH FROM created_at) * 1000 AS BIGINT) DESC);
- CREATE TABLE user_viewed_notifications
+ CREATE TABLE IF NOT EXISTS user_viewed_notifications
(
user_id integer NOT NULL REFERENCES users (user_id) on delete cascade,
notification_id integer NOT NULL REFERENCES notifications (notification_id) on delete cascade,
constraint user_viewed_notifications_pkey primary key (user_id, notification_id)
);
--- --- funnels.sql ---
- CREATE TABLE funnels
+ CREATE TABLE IF NOT EXISTS funnels
(
funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
@@ -369,14 +402,16 @@ $$
is_public boolean NOT NULL DEFAULT False
);
- CREATE INDEX funnels_user_id_is_public_idx ON public.funnels (user_id, is_public);
- CREATE INDEX funnels_project_id_idx ON public.funnels (project_id);
+ CREATE INDEX IF NOT EXISTS funnels_user_id_is_public_idx ON public.funnels (user_id, is_public);
+ CREATE INDEX IF NOT EXISTS funnels_project_id_idx ON public.funnels (project_id);
--- --- announcements.sql ---
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'announcement_type') THEN
+ create type announcement_type as enum ('notification','alert');
+ END IF;
- create type announcement_type as enum ('notification', 'alert');
-
- create table announcements
+ create table IF NOT EXISTS announcements
(
announcement_id serial not null
constraint announcements_pk
@@ -390,18 +425,23 @@ $$
type announcement_type default 'notification'::announcement_type not null
);
--- --- integrations.sql ---
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'integration_provider') THEN
+ CREATE TYPE integration_provider AS ENUM ('bugsnag','cloudwatch','datadog','newrelic','rollbar','sentry','stackdriver','sumologic','elasticsearch'); --,'jira','github');
+ END IF;
- CREATE TYPE integration_provider AS ENUM ('bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch'); --, 'jira', 'github');
- CREATE TABLE integrations
+ CREATE TABLE IF NOT EXISTS integrations
(
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
provider integration_provider NOT NULL,
options jsonb NOT NULL,
- request_data jsonb NOT NULL DEFAULT '{}'::jsonb,
+ request_data jsonb NOT NULL DEFAULT'{}'::jsonb,
PRIMARY KEY (project_id, provider)
);
+ DROP TRIGGER IF EXISTS on_insert_or_update_or_delete ON integrations;
+
CREATE TRIGGER on_insert_or_update_or_delete
AFTER INSERT OR UPDATE OR DELETE
ON integrations
@@ -409,7 +449,7 @@ $$
EXECUTE PROCEDURE notify_integration();
- create table jira_cloud
+ CREATE TABLE IF NOT EXISTS jira_cloud
(
user_id integer not null
constraint jira_cloud_pk
@@ -422,32 +462,33 @@ $$
url text
);
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'issue_type') THEN
+ CREATE TYPE issue_type AS ENUM (
+ 'click_rage',
+ 'dead_click',
+ 'excessive_scrolling',
+ 'bad_request',
+ 'missing_resource',
+ 'memory',
+ 'cpu',
+ 'slow_resource',
+ 'slow_page_load',
+ 'crash',
+ 'ml_cpu',
+ 'ml_memory',
+ 'ml_dead_click',
+ 'ml_click_rage',
+ 'ml_mouse_thrashing',
+ 'ml_excessive_scrolling',
+ 'ml_slow_resources',
+ 'custom',
+ 'js_exception'
+ );
+ END IF;
--- --- issues.sql ---
-
- CREATE TYPE issue_type AS ENUM (
- 'click_rage',
- 'dead_click',
- 'excessive_scrolling',
- 'bad_request',
- 'missing_resource',
- 'memory',
- 'cpu',
- 'slow_resource',
- 'slow_page_load',
- 'crash',
- 'ml_cpu',
- 'ml_memory',
- 'ml_dead_click',
- 'ml_click_rage',
- 'ml_mouse_thrashing',
- 'ml_excessive_scrolling',
- 'ml_slow_resources',
- 'custom',
- 'js_exception'
- );
-
- CREATE TABLE issues
+ CREATE TABLE IF NOT EXISTS issues
(
issue_id text NOT NULL PRIMARY KEY,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
@@ -455,16 +496,24 @@ $$
context_string text NOT NULL,
context jsonb DEFAULT NULL
);
- CREATE INDEX issues_issue_id_type_idx ON issues (issue_id, type);
- CREATE INDEX issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops);
- CREATE INDEX issues_project_id_issue_id_idx ON public.issues (project_id, issue_id);
- CREATE INDEX issues_project_id_idx ON issues (project_id);
+ CREATE INDEX IF NOT EXISTS issues_issue_id_type_idx ON issues (issue_id, type);
+ CREATE INDEX IF NOT EXISTS issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS issues_project_id_issue_id_idx ON public.issues (project_id, issue_id);
+ CREATE INDEX IF NOT EXISTS issues_project_id_idx ON issues (project_id);
--- --- errors.sql ---
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'error_source') THEN
+ CREATE TYPE error_source AS ENUM ('js_exception','bugsnag','cloudwatch','datadog','newrelic','rollbar','sentry','stackdriver','sumologic', 'elasticsearch');
+ END IF;
- CREATE TYPE error_source AS ENUM ('js_exception', 'bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic');
- CREATE TYPE error_status AS ENUM ('unresolved', 'resolved', 'ignored');
- CREATE TABLE errors
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'error_status') THEN
+ CREATE TYPE error_status AS ENUM ('unresolved','resolved','ignored');
+ END IF;
+
+ CREATE TABLE IF NOT EXISTS errors
(
error_id text NOT NULL PRIMARY KEY,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
@@ -477,40 +526,53 @@ $$
stacktrace jsonb, --to save the stacktrace and not query S3 another time
stacktrace_parsed_at timestamp
);
- CREATE INDEX errors_project_id_source_idx ON errors (project_id, source);
- CREATE INDEX errors_message_gin_idx ON public.errors USING GIN (message gin_trgm_ops);
- CREATE INDEX errors_name_gin_idx ON public.errors USING GIN (name gin_trgm_ops);
- CREATE INDEX errors_project_id_idx ON public.errors (project_id);
- CREATE INDEX errors_project_id_status_idx ON public.errors (project_id, status);
- CREATE INDEX errors_project_id_error_id_js_exception_idx ON public.errors (project_id, error_id) WHERE source = 'js_exception';
- CREATE INDEX errors_project_id_error_id_idx ON public.errors (project_id, error_id);
- CREATE INDEX errors_project_id_error_id_integration_idx ON public.errors (project_id, error_id) WHERE source != 'js_exception';
- CREATE INDEX errors_error_id_idx ON errors (error_id);
- CREATE INDEX errors_parent_error_id_idx ON errors (parent_error_id);
+ CREATE INDEX IF NOT EXISTS errors_project_id_source_idx ON errors (project_id, source);
+ CREATE INDEX IF NOT EXISTS errors_message_gin_idx ON public.errors USING GIN (message gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS errors_name_gin_idx ON public.errors USING GIN (name gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS errors_project_id_idx ON public.errors (project_id);
+ CREATE INDEX IF NOT EXISTS errors_project_id_status_idx ON public.errors (project_id, status);
+ CREATE INDEX IF NOT EXISTS errors_project_id_error_id_js_exception_idx ON public.errors (project_id, error_id) WHERE source = 'js_exception';
+ CREATE INDEX IF NOT EXISTS errors_project_id_error_id_idx ON public.errors (project_id, error_id);
+ CREATE INDEX IF NOT EXISTS errors_project_id_error_id_integration_idx ON public.errors (project_id, error_id) WHERE source != 'js_exception';
+ CREATE INDEX IF NOT EXISTS errors_error_id_idx ON errors (error_id);
+ CREATE INDEX IF NOT EXISTS errors_parent_error_id_idx ON errors (parent_error_id);
- CREATE TABLE user_favorite_errors
+ CREATE TABLE IF NOT EXISTS user_favorite_errors
(
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE,
PRIMARY KEY (user_id, error_id)
);
- CREATE TABLE user_viewed_errors
+ CREATE TABLE IF NOT EXISTS user_viewed_errors
(
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE,
PRIMARY KEY (user_id, error_id)
);
- CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id);
- CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id);
+ CREATE INDEX IF NOT EXISTS user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id);
+ CREATE INDEX IF NOT EXISTS user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id);
+
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'platform') THEN
+ CREATE TYPE platform AS ENUM ('web','ios','android');
+ END IF;
+
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'device_type') THEN
+ CREATE TYPE device_type AS ENUM ('desktop','tablet','mobile','other');
+ END IF;
--- --- sessions.sql ---
- CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other');
- CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS');
- CREATE TYPE platform AS ENUM ('web','ios','android');
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'country') THEN
+ CREATE TYPE country AS ENUM ('UN','RW','SO','YE','IQ','SA','IR','CY','TZ','SY','AM','KE','CD','DJ','UG','CF','SC','JO','LB','KW','OM','QA','BH','AE','IL','TR','ET','ER','EG','SD','GR','BI','EE','LV','AZ','LT','SJ','GE','MD','BY','FI','AX','UA','MK','HU','BG','AL','PL','RO','XK','ZW','ZM','KM','MW','LS','BW','MU','SZ','RE','ZA','YT','MZ','MG','AF','PK','BD','TM','TJ','LK','BT','IN','MV','IO','NP','MM','UZ','KZ','KG','TF','HM','CC','PW','VN','TH','ID','LA','TW','PH','MY','CN','HK','BN','MO','KH','KR','JP','KP','SG','CK','TL','RU','MN','AU','CX','MH','FM','PG','SB','TV','NR','VU','NC','NF','NZ','FJ','LY','CM','SN','CG','PT','LR','CI','GH','GQ','NG','BF','TG','GW','MR','BJ','GA','SL','ST','GI','GM','GN','TD','NE','ML','EH','TN','ES','MA','MT','DZ','FO','DK','IS','GB','CH','SE','NL','AT','BE','DE','LU','IE','MC','FR','AD','LI','JE','IM','GG','SK','CZ','NO','VA','SM','IT','SI','ME','HR','BA','AO','NA','SH','BV','BB','CV','GY','GF','SR','PM','GL','PY','UY','BR','FK','GS','JM','DO','CU','MQ','BS','BM','AI','TT','KN','DM','AG','LC','TC','AW','VG','VC','MS','MF','BL','GP','GD','KY','BZ','SV','GT','HN','NI','CR','VE','EC','CO','PA','HT','AR','CL','BO','PE','MX','PF','PN','KI','TK','TO','WF','WS','NU','MP','GU','PR','VI','UM','AS','CA','US','PS','RS','AQ','SX','CW','BQ','SS');
+ END IF;
- CREATE TABLE sessions
+ CREATE TABLE IF NOT EXISTS sessions
(
session_id bigint PRIMARY KEY,
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
@@ -552,157 +614,262 @@ $$
metadata_8 text DEFAULT NULL,
metadata_9 text DEFAULT NULL,
metadata_10 text DEFAULT NULL
--- ,
--- rehydration_id integer REFERENCES rehydrations(rehydration_id) ON DELETE SET NULL
);
- CREATE INDEX sessions_project_id_start_ts_idx ON sessions (project_id, start_ts);
- CREATE INDEX sessions_project_id_user_id_idx ON sessions (project_id, user_id);
- CREATE INDEX sessions_project_id_user_anonymous_id_idx ON sessions (project_id, user_anonymous_id);
- CREATE INDEX sessions_project_id_user_device_idx ON sessions (project_id, user_device);
- CREATE INDEX sessions_project_id_user_country_idx ON sessions (project_id, user_country);
- CREATE INDEX sessions_project_id_user_browser_idx ON sessions (project_id, user_browser);
- CREATE INDEX sessions_project_id_metadata_1_idx ON sessions (project_id, metadata_1);
- CREATE INDEX sessions_project_id_metadata_2_idx ON sessions (project_id, metadata_2);
- CREATE INDEX sessions_project_id_metadata_3_idx ON sessions (project_id, metadata_3);
- CREATE INDEX sessions_project_id_metadata_4_idx ON sessions (project_id, metadata_4);
- CREATE INDEX sessions_project_id_metadata_5_idx ON sessions (project_id, metadata_5);
- CREATE INDEX sessions_project_id_metadata_6_idx ON sessions (project_id, metadata_6);
- CREATE INDEX sessions_project_id_metadata_7_idx ON sessions (project_id, metadata_7);
- CREATE INDEX sessions_project_id_metadata_8_idx ON sessions (project_id, metadata_8);
- CREATE INDEX sessions_project_id_metadata_9_idx ON sessions (project_id, metadata_9);
- CREATE INDEX sessions_project_id_metadata_10_idx ON sessions (project_id, metadata_10);
- CREATE INDEX sessions_project_id_watchdogs_score_idx ON sessions (project_id, watchdogs_score DESC);
- CREATE INDEX sessions_platform_idx ON public.sessions (platform);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_start_ts_idx ON sessions (project_id, start_ts);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_user_id_idx ON sessions (project_id, user_id);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_user_anonymous_id_idx ON sessions (project_id, user_anonymous_id);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_user_device_idx ON sessions (project_id, user_device);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_user_country_idx ON sessions (project_id, user_country);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_user_browser_idx ON sessions (project_id, user_browser);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_metadata_1_idx ON sessions (project_id, metadata_1);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_metadata_2_idx ON sessions (project_id, metadata_2);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_metadata_3_idx ON sessions (project_id, metadata_3);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_metadata_4_idx ON sessions (project_id, metadata_4);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_metadata_5_idx ON sessions (project_id, metadata_5);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_metadata_6_idx ON sessions (project_id, metadata_6);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_metadata_7_idx ON sessions (project_id, metadata_7);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_metadata_8_idx ON sessions (project_id, metadata_8);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_metadata_9_idx ON sessions (project_id, metadata_9);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_metadata_10_idx ON sessions (project_id, metadata_10);
+ CREATE INDEX IF NOT EXISTS sessions_project_id_watchdogs_score_idx ON sessions (project_id, watchdogs_score DESC);
+ CREATE INDEX IF NOT EXISTS sessions_platform_idx ON public.sessions (platform);
- CREATE INDEX sessions_metadata1_gin_idx ON public.sessions USING GIN (metadata_1 gin_trgm_ops);
- CREATE INDEX sessions_metadata2_gin_idx ON public.sessions USING GIN (metadata_2 gin_trgm_ops);
- CREATE INDEX sessions_metadata3_gin_idx ON public.sessions USING GIN (metadata_3 gin_trgm_ops);
- CREATE INDEX sessions_metadata4_gin_idx ON public.sessions USING GIN (metadata_4 gin_trgm_ops);
- CREATE INDEX sessions_metadata5_gin_idx ON public.sessions USING GIN (metadata_5 gin_trgm_ops);
- CREATE INDEX sessions_metadata6_gin_idx ON public.sessions USING GIN (metadata_6 gin_trgm_ops);
- CREATE INDEX sessions_metadata7_gin_idx ON public.sessions USING GIN (metadata_7 gin_trgm_ops);
- CREATE INDEX sessions_metadata8_gin_idx ON public.sessions USING GIN (metadata_8 gin_trgm_ops);
- CREATE INDEX sessions_metadata9_gin_idx ON public.sessions USING GIN (metadata_9 gin_trgm_ops);
- CREATE INDEX sessions_metadata10_gin_idx ON public.sessions USING GIN (metadata_10 gin_trgm_ops);
- CREATE INDEX sessions_user_os_gin_idx ON public.sessions USING GIN (user_os gin_trgm_ops);
- CREATE INDEX sessions_user_browser_gin_idx ON public.sessions USING GIN (user_browser gin_trgm_ops);
- CREATE INDEX sessions_user_device_gin_idx ON public.sessions USING GIN (user_device gin_trgm_ops);
- CREATE INDEX sessions_user_id_gin_idx ON public.sessions USING GIN (user_id gin_trgm_ops);
- CREATE INDEX sessions_user_anonymous_id_gin_idx ON public.sessions USING GIN (user_anonymous_id gin_trgm_ops);
- CREATE INDEX sessions_user_country_gin_idx ON public.sessions (project_id, user_country);
- CREATE INDEX sessions_start_ts_idx ON public.sessions (start_ts) WHERE duration > 0;
- CREATE INDEX sessions_project_id_idx ON public.sessions (project_id) WHERE duration > 0;
- CREATE INDEX sessions_session_id_project_id_start_ts_idx ON sessions (session_id, project_id, start_ts) WHERE duration > 0;
- CREATE INDEX sessions_session_id_project_id_start_ts_durationNN_idx ON sessions (session_id, project_id, start_ts) WHERE duration IS NOT NULL;
- CREATE INDEX sessions_user_id_useridNN_idx ON sessions (user_id) WHERE user_id IS NOT NULL;
- CREATE INDEX sessions_uid_projectid_startts_sessionid_uidNN_durGTZ_idx ON sessions (user_id, project_id, start_ts, session_id) WHERE user_id IS NOT NULL AND duration > 0;
- CREATE INDEX sessions_utm_source_gin_idx ON public.sessions USING GIN (utm_source gin_trgm_ops);
- CREATE INDEX sessions_utm_medium_gin_idx ON public.sessions USING GIN (utm_medium gin_trgm_ops);
- CREATE INDEX sessions_utm_campaign_gin_idx ON public.sessions USING GIN (utm_campaign gin_trgm_ops);
-
- ALTER TABLE public.sessions
- ADD CONSTRAINT web_browser_constraint CHECK (
- (sessions.platform = 'web' AND sessions.user_browser NOTNULL) OR
- (sessions.platform != 'web' AND sessions.user_browser ISNULL));
-
- ALTER TABLE public.sessions
- ADD CONSTRAINT web_user_browser_version_constraint CHECK ( sessions.platform = 'web' OR sessions.user_browser_version ISNULL);
-
- ALTER TABLE public.sessions
- ADD CONSTRAINT web_user_agent_constraint CHECK (
- (sessions.platform = 'web' AND sessions.user_agent NOTNULL) OR
- (sessions.platform != 'web' AND sessions.user_agent ISNULL));
-
-
- CREATE TABLE user_viewed_sessions
+ CREATE INDEX IF NOT EXISTS sessions_metadata1_gin_idx ON public.sessions USING GIN (metadata_1 gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_metadata2_gin_idx ON public.sessions USING GIN (metadata_2 gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_metadata3_gin_idx ON public.sessions USING GIN (metadata_3 gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_metadata4_gin_idx ON public.sessions USING GIN (metadata_4 gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_metadata5_gin_idx ON public.sessions USING GIN (metadata_5 gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_metadata6_gin_idx ON public.sessions USING GIN (metadata_6 gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_metadata7_gin_idx ON public.sessions USING GIN (metadata_7 gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_metadata8_gin_idx ON public.sessions USING GIN (metadata_8 gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_metadata9_gin_idx ON public.sessions USING GIN (metadata_9 gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_metadata10_gin_idx ON public.sessions USING GIN (metadata_10 gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_user_os_gin_idx ON public.sessions USING GIN (user_os gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_user_browser_gin_idx ON public.sessions USING GIN (user_browser gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_user_device_gin_idx ON public.sessions USING GIN (user_device gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_user_id_gin_idx ON public.sessions USING GIN (user_id gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_user_anonymous_id_gin_idx ON public.sessions USING GIN (user_anonymous_id gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_user_country_gin_idx ON public.sessions (project_id, user_country);
+ CREATE INDEX IF NOT EXISTS sessions_start_ts_idx ON public.sessions (start_ts) WHERE duration > 0;
+ CREATE INDEX IF NOT EXISTS sessions_project_id_idx ON public.sessions (project_id) WHERE duration > 0;
+ CREATE INDEX IF NOT EXISTS sessions_session_id_project_id_start_ts_idx ON sessions (session_id, project_id, start_ts) WHERE duration > 0;
+ CREATE INDEX IF NOT EXISTS sessions_session_id_project_id_start_ts_durationNN_idx ON sessions (session_id, project_id, start_ts) WHERE duration IS NOT NULL;
+ CREATE INDEX IF NOT EXISTS sessions_user_id_useridNN_idx ON sessions (user_id) WHERE user_id IS NOT NULL;
+ CREATE INDEX IF NOT EXISTS sessions_uid_projectid_startts_sessionid_uidNN_durGTZ_idx ON sessions (user_id, project_id, start_ts, session_id) WHERE user_id IS NOT NULL AND duration > 0;
+ CREATE INDEX IF NOT EXISTS sessions_utm_source_gin_idx ON public.sessions USING GIN (utm_source gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_utm_medium_gin_idx ON public.sessions USING GIN (utm_medium gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS sessions_utm_campaign_gin_idx ON public.sessions USING GIN (utm_campaign gin_trgm_ops);
+ BEGIN
+ ALTER TABLE public.sessions
+ ADD CONSTRAINT web_browser_constraint CHECK (
+ (sessions.platform = 'web' AND sessions.user_browser NOTNULL) OR
+ (sessions.platform != 'web' AND sessions.user_browser ISNULL));
+ EXCEPTION
+ WHEN duplicate_object THEN RAISE NOTICE 'Table constraint exists';
+ END;
+ BEGIN
+ ALTER TABLE public.sessions
+ ADD CONSTRAINT web_user_browser_version_constraint CHECK (
+ sessions.platform = 'web' OR sessions.user_browser_version ISNULL);
+ EXCEPTION
+ WHEN duplicate_object THEN RAISE NOTICE 'Table constraint exists';
+ END;
+ BEGIN
+ ALTER TABLE public.sessions
+ ADD CONSTRAINT web_user_agent_constraint CHECK (
+ (sessions.platform = 'web' AND sessions.user_agent NOTNULL) OR
+ (sessions.platform != 'web' AND sessions.user_agent ISNULL));
+ EXCEPTION
+ WHEN duplicate_object THEN RAISE NOTICE 'Table constraint already exists';
+ END;
+ CREATE TABLE IF NOT EXISTS user_viewed_sessions
(
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
PRIMARY KEY (user_id, session_id)
);
- CREATE TABLE user_favorite_sessions
+ CREATE TABLE IF NOT EXISTS user_favorite_sessions
(
user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
PRIMARY KEY (user_id, session_id)
);
- CREATE INDEX user_favorite_sessions_user_id_session_id_idx ON user_favorite_sessions (user_id, session_id);
+ CREATE INDEX IF NOT EXISTS user_favorite_sessions_user_id_session_id_idx ON user_favorite_sessions (user_id, session_id);
--- --- assignments.sql ---
- create table assigned_sessions
+ CREATE TABLE IF NOT EXISTS assigned_sessions
(
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
issue_id text NOT NULL,
provider oauth_provider NOT NULL,
created_by integer NOT NULL,
created_at timestamp default timezone('utc'::text, now()) NOT NULL,
- provider_data jsonb default '{}'::jsonb NOT NULL
+ provider_data jsonb default '{}'::jsonb NOT NULL
);
- CREATE INDEX assigned_sessions_session_id_idx ON assigned_sessions (session_id);
+ CREATE INDEX IF NOT EXISTS assigned_sessions_session_id_idx ON assigned_sessions (session_id);
--- --- events_common.sql ---
- CREATE SCHEMA IF NOT EXISTS events_common;
-
- CREATE TYPE events_common.custom_level AS ENUM ('info','error');
-
- CREATE TABLE events_common.customs
+ CREATE TABLE IF NOT EXISTS autocomplete
(
- session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
- timestamp bigint NOT NULL,
- seq_index integer NOT NULL,
- name text NOT NULL,
- payload jsonb NOT NULL,
- level events_common.custom_level NOT NULL DEFAULT 'info',
- PRIMARY KEY (session_id, timestamp, seq_index)
+ value text NOT NULL,
+ type text NOT NULL,
+ project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE
);
- CREATE INDEX customs_name_idx ON events_common.customs (name);
- CREATE INDEX customs_name_gin_idx ON events_common.customs USING GIN (name gin_trgm_ops);
- CREATE INDEX customs_timestamp_idx ON events_common.customs (timestamp);
+ CREATE UNIQUE INDEX IF NOT EXISTS autocomplete_unique ON autocomplete (project_id, value, type);
+ CREATE INDEX IF NOT EXISTS autocomplete_project_id_idx ON autocomplete (project_id);
+ CREATE INDEX IF NOT EXISTS autocomplete_type_idx ON public.autocomplete (type);
+ CREATE INDEX IF NOT EXISTS autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops);
- CREATE TABLE events_common.issues
+ BEGIN
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'job_status') THEN
+ CREATE TYPE job_status AS ENUM ('scheduled','running','cancelled','failed','completed');
+ END IF;
+ END;
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'job_action') THEN
+ CREATE TYPE job_action AS ENUM ('delete_user_data');
+ END IF;
+
+ CREATE TABLE IF NOT EXISTS jobs
(
- session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
- timestamp bigint NOT NULL,
- seq_index integer NOT NULL,
- issue_id text NOT NULL REFERENCES issues (issue_id) ON DELETE CASCADE,
- payload jsonb DEFAULT NULL,
- PRIMARY KEY (session_id, timestamp, seq_index)
+ job_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+ description text NOT NULL,
+ status job_status NOT NULL,
+ project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+ action job_action NOT NULL,
+ reference_id text NOT NULL,
+ created_at timestamp default timezone('utc'::text, now()) NOT NULL,
+ updated_at timestamp default timezone('utc'::text, now()) NULL,
+ start_at timestamp NOT NULL,
+ errors text NULL
);
- CREATE INDEX issues_issue_id_timestamp_idx ON events_common.issues (issue_id, timestamp);
- CREATE INDEX issues_timestamp_idx ON events_common.issues (timestamp);
+ CREATE INDEX IF NOT EXISTS jobs_status_idx ON jobs (status);
+ CREATE INDEX IF NOT EXISTS jobs_start_at_idx ON jobs (start_at);
+ CREATE INDEX IF NOT EXISTS jobs_project_id_idx ON jobs (project_id);
- CREATE TABLE events_common.requests
+ CREATE TABLE IF NOT EXISTS traces
(
- session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
- timestamp bigint NOT NULL,
- seq_index integer NOT NULL,
- url text NOT NULL,
- duration integer NOT NULL,
- success boolean NOT NULL,
- PRIMARY KEY (session_id, timestamp, seq_index)
+ user_id integer NULL REFERENCES users (user_id) ON DELETE CASCADE,
+ tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE,
+ created_at bigint NOT NULL DEFAULT (EXTRACT(EPOCH FROM now() at time zone 'utc') * 1000)::bigint,
+ auth text NULL,
+ action text NOT NULL,
+ method text NOT NULL,
+ path_format text NOT NULL,
+ endpoint text NOT NULL,
+ payload jsonb NULL,
+ parameters jsonb NULL,
+ status int NULL
);
- CREATE INDEX requests_url_idx ON events_common.requests (url);
- CREATE INDEX requests_duration_idx ON events_common.requests (duration);
- CREATE INDEX requests_url_gin_idx ON events_common.requests USING GIN (url gin_trgm_ops);
- CREATE INDEX requests_timestamp_idx ON events_common.requests (timestamp);
- CREATE INDEX requests_url_gin_idx2 ON events_common.requests USING GIN (RIGHT(url, length(url) - (CASE
- WHEN url LIKE 'http://%'
- THEN 7
- WHEN url LIKE 'https://%'
- THEN 8
- ELSE 0 END))
- gin_trgm_ops);
- CREATE INDEX requests_timestamp_session_id_failed_idx ON events_common.requests (timestamp, session_id) WHERE success = FALSE;
+ CREATE INDEX IF NOT EXISTS traces_user_id_idx ON traces (user_id);
+ CREATE INDEX IF NOT EXISTS traces_tenant_id_idx ON traces (tenant_id);
--- --- events.sql ---
- CREATE SCHEMA IF NOT EXISTS events;
+ CREATE TABLE IF NOT EXISTS metrics
+ (
+ metric_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+ project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+ user_id integer REFERENCES users (user_id) ON DELETE SET NULL,
+ name text NOT NULL,
+ is_public boolean NOT NULL DEFAULT FALSE,
+ active boolean NOT NULL DEFAULT TRUE,
+ created_at timestamp default timezone('utc'::text, now()) not null,
+ deleted_at timestamp
+ );
+ CREATE INDEX IF NOT EXISTS metrics_user_id_is_public_idx ON public.metrics (user_id, is_public);
+ CREATE TABLE IF NOT EXISTS metric_series
+ (
+ series_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+ metric_id integer REFERENCES metrics (metric_id) ON DELETE CASCADE,
+ index integer NOT NULL,
+ name text NULL,
+ filter jsonb NOT NULL,
+ created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
+ deleted_at timestamp
+ );
+ CREATE INDEX IF NOT EXISTS metric_series_metric_id_idx ON public.metric_series (metric_id);
- CREATE TABLE events.pages
+ CREATE TABLE IF NOT EXISTS searches
+ (
+ search_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+ project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+ user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
+ name text not null,
+ filter jsonb not null,
+ created_at timestamp default timezone('utc'::text, now()) not null,
+ deleted_at timestamp,
+ is_public boolean NOT NULL DEFAULT False
+ );
+
+ CREATE INDEX IF NOT EXISTS searches_user_id_is_public_idx ON public.searches (user_id, is_public);
+ CREATE INDEX IF NOT EXISTS searches_project_id_idx ON public.searches (project_id);
+
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'alert_detection_method') THEN
+ CREATE TYPE alert_detection_method AS ENUM ('threshold', 'change');
+ END IF;
+ CREATE TABLE IF NOT EXISTS alerts
+ (
+ alert_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+ project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
+ series_id integer NULL REFERENCES metric_series (series_id) ON DELETE CASCADE,
+ name text NOT NULL,
+ description text NULL DEFAULT NULL,
+ active boolean NOT NULL DEFAULT TRUE,
+ detection_method alert_detection_method NOT NULL,
+ query jsonb NOT NULL,
+ deleted_at timestamp NULL DEFAULT NULL,
+ created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
+ options jsonb NOT NULL DEFAULT '{
+ "renotifyInterval": 1440
+ }'::jsonb
+ );
+ CREATE INDEX IF NOT EXISTS alerts_project_id_idx ON alerts (project_id);
+ CREATE INDEX IF NOT EXISTS alerts_series_id_idx ON alerts (series_id);
+
+ DROP TRIGGER IF EXISTS on_insert_or_update_or_delete ON alerts;
+
+ CREATE TRIGGER on_insert_or_update_or_delete
+ AFTER INSERT OR UPDATE OR DELETE
+ ON alerts
+ FOR EACH ROW
+ EXECUTE PROCEDURE notify_alert();
+
+ RAISE NOTICE 'Created missing public schema tables';
+ END IF;
+ END;
+
+$$
+LANGUAGE plpgsql;
+
+
+DO
+$$
+ BEGIN
+ IF (with to_check (name) as (
+ values ('clicks'),
+ ('errors'),
+ ('graphql'),
+ ('inputs'),
+ ('pages'),
+ ('performance'),
+ ('resources'),
+ ('state_actions')
+ )
+ select bool_and(exists(select *
+ from information_schema.tables t
+ where table_schema = 'events'
+ AND table_name = to_check.name)) as all_present
+ from to_check) THEN
+ raise notice 'All events schema tables exists';
+ ELSE
+ CREATE TABLE IF NOT EXISTS events.pages
(
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
message_id bigint NOT NULL,
@@ -725,48 +892,49 @@ $$
ttfb integer DEFAULT NULL,
PRIMARY KEY (session_id, message_id)
);
- CREATE INDEX pages_session_id_idx ON events.pages (session_id);
- CREATE INDEX pages_base_path_gin_idx ON events.pages USING GIN (base_path gin_trgm_ops);
- CREATE INDEX pages_base_referrer_gin_idx ON events.pages USING GIN (base_referrer gin_trgm_ops);
- CREATE INDEX pages_timestamp_idx ON events.pages (timestamp);
- CREATE INDEX pages_session_id_timestamp_idx ON events.pages (session_id, timestamp);
- CREATE INDEX pages_base_path_gin_idx2 ON events.pages USING GIN (RIGHT(base_path, length(base_path) - 1) gin_trgm_ops);
- CREATE INDEX pages_base_path_idx ON events.pages (base_path);
- CREATE INDEX pages_base_path_idx2 ON events.pages (RIGHT(base_path, length(base_path) - 1));
- CREATE INDEX pages_base_referrer_idx ON events.pages (base_referrer);
- CREATE INDEX pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_referrer,
- length(base_referrer) - (CASE
- WHEN base_referrer LIKE 'http://%'
- THEN 7
- WHEN base_referrer LIKE 'https://%'
- THEN 8
- ELSE 0 END))
- gin_trgm_ops);
- CREATE INDEX pages_response_time_idx ON events.pages (response_time);
- CREATE INDEX pages_response_end_idx ON events.pages (response_end);
- CREATE INDEX pages_path_gin_idx ON events.pages USING GIN (path gin_trgm_ops);
- CREATE INDEX pages_path_idx ON events.pages (path);
- CREATE INDEX pages_visually_complete_idx ON events.pages (visually_complete) WHERE visually_complete > 0;
- CREATE INDEX pages_dom_building_time_idx ON events.pages (dom_building_time) WHERE dom_building_time > 0;
- CREATE INDEX pages_load_time_idx ON events.pages (load_time) WHERE load_time > 0;
- CREATE INDEX pages_first_contentful_paint_time_idx ON events.pages (first_contentful_paint_time) WHERE first_contentful_paint_time > 0;
- CREATE INDEX pages_dom_content_loaded_time_idx ON events.pages (dom_content_loaded_time) WHERE dom_content_loaded_time > 0;
- CREATE INDEX pages_first_paint_time_idx ON events.pages (first_paint_time) WHERE first_paint_time > 0;
- CREATE INDEX pages_ttfb_idx ON events.pages (ttfb) WHERE ttfb > 0;
- CREATE INDEX pages_time_to_interactive_idx ON events.pages (time_to_interactive) WHERE time_to_interactive > 0;
- CREATE INDEX pages_session_id_timestamp_loadgt0NN_idx ON events.pages (session_id, timestamp) WHERE load_time > 0 AND load_time IS NOT NULL;
- CREATE INDEX pages_session_id_timestamp_visualgt0nn_idx ON events.pages (session_id, timestamp) WHERE visually_complete > 0 AND visually_complete IS NOT NULL;
- CREATE INDEX pages_timestamp_metgt0_idx ON events.pages (timestamp) WHERE response_time > 0 OR
- first_paint_time > 0 OR
- dom_content_loaded_time > 0 OR
- ttfb > 0 OR
- time_to_interactive > 0;
- CREATE INDEX pages_session_id_speed_indexgt0nn_idx ON events.pages (session_id, speed_index) WHERE speed_index > 0 AND speed_index IS NOT NULL;
- CREATE INDEX pages_session_id_timestamp_dom_building_timegt0nn_idx ON events.pages (session_id, timestamp, dom_building_time) WHERE dom_building_time > 0 AND dom_building_time IS NOT NULL;
- CREATE INDEX pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp);
- CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2;
+ CREATE INDEX IF NOT EXISTS pages_session_id_idx ON events.pages (session_id);
+ CREATE INDEX IF NOT EXISTS pages_base_path_gin_idx ON events.pages USING GIN (base_path gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS pages_base_referrer_gin_idx ON events.pages USING GIN (base_referrer gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS pages_timestamp_idx ON events.pages (timestamp);
+ CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_idx ON events.pages (session_id, timestamp);
+ CREATE INDEX IF NOT EXISTS pages_base_path_gin_idx2 ON events.pages USING GIN (RIGHT(base_path, length(base_path) - 1) gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS pages_base_path_idx ON events.pages (base_path);
+ CREATE INDEX IF NOT EXISTS pages_base_path_idx2 ON events.pages (RIGHT(base_path, length(base_path) - 1));
+ CREATE INDEX IF NOT EXISTS pages_base_referrer_idx ON events.pages (base_referrer);
+ CREATE INDEX IF NOT EXISTS pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_referrer,
+ length(base_referrer) -
+ (CASE
+ WHEN base_referrer LIKE 'http://%'
+ THEN 7
+ WHEN base_referrer LIKE 'https://%'
+ THEN 8
+ ELSE 0 END))
+ gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS pages_response_time_idx ON events.pages (response_time);
+ CREATE INDEX IF NOT EXISTS pages_response_end_idx ON events.pages (response_end);
+ CREATE INDEX IF NOT EXISTS pages_path_gin_idx ON events.pages USING GIN (path gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS pages_path_idx ON events.pages (path);
+ CREATE INDEX IF NOT EXISTS pages_visually_complete_idx ON events.pages (visually_complete) WHERE visually_complete > 0;
+ CREATE INDEX IF NOT EXISTS pages_dom_building_time_idx ON events.pages (dom_building_time) WHERE dom_building_time > 0;
+ CREATE INDEX IF NOT EXISTS pages_load_time_idx ON events.pages (load_time) WHERE load_time > 0;
+ CREATE INDEX IF NOT EXISTS pages_first_contentful_paint_time_idx ON events.pages (first_contentful_paint_time) WHERE first_contentful_paint_time > 0;
+ CREATE INDEX IF NOT EXISTS pages_dom_content_loaded_time_idx ON events.pages (dom_content_loaded_time) WHERE dom_content_loaded_time > 0;
+ CREATE INDEX IF NOT EXISTS pages_first_paint_time_idx ON events.pages (first_paint_time) WHERE first_paint_time > 0;
+ CREATE INDEX IF NOT EXISTS pages_ttfb_idx ON events.pages (ttfb) WHERE ttfb > 0;
+ CREATE INDEX IF NOT EXISTS pages_time_to_interactive_idx ON events.pages (time_to_interactive) WHERE time_to_interactive > 0;
+ CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_loadgt0NN_idx ON events.pages (session_id, timestamp) WHERE load_time > 0 AND load_time IS NOT NULL;
+ CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_visualgt0nn_idx ON events.pages (session_id, timestamp) WHERE visually_complete > 0 AND visually_complete IS NOT NULL;
+ CREATE INDEX IF NOT EXISTS pages_timestamp_metgt0_idx ON events.pages (timestamp) WHERE response_time > 0 OR
+ first_paint_time > 0 OR
+ dom_content_loaded_time > 0 OR
+ ttfb > 0 OR
+ time_to_interactive > 0;
+ CREATE INDEX IF NOT EXISTS pages_session_id_speed_indexgt0nn_idx ON events.pages (session_id, speed_index) WHERE speed_index > 0 AND speed_index IS NOT NULL;
+ CREATE INDEX IF NOT EXISTS pages_session_id_timestamp_dom_building_timegt0nn_idx ON events.pages (session_id, timestamp, dom_building_time) WHERE dom_building_time > 0 AND dom_building_time IS NOT NULL;
+ CREATE INDEX IF NOT EXISTS pages_base_path_session_id_timestamp_idx ON events.pages (base_path, session_id, timestamp);
+ CREATE INDEX IF NOT EXISTS pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2;
- CREATE TABLE events.clicks
+ CREATE TABLE IF NOT EXISTS events.clicks
(
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
message_id bigint NOT NULL,
@@ -776,18 +944,18 @@ $$
selector text DEFAULT '' NOT NULL,
PRIMARY KEY (session_id, message_id)
);
- CREATE INDEX clicks_session_id_idx ON events.clicks (session_id);
- CREATE INDEX clicks_label_idx ON events.clicks (label);
- CREATE INDEX clicks_label_gin_idx ON events.clicks USING GIN (label gin_trgm_ops);
- CREATE INDEX clicks_timestamp_idx ON events.clicks (timestamp);
- CREATE INDEX clicks_label_session_id_timestamp_idx ON events.clicks (label, session_id, timestamp);
- CREATE INDEX clicks_url_idx ON events.clicks (url);
- CREATE INDEX clicks_url_gin_idx ON events.clicks USING GIN (url gin_trgm_ops);
- CREATE INDEX clicks_url_session_id_timestamp_selector_idx ON events.clicks (url, session_id, timestamp, selector);
- CREATE INDEX clicks_session_id_timestamp_idx ON events.clicks (session_id, timestamp);
+ CREATE INDEX IF NOT EXISTS clicks_session_id_idx ON events.clicks (session_id);
+ CREATE INDEX IF NOT EXISTS clicks_label_idx ON events.clicks (label);
+ CREATE INDEX IF NOT EXISTS clicks_label_gin_idx ON events.clicks USING GIN (label gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS clicks_timestamp_idx ON events.clicks (timestamp);
+ CREATE INDEX IF NOT EXISTS clicks_label_session_id_timestamp_idx ON events.clicks (label, session_id, timestamp);
+ CREATE INDEX IF NOT EXISTS clicks_url_idx ON events.clicks (url);
+ CREATE INDEX IF NOT EXISTS clicks_url_gin_idx ON events.clicks USING GIN (url gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS clicks_url_session_id_timestamp_selector_idx ON events.clicks (url, session_id, timestamp, selector);
+ CREATE INDEX IF NOT EXISTS clicks_session_id_timestamp_idx ON events.clicks (session_id, timestamp);
- CREATE TABLE events.inputs
+ CREATE TABLE IF NOT EXISTS events.inputs
(
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
message_id bigint NOT NULL,
@@ -796,14 +964,14 @@ $$
value text DEFAULT NULL,
PRIMARY KEY (session_id, message_id)
);
- CREATE INDEX inputs_session_id_idx ON events.inputs (session_id);
- CREATE INDEX inputs_label_value_idx ON events.inputs (label, value);
- CREATE INDEX inputs_label_gin_idx ON events.inputs USING GIN (label gin_trgm_ops);
- CREATE INDEX inputs_label_idx ON events.inputs (label);
- CREATE INDEX inputs_timestamp_idx ON events.inputs (timestamp);
- CREATE INDEX inputs_label_session_id_timestamp_idx ON events.inputs (label, session_id, timestamp);
+ CREATE INDEX IF NOT EXISTS inputs_session_id_idx ON events.inputs (session_id);
+ CREATE INDEX IF NOT EXISTS inputs_label_value_idx ON events.inputs (label, value);
+ CREATE INDEX IF NOT EXISTS inputs_label_gin_idx ON events.inputs USING GIN (label gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS inputs_label_idx ON events.inputs (label);
+ CREATE INDEX IF NOT EXISTS inputs_timestamp_idx ON events.inputs (timestamp);
+ CREATE INDEX IF NOT EXISTS inputs_label_session_id_timestamp_idx ON events.inputs (label, session_id, timestamp);
- CREATE TABLE events.errors
+ CREATE TABLE IF NOT EXISTS events.errors
(
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
message_id bigint NOT NULL,
@@ -811,16 +979,16 @@ $$
error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE,
PRIMARY KEY (session_id, message_id)
);
- CREATE INDEX errors_session_id_idx ON events.errors (session_id);
- CREATE INDEX errors_timestamp_idx ON events.errors (timestamp);
- CREATE INDEX errors_session_id_timestamp_error_id_idx ON events.errors (session_id, timestamp, error_id);
- CREATE INDEX errors_error_id_timestamp_idx ON events.errors (error_id, timestamp);
- CREATE INDEX errors_timestamp_error_id_session_id_idx ON events.errors (timestamp, error_id, session_id);
- CREATE INDEX errors_error_id_timestamp_session_id_idx ON events.errors (error_id, timestamp, session_id);
- CREATE INDEX errors_error_id_idx ON events.errors (error_id);
+ CREATE INDEX IF NOT EXISTS errors_session_id_idx ON events.errors (session_id);
+ CREATE INDEX IF NOT EXISTS errors_timestamp_idx ON events.errors (timestamp);
+ CREATE INDEX IF NOT EXISTS errors_session_id_timestamp_error_id_idx ON events.errors (session_id, timestamp, error_id);
+ CREATE INDEX IF NOT EXISTS errors_error_id_timestamp_idx ON events.errors (error_id, timestamp);
+ CREATE INDEX IF NOT EXISTS errors_timestamp_error_id_session_id_idx ON events.errors (timestamp, error_id, session_id);
+ CREATE INDEX IF NOT EXISTS errors_error_id_timestamp_session_id_idx ON events.errors (error_id, timestamp, session_id);
+ CREATE INDEX IF NOT EXISTS errors_error_id_idx ON events.errors (error_id);
- CREATE TABLE events.graphql
+ CREATE TABLE IF NOT EXISTS events.graphql
(
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
message_id bigint NOT NULL,
@@ -828,11 +996,11 @@ $$
name text NOT NULL,
PRIMARY KEY (session_id, message_id)
);
- CREATE INDEX graphql_name_idx ON events.graphql (name);
- CREATE INDEX graphql_name_gin_idx ON events.graphql USING GIN (name gin_trgm_ops);
- CREATE INDEX graphql_timestamp_idx ON events.graphql (timestamp);
+ CREATE INDEX IF NOT EXISTS graphql_name_idx ON events.graphql (name);
+ CREATE INDEX IF NOT EXISTS graphql_name_gin_idx ON events.graphql USING GIN (name gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS graphql_timestamp_idx ON events.graphql (timestamp);
- CREATE TABLE events.state_actions
+ CREATE TABLE IF NOT EXISTS events.state_actions
(
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
message_id bigint NOT NULL,
@@ -840,13 +1008,21 @@ $$
name text NOT NULL,
PRIMARY KEY (session_id, message_id)
);
- CREATE INDEX state_actions_name_idx ON events.state_actions (name);
- CREATE INDEX state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops);
- CREATE INDEX state_actions_timestamp_idx ON events.state_actions (timestamp);
+ CREATE INDEX IF NOT EXISTS state_actions_name_idx ON events.state_actions (name);
+ CREATE INDEX IF NOT EXISTS state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS state_actions_timestamp_idx ON events.state_actions (timestamp);
- CREATE TYPE events.resource_type AS ENUM ('other', 'script', 'stylesheet', 'fetch', 'img', 'media');
- CREATE TYPE events.resource_method AS ENUM ('GET' , 'HEAD' , 'POST' , 'PUT' , 'DELETE' , 'CONNECT' , 'OPTIONS' , 'TRACE' , 'PATCH' );
- CREATE TABLE events.resources
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'resource_type') THEN
+ CREATE TYPE events.resource_type AS ENUM ('other', 'script', 'stylesheet', 'fetch', 'img', 'media');
+ END IF;
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'resource_method') THEN
+ CREATE TYPE events.resource_method AS ENUM ('GET' , 'HEAD' , 'POST' , 'PUT' , 'DELETE' , 'CONNECT' , 'OPTIONS' , 'TRACE' , 'PATCH' );
+ END IF;
+ CREATE TABLE IF NOT EXISTS events.resources
(
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
message_id bigint NOT NULL,
@@ -865,28 +1041,29 @@ $$
decoded_body_size integer NULL,
PRIMARY KEY (session_id, message_id)
);
- CREATE INDEX resources_session_id_idx ON events.resources (session_id);
- CREATE INDEX resources_status_idx ON events.resources (status);
- CREATE INDEX resources_type_idx ON events.resources (type);
- CREATE INDEX resources_duration_durationgt0_idx ON events.resources (duration) WHERE duration > 0;
- CREATE INDEX resources_url_host_idx ON events.resources (url_host);
- CREATE INDEX resources_timestamp_idx ON events.resources (timestamp);
- CREATE INDEX resources_success_idx ON events.resources (success);
+ CREATE INDEX IF NOT EXISTS resources_session_id_idx ON events.resources (session_id);
+ CREATE INDEX IF NOT EXISTS resources_status_idx ON events.resources (status);
+ CREATE INDEX IF NOT EXISTS resources_type_idx ON events.resources (type);
+ CREATE INDEX IF NOT EXISTS resources_duration_durationgt0_idx ON events.resources (duration) WHERE duration > 0;
+ CREATE INDEX IF NOT EXISTS resources_url_host_idx ON events.resources (url_host);
+ CREATE INDEX IF NOT EXISTS resources_timestamp_idx ON events.resources (timestamp);
+ CREATE INDEX IF NOT EXISTS resources_success_idx ON events.resources (success);
- CREATE INDEX resources_url_gin_idx ON events.resources USING GIN (url gin_trgm_ops);
- CREATE INDEX resources_url_idx ON events.resources (url);
- CREATE INDEX resources_url_hostpath_gin_idx ON events.resources USING GIN (url_hostpath gin_trgm_ops);
- CREATE INDEX resources_url_hostpath_idx ON events.resources (url_hostpath);
- CREATE INDEX resources_timestamp_type_durationgt0NN_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL;
- CREATE INDEX resources_session_id_timestamp_idx ON events.resources (session_id, timestamp);
- CREATE INDEX resources_session_id_timestamp_type_idx ON events.resources (session_id, timestamp, type);
- CREATE INDEX resources_timestamp_type_durationgt0NN_noFetch_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL AND type != 'fetch';
- CREATE INDEX resources_session_id_timestamp_url_host_fail_idx ON events.resources (session_id, timestamp, url_host) WHERE success = FALSE;
- CREATE INDEX resources_session_id_timestamp_url_host_firstparty_idx ON events.resources (session_id, timestamp, url_host) WHERE type IN ('fetch', 'script');
- CREATE INDEX resources_session_id_timestamp_duration_durationgt0NN_img_idx ON events.resources (session_id, timestamp, duration) WHERE duration > 0 AND duration IS NOT NULL AND type = 'img';
- CREATE INDEX resources_timestamp_session_id_idx ON events.resources (timestamp, session_id);
+ CREATE INDEX IF NOT EXISTS resources_url_gin_idx ON events.resources USING GIN (url gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS resources_url_idx ON events.resources (url);
+ CREATE INDEX IF NOT EXISTS resources_url_hostpath_gin_idx ON events.resources USING GIN (url_hostpath gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS resources_url_hostpath_idx ON events.resources (url_hostpath);
+ CREATE INDEX IF NOT EXISTS resources_timestamp_type_durationgt0NN_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL;
+ CREATE INDEX IF NOT EXISTS resources_session_id_timestamp_idx ON events.resources (session_id, timestamp);
+ CREATE INDEX IF NOT EXISTS resources_session_id_timestamp_type_idx ON events.resources (session_id, timestamp, type);
+ CREATE INDEX IF NOT EXISTS resources_timestamp_type_durationgt0NN_noFetch_idx ON events.resources (timestamp, type) WHERE duration > 0 AND duration IS NOT NULL AND type != 'fetch';
+ CREATE INDEX IF NOT EXISTS resources_session_id_timestamp_url_host_fail_idx ON events.resources (session_id, timestamp, url_host) WHERE success = FALSE;
+ CREATE INDEX IF NOT EXISTS resources_session_id_timestamp_url_host_firstparty_idx ON events.resources (session_id, timestamp, url_host) WHERE type IN ('fetch', 'script');
+ CREATE INDEX IF NOT EXISTS resources_session_id_timestamp_duration_durationgt0NN_img_idx ON events.resources (session_id, timestamp, duration) WHERE duration > 0 AND duration IS NOT NULL AND type = 'img';
+ CREATE INDEX IF NOT EXISTS resources_timestamp_session_id_idx ON events.resources (timestamp, session_id);
+ CREATE INDEX IF NOT EXISTS resources_timestamp_duration_durationgt0NN_idx ON events.resources (timestamp, duration) WHERE duration > 0 AND duration IS NOT NULL;
- CREATE TABLE events.performance
+ CREATE TABLE IF NOT EXISTS events.performance
(
session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
timestamp bigint NOT NULL,
@@ -905,132 +1082,93 @@ $$
max_used_js_heap_size bigint NOT NULL,
PRIMARY KEY (session_id, message_id)
);
- CREATE INDEX performance_session_id_idx ON events.performance (session_id);
- CREATE INDEX performance_timestamp_idx ON events.performance (timestamp);
- CREATE INDEX performance_session_id_timestamp_idx ON events.performance (session_id, timestamp);
- CREATE INDEX performance_avg_cpu_gt0_idx ON events.performance (avg_cpu) WHERE avg_cpu > 0;
- CREATE INDEX performance_avg_used_js_heap_size_gt0_idx ON events.performance (avg_used_js_heap_size) WHERE avg_used_js_heap_size > 0;
-
--- --- autocomplete.sql ---
-
- CREATE TABLE autocomplete
- (
- value text NOT NULL,
- type text NOT NULL,
- project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE
- );
-
- CREATE unique index autocomplete_unique ON autocomplete (project_id, value, type);
- CREATE index autocomplete_project_id_idx ON autocomplete (project_id);
- CREATE INDEX autocomplete_type_idx ON public.autocomplete (type);
- CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops);
-
--- --- jobs.sql ---
- CREATE TYPE job_status AS ENUM ('scheduled','running','cancelled','failed','completed');
- CREATE TYPE job_action AS ENUM ('delete_user_data');
- CREATE TABLE jobs
- (
- job_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
- description text NOT NULL,
- status job_status NOT NULL,
- project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
- action job_action NOT NULL,
- reference_id text NOT NULL,
- created_at timestamp default timezone('utc'::text, now()) NOT NULL,
- updated_at timestamp default timezone('utc'::text, now()) NULL,
- start_at timestamp NOT NULL,
- errors text NULL
- );
- CREATE INDEX jobs_status_idx ON jobs (status);
- CREATE INDEX jobs_start_at_idx ON jobs (start_at);
- CREATE INDEX jobs_project_id_idx ON jobs (project_id);
-
-
- CREATE TABLE traces
- (
- user_id integer NULL REFERENCES users (user_id) ON DELETE CASCADE,
- tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE,
- created_at bigint NOT NULL DEFAULT (EXTRACT(EPOCH FROM now() at time zone 'utc') * 1000)::bigint,
- auth text NULL,
- action text NOT NULL,
- method text NOT NULL,
- path_format text NOT NULL,
- endpoint text NOT NULL,
- payload jsonb NULL,
- parameters jsonb NULL,
- status int NULL
- );
- CREATE INDEX traces_user_id_idx ON traces (user_id);
- CREATE INDEX traces_tenant_id_idx ON traces (tenant_id);
-
- CREATE TABLE metrics
- (
- metric_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
- project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
- user_id integer REFERENCES users (user_id) ON DELETE SET NULL,
- name text NOT NULL,
- is_public boolean NOT NULL DEFAULT FALSE,
- created_at timestamp default timezone('utc'::text, now()) not null,
- deleted_at timestamp
- );
- CREATE INDEX metrics_user_id_is_public_idx ON public.metrics (user_id, is_public);
- CREATE TABLE metric_series
- (
- series_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
- metric_id integer REFERENCES metrics (metric_id) ON DELETE CASCADE,
- index integer NOT NULL,
- name text NULL,
- filter jsonb NOT NULL,
- created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL,
- deleted_at timestamp
- );
- CREATE INDEX metric_series_metric_id_idx ON public.metric_series (metric_id);
-
- CREATE TABLE searches
- (
- search_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
- project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
- user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
- name text not null,
- filter jsonb not null,
- created_at timestamp default timezone('utc'::text, now()) not null,
- deleted_at timestamp,
- is_public boolean NOT NULL DEFAULT False
- );
-
- CREATE INDEX searches_user_id_is_public_idx ON public.searches (user_id, is_public);
- CREATE INDEX searches_project_id_idx ON public.searches (project_id);
-
- CREATE TYPE alert_detection_method AS ENUM ('threshold', 'change');
-
- CREATE TABLE alerts
- (
- alert_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
- project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
- series_id integer NULL REFERENCES metric_series (series_id) ON DELETE CASCADE,
- name text NOT NULL,
- description text NULL DEFAULT NULL,
- active boolean NOT NULL DEFAULT TRUE,
- detection_method alert_detection_method NOT NULL,
- query jsonb NOT NULL,
- deleted_at timestamp NULL DEFAULT NULL,
- created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
- options jsonb NOT NULL DEFAULT '{
- "renotifyInterval": 1440
- }'::jsonb
- );
- CREATE INDEX alerts_project_id_idx ON alerts (project_id);
- CREATE INDEX alerts_series_id_idx ON alerts (series_id);
- CREATE TRIGGER on_insert_or_update_or_delete
- AFTER INSERT OR UPDATE OR DELETE
- ON alerts
- FOR EACH ROW
- EXECUTE PROCEDURE notify_alert();
-
- raise notice 'DB created';
+ CREATE INDEX IF NOT EXISTS performance_session_id_idx ON events.performance (session_id);
+ CREATE INDEX IF NOT EXISTS performance_timestamp_idx ON events.performance (timestamp);
+ CREATE INDEX IF NOT EXISTS performance_session_id_timestamp_idx ON events.performance (session_id, timestamp);
+ CREATE INDEX IF NOT EXISTS performance_avg_cpu_gt0_idx ON events.performance (avg_cpu) WHERE avg_cpu > 0;
+ CREATE INDEX IF NOT EXISTS performance_avg_used_js_heap_size_gt0_idx ON events.performance (avg_used_js_heap_size) WHERE avg_used_js_heap_size > 0;
END IF;
END;
-
$$
LANGUAGE plpgsql;
+
+
+DO
+$$
+ BEGIN
+ IF (with to_check (name) as (
+ values ('customs'),
+ ('issues'),
+ ('requests')
+ )
+ select bool_and(exists(select *
+ from information_schema.tables t
+ where table_schema = 'events_common'
+ AND table_name = to_check.name)) as all_present
+ from to_check) THEN
+ raise notice 'All events_common schema tables exists';
+ ELSE
+ IF NOT EXISTS(SELECT *
+ FROM pg_type typ
+ WHERE typ.typname = 'custom_level') THEN
+ CREATE TYPE events_common.custom_level AS ENUM ('info','error');
+ END IF;
+ CREATE TABLE IF NOT EXISTS events_common.customs
+ (
+ session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+ timestamp bigint NOT NULL,
+ seq_index integer NOT NULL,
+ name text NOT NULL,
+ payload jsonb NOT NULL,
+ level events_common.custom_level NOT NULL DEFAULT 'info',
+ PRIMARY KEY (session_id, timestamp, seq_index)
+ );
+ CREATE INDEX IF NOT EXISTS customs_name_idx ON events_common.customs (name);
+ CREATE INDEX IF NOT EXISTS customs_name_gin_idx ON events_common.customs USING GIN (name gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS customs_timestamp_idx ON events_common.customs (timestamp);
+
+
+ CREATE TABLE IF NOT EXISTS events_common.issues
+ (
+ session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+ timestamp bigint NOT NULL,
+ seq_index integer NOT NULL,
+ issue_id text NOT NULL REFERENCES issues (issue_id) ON DELETE CASCADE,
+ payload jsonb DEFAULT NULL,
+ PRIMARY KEY (session_id, timestamp, seq_index)
+ );
+ CREATE INDEX IF NOT EXISTS issues_issue_id_timestamp_idx ON events_common.issues (issue_id, timestamp);
+ CREATE INDEX IF NOT EXISTS issues_timestamp_idx ON events_common.issues (timestamp);
+
+
+ CREATE TABLE IF NOT EXISTS events_common.requests
+ (
+ session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+ timestamp bigint NOT NULL,
+ seq_index integer NOT NULL,
+ url text NOT NULL,
+ duration integer NOT NULL,
+ success boolean NOT NULL,
+ PRIMARY KEY (session_id, timestamp, seq_index)
+ );
+ CREATE INDEX IF NOT EXISTS requests_url_idx ON events_common.requests (url);
+ CREATE INDEX IF NOT EXISTS requests_duration_idx ON events_common.requests (duration);
+ CREATE INDEX IF NOT EXISTS requests_url_gin_idx ON events_common.requests USING GIN (url gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS requests_timestamp_idx ON events_common.requests (timestamp);
+ CREATE INDEX IF NOT EXISTS requests_url_gin_idx2 ON events_common.requests USING GIN (RIGHT(url,
+ length(url) -
+ (CASE
+ WHEN url LIKE 'http://%'
+ THEN 7
+ WHEN url LIKE 'https://%'
+ THEN 8
+ ELSE 0 END))
+ gin_trgm_ops);
+ CREATE INDEX IF NOT EXISTS requests_timestamp_session_id_failed_idx ON events_common.requests (timestamp, session_id) WHERE success = FALSE;
+ END IF;
+ END;
+$$
+LANGUAGE plpgsql;
+
+
COMMIT;
\ No newline at end of file
diff --git a/frontend/app/api_client.js b/frontend/app/api_client.js
index 02e575033..a42f19468 100644
--- a/frontend/app/api_client.js
+++ b/frontend/app/api_client.js
@@ -16,13 +16,15 @@ const siteIdRequiredPaths = [
'/integration/sources',
'/issue_types',
'/sample_rate',
- '/flows',
+ '/saved_search',
'/rehydrations',
'/sourcemaps',
'/errors',
'/funnels',
'/assist',
- '/heatmaps'
+ '/heatmaps',
+ '/custom_metrics',
+ // '/custom_metrics/sessions',
];
const noStoringFetchPathStarts = [
diff --git a/frontend/app/components/Alerts/AlertForm.js b/frontend/app/components/Alerts/AlertForm.js
index deb0fa405..f4b9362c7 100644
--- a/frontend/app/components/Alerts/AlertForm.js
+++ b/frontend/app/components/Alerts/AlertForm.js
@@ -1,4 +1,4 @@
-import React from 'react'
+import React, { useEffect } from 'react'
import { Button, Dropdown, Form, Input, SegmentSelection, Checkbox, Message, Link, Icon } from 'UI';
import { alertMetrics as metrics } from 'App/constants';
import { alertConditions as conditions } from 'App/constants';
@@ -8,6 +8,7 @@ import stl from './alertForm.css';
import DropdownChips from './DropdownChips';
import { validateEmail } from 'App/validate';
import cn from 'classnames';
+import { fetchTriggerOptions } from 'Duck/alerts';
const thresholdOptions = [
{ text: '15 minutes', value: 15 },
@@ -46,11 +47,15 @@ const Section = ({ index, title, description, content }) => (
const integrationsRoute = client(CLIENT_TABS.INTEGRATIONS);
const AlertForm = props => {
- const { instance, slackChannels, webhooks, loading, onDelete, deleting } = props;
+ const { instance, slackChannels, webhooks, loading, onDelete, deleting, triggerOptions, metricId, style={ width: '580px', height: '100vh' } } = props;
const write = ({ target: { value, name } }) => props.edit({ [ name ]: value })
const writeOption = (e, { name, value }) => props.edit({ [ name ]: value });
const onChangeOption = (e, { checked, name }) => props.edit({ [ name ]: checked })
+ useEffect(() => {
+ props.fetchTriggerOptions();
+ }, [])
+
const writeQueryOption = (e, { name, value }) => {
const { query } = instance;
props.edit({ query: { ...query, [name] : value } });
@@ -61,13 +66,12 @@ const AlertForm = props => {
props.edit({ query: { ...query, [name] : value } });
}
- const metric = (instance && instance.query.left) ? metrics.find(i => i.value === instance.query.left) : null;
+ const metric = (instance && instance.query.left) ? triggerOptions.find(i => i.value === instance.query.left) : null;
const unit = metric ? metric.unit : '';
const isThreshold = instance.detectionMethod === 'threshold';
-
return (
-
diff --git a/frontend/app/components/Header/SiteDropdown.js b/frontend/app/components/Header/SiteDropdown.js
index 170b99b6a..37057866b 100644
--- a/frontend/app/components/Header/SiteDropdown.js
+++ b/frontend/app/components/Header/SiteDropdown.js
@@ -9,6 +9,7 @@ import { init } from 'Duck/site';
import styles from './siteDropdown.css';
import cn from 'classnames';
import NewSiteForm from '../Client/Sites/NewSiteForm';
+import { clearSearch } from 'Duck/search';
@withRouter
@connect(state => ({
@@ -18,7 +19,8 @@ import NewSiteForm from '../Client/Sites/NewSiteForm';
}), {
setSiteId,
pushNewSite,
- init
+ init,
+ clearSearch,
})
export default class SiteDropdown extends React.PureComponent {
state = { showProductModal: false }
@@ -32,6 +34,11 @@ export default class SiteDropdown extends React.PureComponent {
this.setState({showProductModal: true})
}
+ switchSite = (siteId) => {
+ this.props.setSiteId(siteId);
+ this.props.clearSearch();
+ }
+
render() {
const { sites, siteId, account, location: { pathname } } = this.props;
const { showProductModal } = this.state;
@@ -54,7 +61,7 @@ export default class SiteDropdown extends React.PureComponent {
{ !showCurrent && { 'Does not require domain selection.' }}
{
sites.map(site => (
- this.props.setSiteId(site.id) }>
+ this.switchSite(site.id)}>
{
initPlayer(session, jwt, config);
+
+ const jumptTime = props.query.get('jumpto');
+ if (jumptTime) {
+ Controls.jump(parseInt(jumptTime));
+ }
+
return () => cleanPlayer()
}, [ session.sessionId ]);
@@ -56,7 +66,6 @@ function WebPlayer ({ session, toggleFullscreen, closeBottomBlock, live, fullscr
);
}
-
export default connect(state => ({
session: state.getIn([ 'sessions', 'current' ]),
jwt: state.get('jwt'),
@@ -65,5 +74,4 @@ export default connect(state => ({
}), {
toggleFullscreen,
closeBottomBlock,
-})(WebPlayer)
-
+})(withLocationHandlers()(WebPlayer));
diff --git a/frontend/app/components/Session_/Player/Overlay.tsx b/frontend/app/components/Session_/Player/Overlay.tsx
index 09e1036a6..ca4384ccb 100644
--- a/frontend/app/components/Session_/Player/Overlay.tsx
+++ b/frontend/app/components/Session_/Player/Overlay.tsx
@@ -2,6 +2,7 @@ import React, {useEffect} from 'react';
import { connectPlayer, markTargets } from 'Player';
import { getStatusText } from 'Player/MessageDistributor/managers/AssistManager';
import type { MarkedTarget } from 'Player/MessageDistributor/StatedScreen/StatedScreen';
+import { ConnectionStatus } from 'Player/MessageDistributor/managers/AssistManager';
import AutoplayTimer from './Overlay/AutoplayTimer';
import PlayIconLayer from './Overlay/PlayIconLayer';
@@ -17,6 +18,7 @@ interface Props {
loading: boolean,
live: boolean,
liveStatusText: string,
+ concetionStatus: ConnectionStatus,
autoplay: boolean,
markedTargets: MarkedTarget[] | null,
activeTargetIndex: number,
@@ -33,6 +35,7 @@ function Overlay({
loading,
live,
liveStatusText,
+ concetionStatus,
autoplay,
markedTargets,
activeTargetIndex,
@@ -53,7 +56,7 @@ function Overlay({
<>
{ showAutoplayTimer && }
{ showLiveStatusText &&
-
+
}
{ messagesLoading && }
{ showPlayIconLayer &&
@@ -74,6 +77,7 @@ export default connectPlayer(state => ({
autoplay: state.autoplay,
live: state.live,
liveStatusText: getStatusText(state.peerConnectionStatus),
+ concetionStatus: state.peerConnectionStatus,
markedTargets: state.markedTargets,
activeTargetIndex: state.activeTargetIndex,
}))(Overlay);
\ No newline at end of file
diff --git a/frontend/app/components/Session_/Player/Overlay/LiveStatusText.tsx b/frontend/app/components/Session_/Player/Overlay/LiveStatusText.tsx
index e3eb7fc20..b642a8a7c 100644
--- a/frontend/app/components/Session_/Player/Overlay/LiveStatusText.tsx
+++ b/frontend/app/components/Session_/Player/Overlay/LiveStatusText.tsx
@@ -1,11 +1,64 @@
import React from 'react';
import stl from './LiveStatusText.css';
import ovStl from './overlay.css';
+import { ConnectionStatus } from 'Player/MessageDistributor/managers/AssistManager';
+import { Loader } from 'UI';
interface Props {
text: string;
+ concetionStatus: ConnectionStatus;
}
-export default function LiveStatusText({ text }: Props) {
- return
+export default function LiveStatusText({ text, concetionStatus }: Props) {
+ const renderView = () => {
+ switch (concetionStatus) {
+ case ConnectionStatus.Connecting:
+ return (
+
+
+
Connecting...
+
Establishing a connection with the remote session.
+
+ )
+ case ConnectionStatus.WaitingMessages:
+ return (
+
+
+
Waiting for the session to become active...
+
If it's taking too much time, it could mean the user is simply inactive.
+
+ )
+ case ConnectionStatus.Connected:
+ return (
+
+ )
+ case ConnectionStatus.Inactive:
+ return (
+
+
+
Waiting for the session to become active...
+
If it's taking too much time, it could mean the user is simply inactive.
+
+ )
+ case ConnectionStatus.Disconnected:
+ return (
+
+
Disconnected
+
The connection was lost with the remote session. The user may have simply closed the tab/browser.
+
+ )
+ case ConnectionStatus.Error:
+ return (
+
+
Error
+
Something wrong just happened. Try refreshing the page.
+
+ )
+ }
+ }
+ return
+ { renderView()}
+
}
\ No newline at end of file
diff --git a/frontend/app/components/Session_/PlayerBlockHeader.js b/frontend/app/components/Session_/PlayerBlockHeader.js
index 1cade051e..d78e115c8 100644
--- a/frontend/app/components/Session_/PlayerBlockHeader.js
+++ b/frontend/app/components/Session_/PlayerBlockHeader.js
@@ -2,7 +2,7 @@ import { connect } from 'react-redux';
import { withRouter } from 'react-router-dom';
import { browserIcon, osIcon, deviceTypeIcon } from 'App/iconNames';
import { formatTimeOrDate } from 'App/date';
-import { sessions as sessionsRoute, funnel as funnelRoute, funnelIssue as funnelIssueRoute, withSiteId } from 'App/routes';
+import { sessions as sessionsRoute, withSiteId } from 'App/routes';
import { Icon, CountryFlag, IconButton, BackLink } from 'UI';
import { toggleFavorite, setSessionPath } from 'Duck/sessions';
import cn from 'classnames';
@@ -41,7 +41,6 @@ function capitalise(str) {
local: state.getIn(['sessions', 'timezone']),
funnelRef: state.getIn(['funnels', 'navRef']),
siteId: state.getIn([ 'user', 'siteId' ]),
- funnelPage: state.getIn(['sessions', 'funnelPage']),
hasSessionsPath: hasSessioPath && !isAssist,
}
}, {
@@ -61,22 +60,12 @@ export default class PlayerBlockHeader extends React.PureComponent {
);
backHandler = () => {
- const { history, siteId, funnelPage, sessionPath } = this.props;
- // alert(sessionPath)
- if (sessionPath === history.location.pathname) {
+ const { history, siteId, sessionPath } = this.props;
+ if (sessionPath === history.location.pathname || sessionPath.includes("/session/")) {
history.push(withSiteId(SESSIONS_ROUTE), siteId);
} else {
history.push(sessionPath ? sessionPath : withSiteId(SESSIONS_ROUTE, siteId));
}
- // const funnelId = funnelPage && funnelPage.get('funnelId');
- // const issueId = funnelPage && funnelPage.get('issueId');
- // if (funnelId || issueId) {
- // if (issueId) {
- // history.push(withSiteId(funnelIssueRoute(funnelId, issueId), siteId))
- // } else
- // history.push(withSiteId(funnelRoute(funnelId), siteId));
- // } else
- // history.push(withSiteId(SESSIONS_ROUTE), siteId);
}
toggleFavorite = () => {
@@ -106,9 +95,9 @@ export default class PlayerBlockHeader extends React.PureComponent {
disabled,
jiraConfig,
fullscreen,
- hasSessionsPath
+ hasSessionsPath,
+ sessionPath,
} = this.props;
- // const { history, siteId } = this.props;
const _live = live && !hasSessionsPath;
return (
@@ -145,6 +134,7 @@ export default class PlayerBlockHeader extends React.PureComponent {
}
diff --git a/frontend/app/components/shared/AddWidgets.js b/frontend/app/components/shared/AddWidgets.js
index 64aaef8ca..41b5e0b7e 100644
--- a/frontend/app/components/shared/AddWidgets.js
+++ b/frontend/app/components/shared/AddWidgets.js
@@ -1,40 +1,73 @@
import { connect } from 'react-redux';
import cn from 'classnames';
import withToggle from 'HOCs/withToggle';
-import { IconButton, SlideModal, NoContent } from 'UI';
+import { IconButton, Popup } from 'UI';
import { updateAppearance } from 'Duck/user';
-import { WIDGET_LIST } from 'Types/dashboard';
import stl from './addWidgets.css';
import OutsideClickDetectingDiv from 'Shared/OutsideClickDetectingDiv';
+import { updateActiveState } from 'Duck/customMetrics';
+
+const CUSTOM_METRICS = 'custom_metrics';
@connect(state => ({
appearance: state.getIn([ 'user', 'account', 'appearance' ]),
+ customMetrics: state.getIn(['customMetrics', 'list']),
}), {
- updateAppearance,
+ updateAppearance, updateActiveState,
})
@withToggle()
export default class AddWidgets extends React.PureComponent {
makeAddHandler = widgetKey => () => {
- const { appearance } = this.props;
- const newAppearance = appearance.setIn([ 'dashboard', widgetKey ], true);
+ if (this.props.type === CUSTOM_METRICS) {
+ this.props.updateActiveState(widgetKey, true);
+ } else {
+ const { appearance } = this.props;
+ const newAppearance = appearance.setIn([ 'dashboard', widgetKey ], true);
+ this.props.updateAppearance(newAppearance)
+ }
+
this.props.switchOpen(false);
- this.props.updateAppearance(newAppearance)
+ }
+
+ getCustomMetricWidgets = () => {
+ return this.props.customMetrics.filter(i => !i.active).map(item => ({
+ type: CUSTOM_METRICS,
+ key: item.metricId,
+ name: item.name,
+ })).toJS();
}
render() {
- const { appearance, disabled } = this.props;
- const avaliableWidgets = WIDGET_LIST.filter(({ key, type }) => !appearance.dashboard[ key ] && type === this.props.type );
+ const { disabled, widgets, type } = this.props;
+ const filteredWidgets = type === CUSTOM_METRICS ? this.getCustomMetricWidgets() : widgets;
return (
+
+ }
+ content={ `Add a metric to this section.` }
+ size="tiny"
+ inverted
+ position="top center"
+ />
this.props.switchOpen(false)}>
{this.props.open &&
- {avaliableWidgets.map(w => (
+ {filteredWidgets.map(w => (
@@ -44,46 +77,6 @@ export default class AddWidgets extends React.PureComponent {
}
-
-
- { avaliableWidgets.map(({ key, name, description, thumb }) => (
-
- ))}
-
- }
- onClose={ this.props.switchOpen }
- />
-
);
}
diff --git a/frontend/app/components/shared/CustomMetrics/CustomMetricForm/CustomMetricForm.tsx b/frontend/app/components/shared/CustomMetrics/CustomMetricForm/CustomMetricForm.tsx
new file mode 100644
index 000000000..7700a7a29
--- /dev/null
+++ b/frontend/app/components/shared/CustomMetrics/CustomMetricForm/CustomMetricForm.tsx
@@ -0,0 +1,143 @@
+import React from 'react';
+import { Form, SegmentSelection, Button, IconButton } from 'UI';
+import FilterSeries from '../FilterSeries';
+import { connect } from 'react-redux';
+import { edit as editMetric, save, addSeries, removeSeries, remove } from 'Duck/customMetrics';
+import CustomMetricWidgetPreview from 'App/components/Dashboard/Widgets/CustomMetricsWidgets/CustomMetricWidgetPreview';
+import { confirm } from 'UI/Confirmation';
+import { toast } from 'react-toastify';
+import cn from 'classnames';
+
+interface Props {
+ metric: any;
+ editMetric: (metric, shouldFetch?) => void;
+ save: (metric) => Promise;
+ loading: boolean;
+ addSeries: (series?) => void;
+ onClose: () => void;
+ remove: (id) => Promise;
+ removeSeries: (seriesIndex) => void;
+}
+
+function CustomMetricForm(props: Props) {
+ const { metric, loading } = props;
+
+ const addSeries = () => {
+ props.addSeries();
+ }
+
+ const removeSeries = (index) => {
+ props.removeSeries(index);
+ }
+
+ const write = ({ target: { value, name } }) => props.editMetric({ ...metric, [ name ]: value }, false);
+
+ const changeConditionTab = (e, { name, value }) => {
+ props.editMetric({[ 'viewType' ]: value });
+ };
+
+ const save = () => {
+ props.save(metric).then(() => {
+ toast.success(metric.exists() ? 'Updated succesfully.' : 'Created succesfully.');
+ props.onClose()
+ });
+ }
+
+ const deleteHandler = async () => {
+ if (await confirm({
+ header: 'Custom Metric',
+ confirmButton: 'Delete',
+ confirmation: `Are you sure you want to delete ${metric.name}`
+ })) {
+ props.remove(metric.metricId).then(() => {
+ toast.success('Deleted succesfully.');
+ props.onClose();
+ });
+ }
+ }
+
+ return (
+
+ );
+}
+
+export default connect(state => ({
+ metric: state.getIn(['customMetrics', 'instance']),
+ loading: state.getIn(['customMetrics', 'saveRequest', 'loading']),
+}), { editMetric, save, addSeries, remove, removeSeries })(CustomMetricForm);
\ No newline at end of file
diff --git a/frontend/app/components/shared/CustomMetrics/CustomMetricForm/index.ts b/frontend/app/components/shared/CustomMetrics/CustomMetricForm/index.ts
new file mode 100644
index 000000000..e6ffb605b
--- /dev/null
+++ b/frontend/app/components/shared/CustomMetrics/CustomMetricForm/index.ts
@@ -0,0 +1 @@
+export { default } from './CustomMetricForm';
\ No newline at end of file
diff --git a/frontend/app/components/shared/CustomMetrics/CustomMetrics.tsx b/frontend/app/components/shared/CustomMetrics/CustomMetrics.tsx
new file mode 100644
index 000000000..aedd4a097
--- /dev/null
+++ b/frontend/app/components/shared/CustomMetrics/CustomMetrics.tsx
@@ -0,0 +1,17 @@
+import React from 'react';
+import { IconButton } from 'UI';
+import { connect } from 'react-redux';
+import { edit, init } from 'Duck/customMetrics';
+
+interface Props {
+ init: (instance?, setDefault?) => void;
+}
+function CustomMetrics(props: Props) {
+ return (
+
+ props.init()} />
+
+ );
+}
+
+export default connect(null, { edit, init })(CustomMetrics);
\ No newline at end of file
diff --git a/frontend/app/components/shared/CustomMetrics/CustomMetricsModal/CustomMetricsModal.tsx b/frontend/app/components/shared/CustomMetrics/CustomMetricsModal/CustomMetricsModal.tsx
new file mode 100644
index 000000000..9783ceca0
--- /dev/null
+++ b/frontend/app/components/shared/CustomMetrics/CustomMetricsModal/CustomMetricsModal.tsx
@@ -0,0 +1,38 @@
+import React from 'react'
+import { IconButton, SlideModal } from 'UI';
+import CustomMetricForm from '../CustomMetricForm';
+import { connect } from 'react-redux'
+import { init } from 'Duck/customMetrics';
+
+interface Props {
+ metric: any;
+ init: (instance?, setDefault?) => void;
+}
+function CustomMetricsModal(props: Props) {
+ const { metric } = props;
+ return (
+ <>
+
+ { metric && metric.exists() ? 'Update Custom Metric' : 'Create Custom Metric' }
+
+ }
+ isDisplayed={ !!metric }
+ onClose={ () => props.init(null, true)}
+ content={ (!!metric) && (
+
+ props.init(null, true)} />
+
+ )}
+ />
+ >
+ )
+}
+
+
+export default connect(state => ({
+ metric: state.getIn(['customMetrics', 'instance']),
+ alertInstance: state.getIn(['alerts', 'instance']),
+ showModal: state.getIn(['customMetrics', 'showModal']),
+ }), { init })(CustomMetricsModal);
\ No newline at end of file
diff --git a/frontend/app/components/shared/CustomMetrics/CustomMetricsModal/index.tsx b/frontend/app/components/shared/CustomMetrics/CustomMetricsModal/index.tsx
new file mode 100644
index 000000000..251375d3b
--- /dev/null
+++ b/frontend/app/components/shared/CustomMetrics/CustomMetricsModal/index.tsx
@@ -0,0 +1 @@
+export { default } from './CustomMetricsModal';
\ No newline at end of file
diff --git a/frontend/app/components/shared/CustomMetrics/FilterSeries/FilterSeries.tsx b/frontend/app/components/shared/CustomMetrics/FilterSeries/FilterSeries.tsx
new file mode 100644
index 000000000..0f5df220b
--- /dev/null
+++ b/frontend/app/components/shared/CustomMetrics/FilterSeries/FilterSeries.tsx
@@ -0,0 +1,105 @@
+import React, { useState } from 'react';
+import FilterList from 'Shared/Filters/FilterList';
+import {
+ edit,
+ updateSeries,
+ addSeriesFilterFilter,
+ removeSeriesFilterFilter,
+ editSeriesFilterFilter,
+ editSeriesFilter,
+} from 'Duck/customMetrics';
+import { connect } from 'react-redux';
+import { IconButton, Icon } from 'UI';
+import FilterSelection from '../../Filters/FilterSelection';
+import SeriesName from './SeriesName';
+import cn from 'classnames';
+
+interface Props {
+ seriesIndex: number;
+ series: any;
+ edit: typeof edit;
+ updateSeries: typeof updateSeries;
+ onRemoveSeries: (seriesIndex) => void;
+ canDelete?: boolean;
+ addSeriesFilterFilter: typeof addSeriesFilterFilter;
+ editSeriesFilterFilter: typeof editSeriesFilterFilter;
+ editSeriesFilter: typeof editSeriesFilter;
+ removeSeriesFilterFilter: typeof removeSeriesFilterFilter;
+}
+
+function FilterSeries(props: Props) {
+ const { canDelete } = props;
+ const [expanded, setExpanded] = useState(true)
+ const { series, seriesIndex } = props;
+
+ const onAddFilter = (filter) => {
+ filter.value = [""]
+ props.addSeriesFilterFilter(seriesIndex, filter);
+ }
+
+ const onUpdateFilter = (filterIndex, filter) => {
+ props.editSeriesFilterFilter(seriesIndex, filterIndex, filter);
+ }
+
+ const onChangeEventsOrder = (e, { name, value }) => {
+ props.editSeriesFilter(seriesIndex, { eventsOrder: value });
+ }
+
+ const onRemoveFilter = (filterIndex) => {
+ props.removeSeriesFilterFilter(seriesIndex, filterIndex);
+ }
+
+ return (
+
+
+
+ props.updateSeries(seriesIndex, { name }) } />
+
+
+
+
+
+
+
+
setExpanded(!expanded)} className="ml-3">
+
+
+
+
+
+ { expanded && (
+ <>
+
+ { series.filter.filters.size > 0 ? (
+
+ ): (
+
Add user event or filter to define the series by clicking Add Step.
+ )}
+
+
+
+
+
+
+ >
+ )}
+
+ );
+}
+
+export default connect(null, {
+ edit,
+ updateSeries,
+ addSeriesFilterFilter,
+ editSeriesFilterFilter,
+ editSeriesFilter,
+ removeSeriesFilterFilter,
+})(FilterSeries);
\ No newline at end of file
diff --git a/frontend/app/components/shared/CustomMetrics/FilterSeries/SeriesName/SeriesName.tsx b/frontend/app/components/shared/CustomMetrics/FilterSeries/SeriesName/SeriesName.tsx
new file mode 100644
index 000000000..cba21a33b
--- /dev/null
+++ b/frontend/app/components/shared/CustomMetrics/FilterSeries/SeriesName/SeriesName.tsx
@@ -0,0 +1,55 @@
+import React, { useState, useRef, useEffect } from 'react';
+import { Icon } from 'UI';
+
+interface Props {
+ name: string;
+ onUpdate: (name) => void;
+}
+function SeriesName(props: Props) {
+ const [editing, setEditing] = useState(false)
+ const [name, setName] = useState(props.name)
+ const ref = useRef(null)
+
+ const write = ({ target: { value, name } }) => {
+ setName(value)
+ }
+
+ const onBlur = () => {
+ setEditing(false)
+ props.onUpdate(name)
+ }
+
+ useEffect(() => {
+ if (editing) {
+ ref.current.focus()
+ }
+ }, [editing])
+
+ useEffect(() => {
+ setName(props.name)
+ }, [props.name])
+
+ // const { name } = props;
+ return (
+
+ { editing ? (
+
setEditing(true)}
+ />
+ ) : (
+
{name}
+ )}
+
+
setEditing(true)}>
+
+ );
+}
+
+export default SeriesName;
\ No newline at end of file
diff --git a/frontend/app/components/shared/CustomMetrics/FilterSeries/SeriesName/index.ts b/frontend/app/components/shared/CustomMetrics/FilterSeries/SeriesName/index.ts
new file mode 100644
index 000000000..90e63cdb6
--- /dev/null
+++ b/frontend/app/components/shared/CustomMetrics/FilterSeries/SeriesName/index.ts
@@ -0,0 +1 @@
+export { default } from './SeriesName';
\ No newline at end of file
diff --git a/frontend/app/components/shared/CustomMetrics/FilterSeries/index.ts b/frontend/app/components/shared/CustomMetrics/FilterSeries/index.ts
new file mode 100644
index 000000000..5882e382a
--- /dev/null
+++ b/frontend/app/components/shared/CustomMetrics/FilterSeries/index.ts
@@ -0,0 +1 @@
+export { default } from './FilterSeries'
\ No newline at end of file
diff --git a/frontend/app/components/shared/CustomMetrics/SessionListModal/SessionListModal.css b/frontend/app/components/shared/CustomMetrics/SessionListModal/SessionListModal.css
new file mode 100644
index 000000000..05f1ffb8a
--- /dev/null
+++ b/frontend/app/components/shared/CustomMetrics/SessionListModal/SessionListModal.css
@@ -0,0 +1,29 @@
+.wrapper {
+ padding: 20px;
+ background-color: #f6f6f6;
+ min-height: calc(100vh - 59px);
+ }
+
+ .dropdown {
+ display: flex !important;
+ padding: 4px 6px;
+ border-radius: 3px;
+ color: $gray-darkest;
+ font-weight: 500;
+ &:hover {
+ background-color: $gray-light;
+ }
+ }
+
+ .dropdownTrigger {
+ padding: 4px 8px;
+ border-radius: 3px;
+ &:hover {
+ background-color: $gray-light;
+ }
+ }
+
+ .dropdownIcon {
+ margin-top: 2px;
+ margin-left: 3px;
+ }
\ No newline at end of file
diff --git a/frontend/app/components/shared/CustomMetrics/SessionListModal/SessionListModal.tsx b/frontend/app/components/shared/CustomMetrics/SessionListModal/SessionListModal.tsx
new file mode 100644
index 000000000..9bc3e56a0
--- /dev/null
+++ b/frontend/app/components/shared/CustomMetrics/SessionListModal/SessionListModal.tsx
@@ -0,0 +1,117 @@
+import React, { useEffect, useState } from 'react';
+import { SlideModal, NoContent, Dropdown, Icon, TimezoneDropdown, Loader } from 'UI';
+import SessionItem from 'Shared/SessionItem';
+import stl from './SessionListModal.css';
+import { connect } from 'react-redux';
+import { fetchSessionList, setActiveWidget } from 'Duck/customMetrics';
+import { DateTime } from 'luxon';
+
+interface Props {
+ loading: boolean;
+ list: any;
+ fetchSessionList: (params) => void;
+ activeWidget: any;
+ setActiveWidget: (widget) => void;
+}
+function SessionListModal(props: Props) {
+ const { activeWidget, loading, list } = props;
+ const [seriesOptions, setSeriesOptions] = useState([
+ { text: 'All', value: 'all' },
+ ]);
+ const [activeSeries, setActiveSeries] = useState('all');
+ useEffect(() => {
+ if (!activeWidget || !activeWidget.widget) return;
+ props.fetchSessionList({
+ metricId: activeWidget.widget.metricId,
+ startDate: activeWidget.startTimestamp,
+ endDate: activeWidget.endTimestamp
+ });
+ }, [activeWidget]);
+
+ useEffect(() => {
+ if (!list) return;
+ const seriesOptions = list.map(item => ({
+ text: item.seriesName,
+ value: item.seriesId,
+ }));
+ setSeriesOptions([
+ { text: 'All', value: 'all' },
+ ...seriesOptions,
+ ]);
+ }, [list]);
+
+ const getListSessionsBySeries = (seriesId) => {
+ const arr: any = []
+ list.forEach(element => {
+ if (seriesId === 'all') {
+ const sessionIds = arr.map(i => i.sessionId);
+ arr.push(...element.sessions.filter(i => !sessionIds.includes(i.sessionId)));
+ } else {
+ if (element.seriesId === seriesId) {
+ arr.push(...element.sessions)
+ }
+ }
+ });
+ return arr;
+ }
+
+ const writeOption = (e, { name, value }) => setActiveSeries(value);
+ const filteredSessions = getListSessionsBySeries(activeSeries);
+
+ const startTime = DateTime.fromMillis(activeWidget.startTimestamp).toFormat('LLL dd, yyyy HH:mm a');
+ const endTime = DateTime.fromMillis(activeWidget.endTimestamp).toFormat('LLL dd, yyyy HH:mm a');
+ return (
+
+ { activeWidget.widget.name }
+
+ )}
+ isDisplayed={ !!activeWidget }
+ onClose={ () => props.setActiveWidget(null)}
+ content={ activeWidget && (
+
+
+
Showing all sessions between {startTime} and {endTime}
+
+
+ Timezone
+
+
+
+ Series
+ }
+ />
+
+ {/*
Series */}
+
+
+
+
+ { filteredSessions.map(session => ) }
+
+
+
+ )}
+ />
+ );
+}
+
+export default connect(state => ({
+ loading: state.getIn(['customMetrics', 'fetchSessionList', 'loading']),
+ list: state.getIn(['customMetrics', 'sessionList']),
+ // activeWidget: state.getIn(['customMetrics', 'activeWidget']),
+}), { fetchSessionList, setActiveWidget })(SessionListModal);
diff --git a/frontend/app/components/shared/CustomMetrics/SessionListModal/index.ts b/frontend/app/components/shared/CustomMetrics/SessionListModal/index.ts
new file mode 100644
index 000000000..75303a134
--- /dev/null
+++ b/frontend/app/components/shared/CustomMetrics/SessionListModal/index.ts
@@ -0,0 +1 @@
+export { default } from './SessionListModal';
\ No newline at end of file
diff --git a/frontend/app/components/shared/CustomMetrics/index.ts b/frontend/app/components/shared/CustomMetrics/index.ts
new file mode 100644
index 000000000..ebbb8203c
--- /dev/null
+++ b/frontend/app/components/shared/CustomMetrics/index.ts
@@ -0,0 +1 @@
+export { default } from './CustomMetrics';
\ No newline at end of file
diff --git a/frontend/app/components/shared/DateRange.js b/frontend/app/components/shared/DateRange.js
index 680a76b0f..13561a412 100644
--- a/frontend/app/components/shared/DateRange.js
+++ b/frontend/app/components/shared/DateRange.js
@@ -2,7 +2,7 @@ import { connect } from 'react-redux';
import DateRangeDropdown from 'Shared/DateRangeDropdown';
function DateRange (props) {
- const { startDate, endDate, rangeValue, className, onDateChange, customRangeRight=false, customHidden = false } = props;
+ const { direction = "left", startDate, endDate, rangeValue, className, onDateChange, customRangeRight=false, customHidden = false } = props;
return (
);
}
diff --git a/frontend/app/components/shared/DateRangeDropdown/DateRangeDropdown.js b/frontend/app/components/shared/DateRangeDropdown/DateRangeDropdown.js
index f29d47745..05a42658f 100644
--- a/frontend/app/components/shared/DateRangeDropdown/DateRangeDropdown.js
+++ b/frontend/app/components/shared/DateRangeDropdown/DateRangeDropdown.js
@@ -88,7 +88,7 @@ export default class DateRangeDropdown extends React.PureComponent {
: null
}
- selection={!button}
+ // selection={!button}
name="sessionDateRange"
direction={ direction }
className={ button ? "" : "customDropdown" }
@@ -97,8 +97,9 @@ export default class DateRangeDropdown extends React.PureComponent {
icon={ null }
>
- { options.map(props =>
+ { options.map((props, i) =>
diff --git a/frontend/app/components/shared/EventFilter/Attributes/AttributeValueField.js b/frontend/app/components/shared/EventFilter/Attributes/AttributeValueField.js
index 176ae2c4a..daee15cc3 100644
--- a/frontend/app/components/shared/EventFilter/Attributes/AttributeValueField.js
+++ b/frontend/app/components/shared/EventFilter/Attributes/AttributeValueField.js
@@ -137,12 +137,13 @@ class AttributeValueField extends React.PureComponent {
const { filter, onChange } = this.props;
const _showAutoComplete = this.isAutoComplete(filter.type);
const _params = _showAutoComplete ? this.getParams(filter) : {};
- let _optionsEndpoint= '/events/search';
+ let _optionsEndpoint= '/events/search';
+ console.log('value', filter.value)
return (
{ _showAutoComplete ?
- { getHeader(filter.type) } }
fullWidth={ (filter.type === TYPES.CONSOLE || filter.type === TYPES.LOCATION || filter.type === TYPES.CUSTOM) && filter.value }
+ // onAddOrRemove={}
/>
: this.renderField()
}
diff --git a/frontend/app/components/shared/EventFilter/AutoComplete/AutoComplete.js b/frontend/app/components/shared/EventFilter/AutoComplete/AutoComplete.js
index 922b7f650..e075b17bd 100644
--- a/frontend/app/components/shared/EventFilter/AutoComplete/AutoComplete.js
+++ b/frontend/app/components/shared/EventFilter/AutoComplete/AutoComplete.js
@@ -77,7 +77,7 @@ class AutoComplete extends React.PureComponent {
noResultsMessage: SOME_ERROR_MSG,
})
- onInputChange = (e, { name, value }) => {
+ onInputChange = ({ target: { value } }) => {
changed = true;
this.setState({ query: value, updated: true })
const _value = value.trim();
@@ -118,7 +118,8 @@ class AutoComplete extends React.PureComponent {
valueToText = defaultValueToText,
placeholder = 'Type to search...',
headerText = '',
- fullWidth = false
+ fullWidth = false,
+ onAddOrRemove = () => null,
} = this.props;
const options = optionMapping(values, valueToText)
@@ -128,7 +129,7 @@ class AutoComplete extends React.PureComponent {
className={ cn("relative", { "flex-1" : fullWidth }) }
onClickOutside={this.onClickOutside}
>
-
+ /> */}
+
+
this.setState({ddOpen: true})}
+ onChange={ this.onInputChange }
+ onBlur={ this.onBlur }
+ onFocus={ () => this.setState({ddOpen: true})}
+ value={ query }
+ autoFocus={ true }
+ type="text"
+ placeholder={ placeholder }
+ onPaste={(e) => {
+ const text = e.clipboardData.getData('Text');
+ this.hiddenInput.value = text;
+ pasted = true; // to use only the hidden input
+ } }
+ />
+
+ {/* */}
+ or
+
+
{ ddOpen && options.length > 0 &&
diff --git a/frontend/app/components/shared/EventFilter/AutoComplete/autoComplete.css b/frontend/app/components/shared/EventFilter/AutoComplete/autoComplete.css
index c2c827bfe..b72653c42 100644
--- a/frontend/app/components/shared/EventFilter/AutoComplete/autoComplete.css
+++ b/frontend/app/components/shared/EventFilter/AutoComplete/autoComplete.css
@@ -28,3 +28,30 @@
.fullWidth {
width: 100% !important;
}
+
+.inputWrapper {
+ border: solid thin $gray-light !important;
+ border-radius: 3px;
+ border-radius: 3px;
+ display: flex;
+ align-items: center;
+ & input {
+ height: 28px;
+ font-size: 13px !important;
+ padding: 0 5px !important;
+ border-top-left-radius: 3px;
+ border-bottom-left-radius: 3px;
+ }
+
+ & .right {
+ height: 28px;
+ display: flex;
+ align-items: center;
+ padding: 0 5px;
+ background-color: $gray-lightest;
+ border-left: solid thin $gray-light !important;
+ border-top-right-radius: 3px;
+ border-bottom-right-radius: 3px;
+ cursor: pointer;
+ }
+}
\ No newline at end of file
diff --git a/frontend/app/components/shared/EventFilter/EventEditor.js b/frontend/app/components/shared/EventFilter/EventEditor.js
index 94ffb8692..f2d5dee8f 100644
--- a/frontend/app/components/shared/EventFilter/EventEditor.js
+++ b/frontend/app/components/shared/EventFilter/EventEditor.js
@@ -97,7 +97,7 @@ export default class EventEditor extends React.PureComponent {
{ dndBtn }
diff --git a/frontend/app/components/shared/EventFilter/FilterModal/FilterModal.js b/frontend/app/components/shared/EventFilter/FilterModal/FilterModal.js
index f76c28065..cbe8c9546 100644
--- a/frontend/app/components/shared/EventFilter/FilterModal/FilterModal.js
+++ b/frontend/app/components/shared/EventFilter/FilterModal/FilterModal.js
@@ -67,7 +67,7 @@ export default class FilterModal extends React.PureComponent {
this.props.addAttribute(filter, _in >= 0 ? _in : _index);
} else {
logger.log('Adding Event', filter)
- const _index = filterType === 'event' ? index : undefined; // should add new one if coming from fitlers
+ const _index = filterType === 'event' ? index : undefined; // should add new one if coming from filters
this.props.addEvent(filter, false, _index);
}
diff --git a/frontend/app/components/shared/EventSearchInput/EventSearchInput.tsx b/frontend/app/components/shared/EventSearchInput/EventSearchInput.tsx
new file mode 100644
index 000000000..0a7752ccd
--- /dev/null
+++ b/frontend/app/components/shared/EventSearchInput/EventSearchInput.tsx
@@ -0,0 +1,17 @@
+import React from 'react';
+
+interface Props {
+
+}
+function EventSearchInput(props) {
+ return (
+
+
+
+ );
+}
+
+export default EventSearchInput;
\ No newline at end of file
diff --git a/frontend/app/components/shared/EventSearchInput/index.ts b/frontend/app/components/shared/EventSearchInput/index.ts
new file mode 100644
index 000000000..2e4e57078
--- /dev/null
+++ b/frontend/app/components/shared/EventSearchInput/index.ts
@@ -0,0 +1 @@
+export { default } from './EventSearchInput';
\ No newline at end of file
diff --git a/frontend/app/components/shared/FilterDropdown/FilterDropdown.js b/frontend/app/components/shared/FilterDropdown/FilterDropdown.js
index 8bf6e5faa..66da1f586 100644
--- a/frontend/app/components/shared/FilterDropdown/FilterDropdown.js
+++ b/frontend/app/components/shared/FilterDropdown/FilterDropdown.js
@@ -27,7 +27,7 @@ const locationOptions = Object.keys(regionLabels).map(k => ({ key: LOCATION, tex
const _filterKeys = [
{ key: 'userId', name: 'User ID', icon: 'user-alt', placeholder: 'Search for User ID' },
{ key: 'userAnonymousId', name: 'User Anonymous ID', icon: 'filters/userid', placeholder: 'Search for User Anonymous ID' },
- { key: 'revId', name: 'Rev ID', icon: 'filters/border-outer', placeholder: 'Search for Rev ID' },
+ { key: 'revId', name: 'Rev ID', icon: 'filters/rev-id', placeholder: 'Search for Rev ID' },
{ key: COUNTRY, name: 'Country', icon: 'map-marker-alt', placeholder: 'Search for Country' },
{ key: 'device', name: 'Device', icon: 'device', placeholder: 'Search for Device' },
{ key: 'os', name: 'OS', icon: 'os', placeholder: 'Search for OS' },
diff --git a/frontend/app/components/shared/FilterDropdown/filterDropdown.css b/frontend/app/components/shared/FilterDropdown/filterDropdown.css
index 976a0d233..5ebcbcb65 100644
--- a/frontend/app/components/shared/FilterDropdown/filterDropdown.css
+++ b/frontend/app/components/shared/FilterDropdown/filterDropdown.css
@@ -3,6 +3,10 @@
padding: 0 10px;
&:hover {
background-color: $gray-light;
+ color: white;
+ & svg {
+ fill: white;
+ }
}
}
diff --git a/frontend/app/components/shared/Filters/FilterAutoComplete/FilterAutoComplete.css b/frontend/app/components/shared/Filters/FilterAutoComplete/FilterAutoComplete.css
new file mode 100644
index 000000000..bb9f17313
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterAutoComplete/FilterAutoComplete.css
@@ -0,0 +1,81 @@
+.wrapper {
+ border: solid thin $gray-light !important;
+ border-radius: 3px;
+ border-radius: 3px;
+ display: flex;
+ align-items: center;
+ background-color: white;
+ width: 100%;
+ & input {
+ height: 24px;
+ font-size: 13px !important;
+ padding: 0 5px !important;
+ border-top-left-radius: 3px;
+ border-bottom-left-radius: 3px;
+ border: solid thin transparent !important;
+ width: 100%;
+ }
+
+ & .right {
+ height: 24px;
+ display: flex;
+ align-items: stretch;
+ padding: 0;
+ background-color: $gray-lightest;
+ border-top-right-radius: 3px;
+ border-bottom-right-radius: 3px;
+
+ & div {
+ /* background-color: red; */
+ border-left: solid thin $gray-light !important;
+ width: 28px;
+ cursor: pointer;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ &:last-child {
+ border-top-right-radius: 3px;
+ border-bottom-right-radius: 3px;
+ }
+ &:hover {
+ background-color: $gray-light;
+ }
+ }
+ }
+}
+
+.menu {
+ border-radius: 0 0 3px 3px;
+ border: solid thin $gray-light !important;
+ box-shadow: 0 2px 2px 0 $gray-light;
+ /* padding: 20px; */
+ background-color: white;
+ max-height: 350px;
+ overflow-y: auto;
+ position: absolute;
+ top: 28px;
+ left: 0;
+ width: 500px;
+ z-index: 99;
+}
+
+.filterItem {
+ display: flex;
+ align-items: center;
+ padding: 8px 10px;
+ cursor: pointer;
+ border-radius: 3px;
+ /* transition: all 0.4s; */
+ margin-bottom: 5px;
+ max-width: 100%;
+ & .label {
+ overflow: hidden;
+ text-overflow: ellipsis;
+ white-space: nowrap;
+ }
+
+ &:hover {
+ background-color: $gray-lightest;
+ /* transition: all 0.2s; */
+ }
+}
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterAutoComplete/FilterAutoComplete.tsx b/frontend/app/components/shared/Filters/FilterAutoComplete/FilterAutoComplete.tsx
new file mode 100644
index 000000000..aaa19e245
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterAutoComplete/FilterAutoComplete.tsx
@@ -0,0 +1,167 @@
+import React, { useState, useEffect } from 'react';
+import { Icon, Loader } from 'UI';
+import APIClient from 'App/api_client';
+import { debounce } from 'App/utils';
+import stl from './FilterAutoComplete.css';
+import cn from 'classnames';
+
+const hiddenStyle = {
+ whiteSpace: 'pre-wrap',
+ opacity: 0, position: 'fixed', left: '-3000px'
+};
+
+let debouncedRequestValues = (value) => null;
+
+interface Props {
+ showOrButton?: boolean;
+ showCloseButton?: boolean;
+ onRemoveValue?: () => void;
+ onAddValue?: () => void;
+ endpoint?: string;
+ method?: string;
+ params?: any;
+ headerText?: string;
+ placeholder?: string;
+ onSelect: (e, item) => void;
+ value: any;
+ icon?: string;
+}
+
+function FilterAutoComplete(props: Props) {
+ const {
+ showCloseButton = false,
+ placeholder = 'Type to search',
+ method = 'GET',
+ showOrButton = false,
+ onRemoveValue = () => null,
+ onAddValue = () => null,
+ endpoint = '',
+ params = {},
+ headerText = '',
+ value = '',
+ icon = null,
+ } = props;
+ const [showModal, setShowModal] = useState(false)
+ const [loading, setLoading] = useState(false)
+ const [options, setOptions] = useState([]);
+ const [query, setQuery] = useState(value);
+
+
+ useEffect(() => {
+ const requestValues = (q) => {
+ setLoading(true);
+
+ return new APIClient()[method?.toLowerCase()](endpoint, { ...params, q })
+ .then(response => response.json())
+ .then(({ errors, data }) => {
+ if (errors) {
+ // this.setError();
+ } else {
+ setOptions(data);
+ }
+ }).finally(() => setLoading(false));
+
+ }
+
+ debouncedRequestValues = debounce(requestValues, 1000)
+ }, [])
+
+ const onInputChange = ({ target: { value } }) => {
+ setQuery(value);
+ if (!showModal) {
+ setShowModal(true);
+ }
+
+ if (value === '' || value === ' ') {
+ return
+ }
+ debouncedRequestValues(value);
+ }
+
+ // useEffect(() => {
+ // if (query === '' || query === ' ') {
+ // return
+ // }
+
+ // debouncedRequestValues(query)
+ // }, [query])
+
+ useEffect(() => {
+ setQuery(value);
+ }, [value])
+
+ const onBlur = (e) => {
+ setTimeout(() => { setShowModal(false) }, 200)
+ if (query !== value) {
+ props.onSelect(e, { value: query })
+ }
+ }
+
+ const onItemClick = (e, item) => {
+ e.stopPropagation();
+ e.preventDefault();
+ // const { onSelect, name } = this.props;
+
+
+ if (query !== item.value) {
+ setQuery(item.value);
+ }
+ // this.setState({ query: item.value, ddOpen: false})
+ props.onSelect(e, item);
+ // setTimeout(() => {
+ // setShowModal(false)
+ // }, 10)
+ }
+
+ return (
+
+
+
setShowModal(true)}
+ value={ query }
+ autoFocus={ true }
+ type="text"
+ placeholder={ placeholder }
+ // onPaste={(e) => {
+ // const text = e.clipboardData.getData('Text');
+ // // this.hiddenInput.value = text;
+ // // pasted = true; // to use only the hidden input
+ // } }
+ />
+
+ { showCloseButton &&
}
+ { showOrButton &&
or
}
+
+
+
+ { !showOrButton &&
or
}
+
+ { (showModal && (options.length > 0) || loading) && (
+
+
+ {
+ options.map((item, i) => (
+ onItemClick(e, item) }
+ >
+ { icon && }
+ { item.value }
+
+ ))
+ }
+
+
+ )}
+
+ );
+}
+
+export default FilterAutoComplete;
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterAutoComplete/index.ts b/frontend/app/components/shared/Filters/FilterAutoComplete/index.ts
new file mode 100644
index 000000000..8540e6f40
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterAutoComplete/index.ts
@@ -0,0 +1 @@
+export { default } from './FilterAutoComplete';
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterAutoCompleteLocal/FilterAutoCompleteLocal.css b/frontend/app/components/shared/Filters/FilterAutoCompleteLocal/FilterAutoCompleteLocal.css
new file mode 100644
index 000000000..bb9f17313
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterAutoCompleteLocal/FilterAutoCompleteLocal.css
@@ -0,0 +1,81 @@
+.wrapper {
+ border: solid thin $gray-light !important;
+ border-radius: 3px;
+ border-radius: 3px;
+ display: flex;
+ align-items: center;
+ background-color: white;
+ width: 100%;
+ & input {
+ height: 24px;
+ font-size: 13px !important;
+ padding: 0 5px !important;
+ border-top-left-radius: 3px;
+ border-bottom-left-radius: 3px;
+ border: solid thin transparent !important;
+ width: 100%;
+ }
+
+ & .right {
+ height: 24px;
+ display: flex;
+ align-items: stretch;
+ padding: 0;
+ background-color: $gray-lightest;
+ border-top-right-radius: 3px;
+ border-bottom-right-radius: 3px;
+
+ & div {
+ /* background-color: red; */
+ border-left: solid thin $gray-light !important;
+ width: 28px;
+ cursor: pointer;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ &:last-child {
+ border-top-right-radius: 3px;
+ border-bottom-right-radius: 3px;
+ }
+ &:hover {
+ background-color: $gray-light;
+ }
+ }
+ }
+}
+
+.menu {
+ border-radius: 0 0 3px 3px;
+ border: solid thin $gray-light !important;
+ box-shadow: 0 2px 2px 0 $gray-light;
+ /* padding: 20px; */
+ background-color: white;
+ max-height: 350px;
+ overflow-y: auto;
+ position: absolute;
+ top: 28px;
+ left: 0;
+ width: 500px;
+ z-index: 99;
+}
+
+.filterItem {
+ display: flex;
+ align-items: center;
+ padding: 8px 10px;
+ cursor: pointer;
+ border-radius: 3px;
+ /* transition: all 0.4s; */
+ margin-bottom: 5px;
+ max-width: 100%;
+ & .label {
+ overflow: hidden;
+ text-overflow: ellipsis;
+ white-space: nowrap;
+ }
+
+ &:hover {
+ background-color: $gray-lightest;
+ /* transition: all 0.2s; */
+ }
+}
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterAutoCompleteLocal/FilterAutoCompleteLocal.tsx b/frontend/app/components/shared/Filters/FilterAutoCompleteLocal/FilterAutoCompleteLocal.tsx
new file mode 100644
index 000000000..24e940079
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterAutoCompleteLocal/FilterAutoCompleteLocal.tsx
@@ -0,0 +1,77 @@
+import React, { useState, useEffect } from 'react';
+import { Icon, Loader } from 'UI';
+import { debounce } from 'App/utils';
+import stl from './FilterAutoCompleteLocal.css';
+import cn from 'classnames';
+
+interface Props {
+ showOrButton?: boolean;
+ showCloseButton?: boolean;
+ onRemoveValue?: () => void;
+ onAddValue?: () => void;
+ placeholder?: string;
+ onSelect: (e, item) => void;
+ value: any;
+ icon?: string;
+}
+
+function FilterAutoCompleteLocal(props: Props) {
+ const {
+ showCloseButton = false,
+ placeholder = 'Type to search',
+ showOrButton = false,
+ onRemoveValue = () => null,
+ onAddValue = () => null,
+ value = '',
+ icon = null,
+ } = props;
+ const [showModal, setShowModal] = useState(true)
+ const [query, setQuery] = useState(value);
+
+ const onInputChange = ({ target: { value } }) => {
+ setQuery(value);
+ }
+
+ useEffect(() => {
+ setQuery(value);
+ }, [value])
+
+ const onBlur = (e) => {
+ setTimeout(() => { setShowModal(false) }, 200)
+ props.onSelect(e, { value: query })
+ }
+
+ const handleKeyDown = (e) => {
+ if (e.key === 'Enter') {
+ props.onSelect(e, { value: query })
+ }
+ }
+
+ return (
+
+
+
setShowModal(true)}
+ value={ query }
+ autoFocus={ true }
+ type="text"
+ placeholder={ placeholder }
+ onKeyDown={handleKeyDown}
+ />
+
+ { showCloseButton &&
}
+ { showOrButton &&
or
}
+
+
+
+ { !showOrButton &&
or
}
+
+ );
+}
+
+export default FilterAutoCompleteLocal;
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterAutoCompleteLocal/index.ts b/frontend/app/components/shared/Filters/FilterAutoCompleteLocal/index.ts
new file mode 100644
index 000000000..b44237db5
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterAutoCompleteLocal/index.ts
@@ -0,0 +1 @@
+export { default } from './FilterAutoCompleteLocal';
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterDuration/FilterDuration.css b/frontend/app/components/shared/Filters/FilterDuration/FilterDuration.css
new file mode 100644
index 000000000..79e5d6d8b
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterDuration/FilterDuration.css
@@ -0,0 +1,34 @@
+.wrapper {
+ display: flex;
+ justify-content: space-between;
+ align-items: center;
+
+ & input {
+ height: 26px;
+ max-width: 85px !important;
+ font-size: 13px !important;
+ font-weight: 400 !important;
+ color: $gray-medium !important;
+ }
+
+ & > div {
+ /* &:first-child { */
+ margin-right: 10px;
+ height: 26px;
+ /* padding: 5px !important; */
+ /* } */
+ }
+}
+
+.label {
+ font-size: 13px !important;
+ font-weight: 400 !important;
+ color: $gray-medium !important;
+ padding: 0px 5px !important;
+ line-height: 1.9 !important;
+ /* height: 26px;
+ display: flex;
+ align-items: center;
+ justify-content: center; */
+}
+
diff --git a/frontend/app/components/shared/Filters/FilterDuration/FilterDuration.js b/frontend/app/components/shared/Filters/FilterDuration/FilterDuration.js
new file mode 100644
index 000000000..7fe5b725a
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterDuration/FilterDuration.js
@@ -0,0 +1,66 @@
+import { Input, Label } from 'semantic-ui-react';
+import styles from './FilterDuration.css';
+
+const fromMs = value => value ? `${ value / 1000 / 60 }` : ''
+const toMs = value => value !== '' ? value * 1000 * 60 : null
+
+export default class FilterDuration extends React.PureComponent {
+ state = { focused: false }
+ onChange = (e, { name, value }) => {
+ const { onChange } = this.props;
+ if (typeof onChange === 'function') {
+ onChange({
+ [ name ]: toMs(value),
+ });
+ }
+ }
+
+ onKeyPress = e => {
+ const { onEnterPress } = this.props;
+ if (e.key === 'Enter' && typeof onEnterPress === 'function') {
+ onEnterPress(e);
+ }
+ }
+
+ render() {
+ const {
+ minDuration,
+ maxDuration,
+ } = this.props;
+
+ return (
+
+ this.setState({ focused: true })}
+ onBlur={this.props.onBlur}
+ >
+
+
+
+ this.setState({ focused: true })}
+ onBlur={this.props.onBlur}
+ >
+
+
+
+
+ );
+ }
+}
diff --git a/frontend/app/components/shared/Filters/FilterDuration/index.js b/frontend/app/components/shared/Filters/FilterDuration/index.js
new file mode 100644
index 000000000..cbf9296f3
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterDuration/index.js
@@ -0,0 +1 @@
+export { default } from './FilterDuration';
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterItem/FilterItem.tsx b/frontend/app/components/shared/Filters/FilterItem/FilterItem.tsx
new file mode 100644
index 000000000..a8760428b
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterItem/FilterItem.tsx
@@ -0,0 +1,72 @@
+import React from 'react';
+import FilterOperator from '../FilterOperator';
+import FilterSelection from '../FilterSelection';
+import FilterValue from '../FilterValue';
+import { Icon } from 'UI';
+import FilterSource from '../FilterSource';
+
+interface Props {
+ filterIndex: number;
+ filter: any; // event/filter
+ onUpdate: (filter) => void;
+ onRemoveFilter: () => void;
+ isFilter?: boolean;
+}
+function FilterItem(props: Props) {
+ const { isFilter = false, filterIndex, filter } = props;
+ const canShowValues = !(filter.operator === "isAny" || filter.operator === "onAny");
+
+ const replaceFilter = (filter) => {
+ props.onUpdate({ ...filter, value: [""]});
+ };
+
+ const onOperatorChange = (e, { name, value }) => {
+ props.onUpdate({ ...filter, operator: value })
+ }
+
+ const onSourceOperatorChange = (e, { name, value }) => {
+ props.onUpdate({ ...filter, sourceOperator: value })
+ }
+
+ return (
+
+
+ { !isFilter &&
{filterIndex+1}
}
+
+
+ {/* Filter with Source */}
+ { filter.hasSource && (
+ <>
+
+
+ >
+ )}
+
+ {/* Filter values */}
+
+ { canShowValues && (
) }
+
+
+
+
+ );
+}
+
+export default FilterItem;
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterItem/index.ts b/frontend/app/components/shared/Filters/FilterItem/index.ts
new file mode 100644
index 000000000..b09a3e2f1
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterItem/index.ts
@@ -0,0 +1 @@
+export { default } from './FilterItem';
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterList/FilterList.tsx b/frontend/app/components/shared/Filters/FilterList/FilterList.tsx
new file mode 100644
index 000000000..e8472ca51
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterList/FilterList.tsx
@@ -0,0 +1,90 @@
+import React, { useState} from 'react';
+import FilterItem from '../FilterItem';
+import { SegmentSelection, Popup } from 'UI';
+
+interface Props {
+ // filters: any[]; // event/filter
+ filter?: any; // event/filter
+ onUpdateFilter: (filterIndex, filter) => void;
+ onRemoveFilter: (filterIndex) => void;
+ onChangeEventsOrder: (e, { name, value }) => void;
+}
+function FilterList(props: Props) {
+ const { filter } = props;
+ const filters = filter.filters;
+ const hasEvents = filter.filters.filter(i => i.isEvent).size > 0;
+ const hasFilters = filter.filters.filter(i => !i.isEvent).size > 0;
+ let rowIndex = 0;
+
+ const onRemoveFilter = (filterIndex) => {
+ const newFilters = filters.filter((_filter, i) => {
+ return i !== filterIndex;
+ });
+
+ props.onRemoveFilter(filterIndex);
+ }
+
+ return (
+
+ { hasEvents && (
+ <>
+
+
EVENTS
+
+
}
+ content={ `Events Order` }
+ size="tiny"
+ inverted
+ position="top center"
+ />
+
+
+
+
+ {filters.map((filter, filterIndex) => filter.isEvent ? (
+ props.onUpdateFilter(filterIndex, filter)}
+ onRemoveFilter={() => onRemoveFilter(filterIndex) }
+ />
+ ): null)}
+
+ >
+ )}
+
+ {hasFilters && (
+ <>
+ {hasEvents && }
+ FILTERS
+ {filters.map((filter, filterIndex) => !filter.isEvent ? (
+ props.onUpdateFilter(filterIndex, filter)}
+ onRemoveFilter={() => onRemoveFilter(filterIndex) }
+ />
+ ): null)}
+ >
+ )}
+
+ );
+}
+
+export default FilterList;
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterList/index.ts b/frontend/app/components/shared/Filters/FilterList/index.ts
new file mode 100644
index 000000000..ecf0adf70
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterList/index.ts
@@ -0,0 +1 @@
+export { default } from './FilterList';
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterModal/FilterModal.css b/frontend/app/components/shared/Filters/FilterModal/FilterModal.css
new file mode 100644
index 000000000..463a7a2a9
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterModal/FilterModal.css
@@ -0,0 +1,37 @@
+.wrapper {
+ border-radius: 3px;
+ border: solid thin $gray-light;
+ padding: 20px;
+ overflow: hidden;
+ overflow-y: auto;
+ box-shadow: 0 2px 2px 0 $gray-light;
+}
+.optionItem {
+ white-space: nowrap;
+ text-overflow: ellipsis;
+ overflow: hidden;
+ &:hover {
+ background-color: $active-blue;
+ color: $teal !important;
+ & svg {
+ fill: $teal !important;
+ }
+ }
+}
+
+.filterSearchItem {
+ &:hover {
+ background-color: $active-blue;
+ color: $teal;
+
+ & svg {
+ fill: $teal;
+ }
+ }
+
+ & div {
+ white-space: nowrap;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ }
+}
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterModal/FilterModal.tsx b/frontend/app/components/shared/Filters/FilterModal/FilterModal.tsx
new file mode 100644
index 000000000..8b8e3e185
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterModal/FilterModal.tsx
@@ -0,0 +1,95 @@
+import React from 'react';
+import { Icon, Loader } from 'UI';
+import { connect } from 'react-redux';
+import cn from 'classnames';
+import stl from './FilterModal.css';
+import { filtersMap } from 'Types/filter/newFilter';
+
+interface Props {
+ filters: any,
+ onFilterClick?: (filter) => void,
+ filterSearchList: any,
+ metaOptions: any,
+ isMainSearch?: boolean,
+ fetchingFilterSearchList: boolean,
+ searchQuery?: string,
+}
+function FilterModal(props: Props) {
+ const {
+ filters,
+ metaOptions,
+ onFilterClick = () => null,
+ filterSearchList,
+ isMainSearch = false,
+ fetchingFilterSearchList,
+ searchQuery = '',
+ } = props;
+ const hasSearchQuery = searchQuery && searchQuery.length > 0;
+ const showSearchList = isMainSearch && searchQuery.length > 0;
+
+ const onFilterSearchClick = (filter) => {
+ const _filter = filtersMap[filter.type];
+ _filter.value = [filter.value];
+ onFilterClick(_filter);
+ }
+
+ return (
+
+ { showSearchList && (
+
+
+ { filterSearchList && Object.keys(filterSearchList).map((key, index) => {
+ const filter = filterSearchList[key];
+ const option = filtersMap[key];
+ return (
+
+
{option.label}
+
+ {filter.map((f, i) => (
+
onFilterSearchClick({ type: key, value: f.value })}
+ >
+
+
{f.value}
+
+ ))}
+
+
+ );
+ })}
+
+
+ )}
+
+ { !hasSearchQuery && (
+
+ {filters && Object.keys(filters).map((key) => (
+
+
{key}
+
+ {filters[key].map((filter: any) => (
+
onFilterClick(filter)}>
+
+ {filter.label}
+
+ ))}
+
+
+ ))}
+
+ )}
+
+ );
+}
+
+export default connect(state => ({
+ filters: state.getIn([ 'search', 'filterList' ]),
+ filterSearchList: state.getIn([ 'search', 'filterSearchList' ]),
+ metaOptions: state.getIn([ 'customFields', 'list' ]),
+ fetchingFilterSearchList: state.getIn([ 'search', 'fetchFilterSearch', 'loading' ]),
+}))(FilterModal);
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterModal/index.ts b/frontend/app/components/shared/Filters/FilterModal/index.ts
new file mode 100644
index 000000000..a8ab8d552
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterModal/index.ts
@@ -0,0 +1 @@
+export { default } from './FilterModal';
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterOperator/FilterOperator.css b/frontend/app/components/shared/Filters/FilterOperator/FilterOperator.css
new file mode 100644
index 000000000..2a1fedeab
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterOperator/FilterOperator.css
@@ -0,0 +1,19 @@
+.operatorDropdown {
+ font-weight: 400;
+ height: 26px;
+ min-width: 60px;
+ display: flex !important;
+ align-items: center;
+ justify-content: space-between;
+ padding: 0 8px !important;
+ font-size: 13px;
+ /* background-color: rgba(255, 255, 255, 0.8) !important; */
+ background-color: $gray-lightest !important;
+ border: solid thin #e9e9e9 !important;
+ border-radius: 4px !important;
+ color: $gray-darkest !important;
+ font-size: 14px !important;
+ &.ui.basic.button {
+ box-shadow: 0 0 0 1px rgba(62, 170, 175,36,38,.35) inset, 0 0 0 0 rgba(62, 170, 175,.15) inset !important;
+ }
+}
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterOperator/FilterOperator.tsx b/frontend/app/components/shared/Filters/FilterOperator/FilterOperator.tsx
new file mode 100644
index 000000000..ba2482ac8
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterOperator/FilterOperator.tsx
@@ -0,0 +1,29 @@
+import React from 'react';
+import cn from 'classnames';
+import { Dropdown, Icon } from 'UI';
+import stl from './FilterOperator.css';
+
+interface Props {
+ // filter: any; // event/filter
+ onChange: (e, { name, value }) => void;
+ className?: string;
+ options?: any;
+ value?: string;
+}
+function FilterOperator(props: Props) {
+ const { options, value, onChange, className = '' } = props;
+
+ return (
+ }
+ />
+ );
+}
+
+export default FilterOperator;
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterOperator/index.ts b/frontend/app/components/shared/Filters/FilterOperator/index.ts
new file mode 100644
index 000000000..9345f24f8
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterOperator/index.ts
@@ -0,0 +1 @@
+export { default } from './FilterOperator';
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterSelection/FilterSelection.tsx b/frontend/app/components/shared/Filters/FilterSelection/FilterSelection.tsx
new file mode 100644
index 000000000..d7279f949
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterSelection/FilterSelection.tsx
@@ -0,0 +1,52 @@
+import React, { useState } from 'react';
+import FilterModal from '../FilterModal';
+import LiveFilterModal from '../LiveFilterModal';
+import OutsideClickDetectingDiv from 'Shared/OutsideClickDetectingDiv';
+import { Icon } from 'UI';
+import { connect } from 'react-redux';
+
+interface Props {
+ filter: any; // event/filter
+ onFilterClick: (filter) => void;
+ children?: any;
+ isLive?: boolean;
+}
+function FilterSelection(props: Props) {
+ const { filter, onFilterClick, children, isLive = true } = props;
+ const [showModal, setShowModal] = useState(false);
+
+ return (
+
+
setTimeout(function() {
+ setShowModal(false)
+ }, 200)}
+ >
+ { children ? React.cloneElement(children, { onClick: (e) => {
+ e.stopPropagation();
+ e.preventDefault();
+ setShowModal(true);
+ }}) : (
+ setShowModal(true)}
+ >
+ {filter.label}
+
+
+ ) }
+
+ {showModal && (
+
+ { isLive ? : }
+
+ )}
+
+ );
+}
+
+export default connect(state => ({
+ isLive: state.getIn([ 'sessions', 'activeTab' ]).type === 'live',
+}), { })(FilterSelection);
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterSelection/index.ts b/frontend/app/components/shared/Filters/FilterSelection/index.ts
new file mode 100644
index 000000000..8c9764781
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterSelection/index.ts
@@ -0,0 +1 @@
+export { default } from './FilterSelection';
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterSource/FilterSource.css b/frontend/app/components/shared/Filters/FilterSource/FilterSource.css
new file mode 100644
index 000000000..6eb6c6a5b
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterSource/FilterSource.css
@@ -0,0 +1,10 @@
+.inputField {
+ display: inline-block;
+ margin-right: 10px;
+ border: solid thin $gray-light;
+ border-radius: 3px;
+ height: 26px;
+ background-color: $white;
+ padding: 0 5px;
+ max-width: 100px;
+}
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx b/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx
new file mode 100644
index 000000000..180f0f2a4
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterSource/FilterSource.tsx
@@ -0,0 +1,50 @@
+import { FilterType } from 'App/types/filter/filterType';
+import React, { useState, useEffect } from 'react';
+import stl from './FilterSource.css';
+
+interface Props {
+ filter: any,
+ onUpdate: (filter) => void;
+}
+function FilterSource(props: Props) {
+ const { filter } = props;
+ const [value, setValue] = useState(filter.source[0] || '');
+
+ const onChange = ({ target: { value, name } }) => {
+ props.onUpdate({ ...filter, [name]: [value] })
+ }
+
+ useEffect(() => {
+ setValue(filter.source[0] || '');
+ }, [filter])
+
+ useEffect(() => {
+ props.onUpdate({ ...filter, source: [value] })
+ }, [value])
+
+ const write = ({ target: { value, name } }) => setValue(value)
+
+ const renderFiled = () => {
+ switch(filter.sourceType) {
+ case FilterType.NUMBER:
+ return (
+
+ )
+ }
+ }
+
+ return (
+
+ { renderFiled()}
+
+ );
+}
+
+export default FilterSource;
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterSource/index.ts b/frontend/app/components/shared/Filters/FilterSource/index.ts
new file mode 100644
index 000000000..10da7cf7e
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterSource/index.ts
@@ -0,0 +1 @@
+export { default } from './FilterSource';
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterValue/FilterValue.tsx b/frontend/app/components/shared/Filters/FilterValue/FilterValue.tsx
new file mode 100644
index 000000000..80a1f705a
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterValue/FilterValue.tsx
@@ -0,0 +1,160 @@
+import React, { useState } from 'react';
+import FilterAutoComplete from '../FilterAutoComplete';
+import FilterAutoCompleteLocal from '../FilterAutoCompleteLocal';
+import { FilterKey, FilterCategory, FilterType } from 'Types/filter/filterType';
+import FilterValueDropdown from '../FilterValueDropdown';
+import FilterDuration from '../FilterDuration';
+
+interface Props {
+ filter: any;
+ onUpdate: (filter) => void;
+}
+function FilterValue(props: Props) {
+ const { filter } = props;
+ const [durationValues, setDurationValues] = useState({ minDuration: filter.value[0], maxDuration: filter.value[1] });
+ const showCloseButton = filter.value.length > 1;
+ const lastIndex = filter.value.length - 1;
+
+ const onAddValue = () => {
+ const newValue = filter.value.concat('');
+ props.onUpdate({ ...filter, value: newValue });
+ }
+
+ const onRemoveValue = (valueIndex) => {
+ const newValue = filter.value.filter((_, index) => index !== valueIndex);
+ props.onUpdate({ ...filter, value: newValue });
+ }
+
+ const onChange = (e, item, valueIndex) => {
+ const newValues = filter.value.map((_, _index) => {
+ if (_index === valueIndex) {
+ return item.value;
+ }
+ return _;
+ })
+ props.onUpdate({ ...filter, value: newValues })
+ }
+
+ const onDurationChange = (newValues) => {
+ console.log('durationValues', durationValues)
+ // setDurationValues({ ...durationValues });
+ setDurationValues({ ...durationValues, ...newValues });
+ }
+
+ const handleBlur = (e) => {
+ if (filter.type === FilterType.DURATION) {
+ const { maxDuration, minDuration, key } = filter;
+ if (maxDuration || minDuration) return;
+ if (maxDuration !== durationValues.maxDuration ||
+ minDuration !== durationValues.minDuration) {
+ props.onUpdate({ ...filter, value: [durationValues.minDuration, durationValues.maxDuration] });
+ }
+ }
+ }
+
+ const getParms = (key) => {
+ switch (filter.category) {
+ case FilterCategory.METADATA:
+ return { type: FilterKey.METADATA, key: key };
+ default:
+ return { type: filter.key };
+ }
+ }
+
+ const renderValueFiled = (value, valueIndex) => {
+ const showOrButton = valueIndex === lastIndex;
+ switch(filter.type) {
+ case FilterType.STRING:
+ return (
+ onRemoveValue(valueIndex)}
+ onSelect={(e, item) => onChange(e, item, valueIndex)}
+ icon={filter.icon}
+ />
+ )
+ case FilterType.DROPDOWN:
+ return (
+ onChange(e, { value }, valueIndex)}
+ />
+ )
+ case FilterType.ISSUE:
+ case FilterType.MULTIPLE_DROPDOWN:
+ return (
+ onChange(e, { value }, valueIndex)}
+ onAddValue={onAddValue}
+ onRemoveValue={() => onRemoveValue(valueIndex)}
+ showCloseButton={showCloseButton}
+ showOrButton={showOrButton}
+ />
+ )
+ case FilterType.DURATION:
+ return (
+
+ )
+ case FilterType.NUMBER:
+ return (
+ onChange(e, { value: e.target.value }, valueIndex)}
+ />
+ )
+ case FilterType.MULTIPLE:
+ return (
+ onRemoveValue(valueIndex)}
+ method={'GET'}
+ endpoint='/events/search'
+ params={getParms(filter.key)}
+ headerText={''}
+ // placeholder={''}
+ onSelect={(e, item) => onChange(e, item, valueIndex)}
+ icon={filter.icon}
+ />
+ )
+ }
+ }
+
+ return (
+
+ { filter.type === FilterType.DURATION ? (
+ renderValueFiled(filter.value, 0)
+ ) : (
+ filter.value && filter.value.map((value, valueIndex) => (
+
+ {renderValueFiled(value, valueIndex)}
+
+ ))
+ )}
+
+ );
+}
+
+export default FilterValue;
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterValue/index.ts b/frontend/app/components/shared/Filters/FilterValue/index.ts
new file mode 100644
index 000000000..a4e4a517e
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterValue/index.ts
@@ -0,0 +1 @@
+export { default } from './FilterValue';
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.css b/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.css
new file mode 100644
index 000000000..d93803884
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.css
@@ -0,0 +1,60 @@
+.wrapper {
+ border: solid thin $gray-light !important;
+ border-radius: 3px;
+ background-color: white !important;
+ display: flex;
+ align-items: center;
+ height: 26px;
+
+ & .right {
+ height: 24px;
+ display: flex;
+ align-items: stretch;
+ padding: 0;
+ background-color: $gray-lightest;
+ border-top-right-radius: 3px;
+ border-bottom-right-radius: 3px;
+ margin-left: auto;
+
+ & div {
+ /* background-color: red; */
+ border-left: solid thin $gray-light !important;
+ width: 28px;
+ cursor: pointer;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ &:last-child {
+ border-top-right-radius: 3px;
+ border-bottom-right-radius: 3px;
+ }
+ &:hover {
+ background-color: $gray-light;
+ }
+ }
+ }
+}
+.operatorDropdown {
+ font-weight: 400;
+ /* height: 30px; */
+ min-width: 60px;
+ display: flex !important;
+ align-items: center;
+ justify-content: space-between;
+ padding: 0 8px !important;
+ font-size: 13px;
+ height: 26px;
+ /* background-color: rgba(255, 255, 255, 0.8) !important; */
+ /* background-color: $gray-lightest !important; */
+ /* border: solid thin rgba(34, 36, 38, 0.15) !important; */
+ /* border-radius: 4px !important; */
+ color: $gray-darkest !important;
+ font-size: 14px !important;
+ &.ui.basic.button {
+ box-shadow: 0 0 0 1px rgba(62, 170, 175,36,38,.35) inset, 0 0 0 0 rgba(62, 170, 175,.15) inset !important;
+ }
+/*
+ & input {
+ padding: 0 8px !important;
+ } */
+}
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.tsx b/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.tsx
new file mode 100644
index 000000000..1381f509a
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterValueDropdown/FilterValueDropdown.tsx
@@ -0,0 +1,48 @@
+import React from 'react';
+import cn from 'classnames';
+import { Dropdown, Icon } from 'UI';
+import stl from './FilterValueDropdown.css';
+
+interface Props {
+ filter: any; // event/filter
+ // options: any[];
+ value: string;
+ onChange: (e, { name, value }) => void;
+ className?: string;
+ options: any[];
+ search?: boolean;
+ multiple?: boolean;
+ showCloseButton?: boolean;
+ showOrButton?: boolean;
+ onRemoveValue?: () => void;
+ onAddValue?: () => void;
+}
+function FilterValueDropdown(props: Props) {
+ const { filter, multiple = false, search = false, options, onChange, value, className = '', showCloseButton = true, showOrButton = true } = props;
+ // const options = []
+
+ return (
+
+
}
+ />
+
+ { showCloseButton &&
}
+ { showOrButton &&
or
}
+
+
+ );
+}
+
+export default FilterValueDropdown;
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/FilterValueDropdown/index.ts b/frontend/app/components/shared/Filters/FilterValueDropdown/index.ts
new file mode 100644
index 000000000..0a0240086
--- /dev/null
+++ b/frontend/app/components/shared/Filters/FilterValueDropdown/index.ts
@@ -0,0 +1 @@
+export { default } from './FilterValueDropdown';
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/LiveFilterModal/LiveFilterModal.css b/frontend/app/components/shared/Filters/LiveFilterModal/LiveFilterModal.css
new file mode 100644
index 000000000..463a7a2a9
--- /dev/null
+++ b/frontend/app/components/shared/Filters/LiveFilterModal/LiveFilterModal.css
@@ -0,0 +1,37 @@
+.wrapper {
+ border-radius: 3px;
+ border: solid thin $gray-light;
+ padding: 20px;
+ overflow: hidden;
+ overflow-y: auto;
+ box-shadow: 0 2px 2px 0 $gray-light;
+}
+.optionItem {
+ white-space: nowrap;
+ text-overflow: ellipsis;
+ overflow: hidden;
+ &:hover {
+ background-color: $active-blue;
+ color: $teal !important;
+ & svg {
+ fill: $teal !important;
+ }
+ }
+}
+
+.filterSearchItem {
+ &:hover {
+ background-color: $active-blue;
+ color: $teal;
+
+ & svg {
+ fill: $teal;
+ }
+ }
+
+ & div {
+ white-space: nowrap;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ }
+}
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/LiveFilterModal/LiveFilterModal.tsx b/frontend/app/components/shared/Filters/LiveFilterModal/LiveFilterModal.tsx
new file mode 100644
index 000000000..c75d0f4ca
--- /dev/null
+++ b/frontend/app/components/shared/Filters/LiveFilterModal/LiveFilterModal.tsx
@@ -0,0 +1,95 @@
+import React from 'react';
+import { Icon, Loader } from 'UI';
+import { connect } from 'react-redux';
+import cn from 'classnames';
+import stl from './LiveFilterModal.css';
+import { filtersMap } from 'Types/filter/newFilter';
+
+interface Props {
+ filters: any,
+ onFilterClick?: (filter) => void,
+ filterSearchList: any,
+ metaOptions: any,
+ isMainSearch?: boolean,
+ fetchingFilterSearchList: boolean,
+ searchQuery?: string,
+}
+function LiveFilterModal(props: Props) {
+ const {
+ filters,
+ metaOptions,
+ onFilterClick = () => null,
+ filterSearchList,
+ isMainSearch = false,
+ fetchingFilterSearchList,
+ searchQuery = '',
+ } = props;
+ const hasSearchQuery = searchQuery && searchQuery.length > 0;
+ const showSearchList = isMainSearch && searchQuery.length > 0;
+
+ const onFilterSearchClick = (filter) => {
+ const _filter = filtersMap[filter.type];
+ _filter.value = [filter.value];
+ onFilterClick(_filter);
+ }
+
+ return (
+
+ { showSearchList && (
+
+
+ { filterSearchList && Object.keys(filterSearchList).filter(i => filtersMap[i].isLive).map((key, index) => {
+ const filter = filterSearchList[key];
+ const option = filtersMap[key];
+ return (
+
+
{option.label}
+
+ {filter.map((f, i) => (
+
onFilterSearchClick({ type: key, value: f.value })}
+ >
+
+
{f.value}
+
+ ))}
+
+
+ );
+ })}
+
+
+ )}
+
+ { !hasSearchQuery && (
+
+ {filters && Object.keys(filters).map((key) => (
+
+
{key}
+
+ {filters[key].map((filter: any) => (
+
onFilterClick(filter)}>
+
+ {filter.label}
+
+ ))}
+
+
+ ))}
+
+ )}
+
+ );
+}
+
+export default connect(state => ({
+ filters: state.getIn([ 'search', 'filterListLive' ]),
+ filterSearchList: state.getIn([ 'search', 'filterSearchList' ]),
+ metaOptions: state.getIn([ 'customFields', 'list' ]),
+ fetchingFilterSearchList: state.getIn([ 'search', 'fetchFilterSearch', 'loading' ]),
+}))(LiveFilterModal);
\ No newline at end of file
diff --git a/frontend/app/components/shared/Filters/LiveFilterModal/index.ts b/frontend/app/components/shared/Filters/LiveFilterModal/index.ts
new file mode 100644
index 000000000..0171de0a6
--- /dev/null
+++ b/frontend/app/components/shared/Filters/LiveFilterModal/index.ts
@@ -0,0 +1 @@
+export { default } from './LiveFilterModal';
\ No newline at end of file
diff --git a/frontend/app/components/shared/LiveSearchBar/LiveSearchBar.tsx b/frontend/app/components/shared/LiveSearchBar/LiveSearchBar.tsx
new file mode 100644
index 000000000..f6d9122e3
--- /dev/null
+++ b/frontend/app/components/shared/LiveSearchBar/LiveSearchBar.tsx
@@ -0,0 +1,42 @@
+import React from 'react';
+import LiveSessionSearchField from 'Shared/LiveSessionSearchField';
+import { Button, Popup } from 'UI';
+import { clearSearch } from 'Duck/liveSearch';
+import { connect } from 'react-redux';
+
+interface Props {
+ clearSearch: () => void;
+ appliedFilter: any;
+}
+const LiveSearchBar = (props: Props) => {
+ const { appliedFilter } = props;
+ const hasFilters = appliedFilter && appliedFilter.filters && appliedFilter.filters.size > 0;
+ return (
+
+
+
+
+
+
props.clearSearch()}
+ >
+ Clear
+
+ }
+ content={'Clear Steps'}
+ size="tiny"
+ inverted
+ position="top right"
+ />
+
+
+ )
+}
+export default connect(state => ({
+ appliedFilter: state.getIn(['liveSearch', 'instance']),
+}), { clearSearch })(LiveSearchBar);
\ No newline at end of file
diff --git a/frontend/app/components/shared/LiveSearchBar/index.ts b/frontend/app/components/shared/LiveSearchBar/index.ts
new file mode 100644
index 000000000..32cdf44ce
--- /dev/null
+++ b/frontend/app/components/shared/LiveSearchBar/index.ts
@@ -0,0 +1 @@
+export { default } from './LiveSearchBar';
\ No newline at end of file
diff --git a/frontend/app/components/shared/LiveSessionSearch/LiveSessionSearch.tsx b/frontend/app/components/shared/LiveSessionSearch/LiveSessionSearch.tsx
new file mode 100644
index 000000000..ae7a60b28
--- /dev/null
+++ b/frontend/app/components/shared/LiveSessionSearch/LiveSessionSearch.tsx
@@ -0,0 +1,83 @@
+import React from 'react';
+import FilterList from 'Shared/Filters/FilterList';
+import { connect } from 'react-redux';
+import { edit, addFilter, addFilterByKeyAndValue } from 'Duck/liveSearch';
+import FilterSelection from 'Shared/Filters/FilterSelection';
+import { IconButton } from 'UI';
+import { FilterKey } from 'App/types/filter/filterType';
+
+interface Props {
+ appliedFilter: any;
+ edit: typeof edit;
+ addFilter: typeof addFilter;
+ addFilterByKeyAndValue: typeof addFilterByKeyAndValue;
+}
+function LiveSessionSearch(props: Props) {
+ const { appliedFilter } = props;
+ const hasEvents = appliedFilter.filters.filter(i => i.isEvent).size > 0;
+ const hasFilters = appliedFilter.filters.filter(i => !i.isEvent).size > 0;
+
+ const onAddFilter = (filter) => {
+ props.addFilter(filter);
+ }
+
+ const onUpdateFilter = (filterIndex, filter) => {
+ const newFilters = appliedFilter.filters.map((_filter, i) => {
+ if (i === filterIndex) {
+ return filter;
+ } else {
+ return _filter;
+ }
+ });
+
+ props.edit({
+ ...appliedFilter,
+ filters: newFilters,
+ });
+ }
+
+ const onRemoveFilter = (filterIndex) => {
+ const newFilters = appliedFilter.filters.filter((_filter, i) => {
+ return i !== filterIndex;
+ });
+
+ props.edit({ filters: newFilters, });
+ if (newFilters.size === 0) {
+ props.addFilterByKeyAndValue(FilterKey.USERID, '');
+ }
+ }
+
+ const onChangeEventsOrder = (e, { name, value }) => {
+ props.edit({
+ eventsOrder: value,
+ });
+ }
+
+ return (hasEvents || hasFilters) ? (
+
+ ) : <>>;
+}
+
+export default connect(state => ({
+ appliedFilter: state.getIn([ 'liveSearch', 'instance' ]),
+}), { edit, addFilter, addFilterByKeyAndValue })(LiveSessionSearch);
\ No newline at end of file
diff --git a/frontend/app/components/shared/LiveSessionSearch/index.ts b/frontend/app/components/shared/LiveSessionSearch/index.ts
new file mode 100644
index 000000000..cb3abb7be
--- /dev/null
+++ b/frontend/app/components/shared/LiveSessionSearch/index.ts
@@ -0,0 +1 @@
+export { default } from './LiveSessionSearch';
\ No newline at end of file
diff --git a/frontend/app/components/shared/LiveSessionSearchField/LiveSessionSearchField.css b/frontend/app/components/shared/LiveSessionSearchField/LiveSessionSearchField.css
new file mode 100644
index 000000000..6a3a268ba
--- /dev/null
+++ b/frontend/app/components/shared/LiveSessionSearchField/LiveSessionSearchField.css
@@ -0,0 +1,10 @@
+.searchField {
+ box-shadow: none !important;
+ & input {
+ box-shadow: none !important;
+ border-radius: 3 !important;
+ border: solid thin $gray-light !important;
+ height: 34px !important;
+ font-size: 16px;
+ }
+}
\ No newline at end of file
diff --git a/frontend/app/components/shared/LiveSessionSearchField/LiveSessionSearchField.tsx b/frontend/app/components/shared/LiveSessionSearchField/LiveSessionSearchField.tsx
new file mode 100644
index 000000000..e0266c7c1
--- /dev/null
+++ b/frontend/app/components/shared/LiveSessionSearchField/LiveSessionSearchField.tsx
@@ -0,0 +1,59 @@
+import React, { useState } from 'react';
+import { connect } from 'react-redux';
+import stl from './LiveSessionSearchField.css';
+import { Input } from 'UI';
+import LiveFilterModal from 'Shared/Filters/LiveFilterModal';
+import { fetchFilterSearch } from 'Duck/search';
+import { debounce } from 'App/utils';
+import { edit as editFilter, addFilterByKeyAndValue } from 'Duck/liveSearch';
+
+interface Props {
+ fetchFilterSearch: (query: any) => void;
+ editFilter: typeof editFilter;
+ addFilterByKeyAndValue: (key: string, value: string) => void;
+}
+function LiveSessionSearchField(props: Props) {
+ const debounceFetchFilterSearch = debounce(props.fetchFilterSearch, 1000)
+ const [showModal, setShowModal] = useState(false)
+ const [searchQuery, setSearchQuery] = useState('')
+
+ const onSearchChange = (e, { value }) => {
+ setSearchQuery(value)
+ debounceFetchFilterSearch({ q: value });
+ }
+
+ const onAddFilter = (filter) => {
+ props.addFilterByKeyAndValue(filter.key, filter.value)
+ }
+
+ return (
+
+
setShowModal(true) }
+ onBlur={ () => setTimeout(setShowModal, 200, false) }
+ onChange={ onSearchChange }
+ icon="search"
+ iconPosition="left"
+ placeholder={ 'Find live sessions by user or metadata.'}
+ fluid
+ id="search"
+ type="search"
+ autoComplete="off"
+ />
+
+ { showModal && (
+
+
+
+ )}
+
+ );
+}
+
+export default connect(null, { fetchFilterSearch, editFilter, addFilterByKeyAndValue })(LiveSessionSearchField);
\ No newline at end of file
diff --git a/frontend/app/components/shared/LiveSessionSearchField/index.ts b/frontend/app/components/shared/LiveSessionSearchField/index.ts
new file mode 100644
index 000000000..849b74d96
--- /dev/null
+++ b/frontend/app/components/shared/LiveSessionSearchField/index.ts
@@ -0,0 +1 @@
+export { default } from './LiveSessionSearchField';
\ No newline at end of file
diff --git a/frontend/app/components/shared/MainSearchBar/MainSearchBar.tsx b/frontend/app/components/shared/MainSearchBar/MainSearchBar.tsx
new file mode 100644
index 000000000..0ed764339
--- /dev/null
+++ b/frontend/app/components/shared/MainSearchBar/MainSearchBar.tsx
@@ -0,0 +1,42 @@
+import React from 'react';
+import SessionSearchField from 'Shared/SessionSearchField';
+import SavedSearch from 'Shared/SavedSearch';
+import { Button, Popup } from 'UI';
+import { clearSearch } from 'Duck/search';
+import { connect } from 'react-redux';
+
+interface Props {
+ clearSearch: () => void;
+ appliedFilter: any;
+}
+const MainSearchBar = (props: Props) => {
+ const { appliedFilter } = props;
+ const hasFilters = appliedFilter && appliedFilter.filters && appliedFilter.filters.size > 0;
+ return (
+
+
+
+
+
props.clearSearch()}
+ >
+ Clear
+
+ }
+ content={'Clear Steps'}
+ size="tiny"
+ inverted
+ position="top right"
+ />
+
+
+ )
+}
+export default connect(state => ({
+ appliedFilter: state.getIn(['search', 'instance']),
+}), { clearSearch })(MainSearchBar);
\ No newline at end of file
diff --git a/frontend/app/components/shared/MainSearchBar/index.ts b/frontend/app/components/shared/MainSearchBar/index.ts
new file mode 100644
index 000000000..2b3906c44
--- /dev/null
+++ b/frontend/app/components/shared/MainSearchBar/index.ts
@@ -0,0 +1 @@
+export { default } from './MainSearchBar';
\ No newline at end of file
diff --git a/frontend/app/components/shared/MetricsFilters/FilterItem/FilterItem.js b/frontend/app/components/shared/MetricsFilters/FilterItem/FilterItem.js
index aa235f2fe..249eb6ae6 100644
--- a/frontend/app/components/shared/MetricsFilters/FilterItem/FilterItem.js
+++ b/frontend/app/components/shared/MetricsFilters/FilterItem/FilterItem.js
@@ -55,7 +55,7 @@ const FilterItem = props => {
{f.text || f.value}
- props.removeFilter(f)} />
+ props.removeFilter(f)} />
))}
diff --git a/frontend/app/components/shared/SaveFilterButton/SaveFilterButton.tsx b/frontend/app/components/shared/SaveFilterButton/SaveFilterButton.tsx
new file mode 100644
index 000000000..51bed6e8c
--- /dev/null
+++ b/frontend/app/components/shared/SaveFilterButton/SaveFilterButton.tsx
@@ -0,0 +1,30 @@
+import React, { useState } from 'react';
+import { connect } from 'react-redux';
+import { save } from 'Duck/filters';
+import { IconButton } from 'UI';
+import SaveSearchModal from 'Shared/SaveSearchModal'
+
+interface Props {
+ filter: any;
+ savedSearch: any;
+}
+
+function SaveFilterButton(props: Props) {
+ const { savedSearch } = props;
+ const [showModal, setshowModal] = useState(false)
+ return (
+
+ { savedSearch.exists() ? (
+ setshowModal(true)} primaryText label="UPDATE SEARCH" icon="zoom-in" />
+ ) : (
+ setshowModal(true)} primaryText label="SAVE SEARCH" icon="zoom-in" />
+ )}
+ { showModal && ( setshowModal(false)} /> )}
+
+ );
+}
+
+export default connect(state => ({
+ filter: state.getIn([ 'search', 'instance' ]),
+ savedSearch: state.getIn([ 'search', 'savedSearch' ]),
+}), { save })(SaveFilterButton);
\ No newline at end of file
diff --git a/frontend/app/components/shared/SaveFilterButton/index.ts b/frontend/app/components/shared/SaveFilterButton/index.ts
new file mode 100644
index 000000000..9f22c4ecb
--- /dev/null
+++ b/frontend/app/components/shared/SaveFilterButton/index.ts
@@ -0,0 +1 @@
+export { default } from './SaveFilterButton'
\ No newline at end of file
diff --git a/frontend/app/components/shared/SaveFunnelButton/SaveFunnelButton.tsx b/frontend/app/components/shared/SaveFunnelButton/SaveFunnelButton.tsx
new file mode 100644
index 000000000..c68c451d0
--- /dev/null
+++ b/frontend/app/components/shared/SaveFunnelButton/SaveFunnelButton.tsx
@@ -0,0 +1,20 @@
+import React, { useState } from 'react';
+import { IconButton } from 'UI';
+import FunnelSaveModal from 'App/components/Funnels/FunnelSaveModal';
+
+export default function SaveFunnelButton() {
+ const [showModal, setshowModal] = useState(false)
+ return (
+
+ setshowModal(true)} primaryText label="SAVE FUNNEL" icon="funnel"
+ />
+
+ setshowModal(false)}
+ />
+
+ )
+}
diff --git a/frontend/app/components/shared/SaveFunnelButton/index.ts b/frontend/app/components/shared/SaveFunnelButton/index.ts
new file mode 100644
index 000000000..246df92ff
--- /dev/null
+++ b/frontend/app/components/shared/SaveFunnelButton/index.ts
@@ -0,0 +1 @@
+export { default } from './SaveFunnelButton';
\ No newline at end of file
diff --git a/frontend/app/components/shared/SaveSearchModal/SaveSearchModal.css b/frontend/app/components/shared/SaveSearchModal/SaveSearchModal.css
new file mode 100644
index 000000000..7dfc3e91b
--- /dev/null
+++ b/frontend/app/components/shared/SaveSearchModal/SaveSearchModal.css
@@ -0,0 +1,23 @@
+.modalHeader {
+ display: flex !important;
+ align-items: center;
+ justify-content: space-between;
+}
+
+.cancelButton {
+ background-color: transparent !important;
+ border: solid thin transparent !important;
+ color: $teal !important;
+ &:hover {
+ background-color: $active-blue !important;
+ }
+}
+
+.applyButton {
+ background-color: white !important;
+ border: solid thin $active-blue-border !important;
+ color: $teal !important;
+ &:hover {
+ background-color: $active-blue !important;
+ }
+}
\ No newline at end of file
diff --git a/frontend/app/components/shared/SaveSearchModal/SaveSearchModal.tsx b/frontend/app/components/shared/SaveSearchModal/SaveSearchModal.tsx
new file mode 100644
index 000000000..b579652e9
--- /dev/null
+++ b/frontend/app/components/shared/SaveSearchModal/SaveSearchModal.tsx
@@ -0,0 +1,128 @@
+import React, { useState } from 'react';
+import { connect } from 'react-redux';
+import { editSavedSearch as edit, save, remove } from 'Duck/search';
+import { Button, Modal, Form, Icon, Checkbox } from 'UI';
+import { confirm } from 'UI/Confirmation';
+import stl from './SaveSearchModal.css';
+import cn from 'classnames';
+
+interface Props {
+ filter: any;
+ loading: boolean;
+ edit: (filter: any) => void;
+ save: (searchId) => Promise;
+ show: boolean;
+ closeHandler: () => void;
+ savedSearch: any;
+ remove: (filterId: number) => Promise;
+ userId: number;
+}
+function SaveSearchModal(props: Props) {
+ const { savedSearch, filter, loading, show, closeHandler } = props;
+ const [name, setName] = useState(savedSearch ? savedSearch.name : '');
+
+ const onNameChange = ({ target: { value } }) => {
+ props.edit({ name: value });
+ // setName(value);
+ };
+
+ const onSave = () => {
+ const { filter, closeHandler } = props;
+ // if (name.trim() === '') return;
+ props.save(savedSearch.exists() ? savedSearch.searchId : null).then(function() {
+ // this.props.fetchFunnelsList();
+ closeHandler();
+ });
+ }
+
+ const onDelete = async () => {
+ if (await confirm({
+ header: 'Confirm',
+ confirmButton: 'Yes, Delete',
+ confirmation: `Are you sure you want to permanently delete this Saved serch?`,
+ })) {
+ props.remove(savedSearch.searchId).then(() => {
+ closeHandler();
+ });
+ }
+ }
+
+ const onChangeOption = (e, { checked, name }) => props.edit({ [ name ]: checked })
+
+
+ return (
+
+
+ { 'Save Search' }
+
+
+
+
+
+
+
+
+
+
+
+
+
props.edit({ 'isPublic' : !savedSearch.isPublic }) }
+ >
+
+ Team Visible
+
+
+
+
+ { savedSearch.exists() && Changes in filters will be updated.
}
+
+
+
+
+
+
+ { savedSearch && }
+
+
+ );
+}
+
+export default connect(state => ({
+ userId: state.getIn([ 'user', 'account', 'id' ]),
+ savedSearch: state.getIn([ 'search', 'savedSearch' ]),
+ filter: state.getIn(['search', 'instance']),
+ loading: state.getIn([ 'search', 'saveRequest', 'loading' ]) ||
+ state.getIn([ 'search', 'updateRequest', 'loading' ]),
+}), { edit, save, remove })(SaveSearchModal);
\ No newline at end of file
diff --git a/frontend/app/components/shared/SaveSearchModal/index.ts b/frontend/app/components/shared/SaveSearchModal/index.ts
new file mode 100644
index 000000000..6c5515e82
--- /dev/null
+++ b/frontend/app/components/shared/SaveSearchModal/index.ts
@@ -0,0 +1 @@
+export { default } from './SaveSearchModal'
\ No newline at end of file
diff --git a/frontend/app/components/shared/SavedSearch/SavedSearch.css b/frontend/app/components/shared/SavedSearch/SavedSearch.css
new file mode 100644
index 000000000..995afd92f
--- /dev/null
+++ b/frontend/app/components/shared/SavedSearch/SavedSearch.css
@@ -0,0 +1,4 @@
+.disabled {
+ opacity: 0.5 !important;
+ pointer-events: none;
+}
\ No newline at end of file
diff --git a/frontend/app/components/shared/SavedSearch/SavedSearch.tsx b/frontend/app/components/shared/SavedSearch/SavedSearch.tsx
new file mode 100644
index 000000000..1321563ae
--- /dev/null
+++ b/frontend/app/components/shared/SavedSearch/SavedSearch.tsx
@@ -0,0 +1,64 @@
+import React, { useState, useEffect } from 'react';
+import { Button, Icon } from 'UI';
+import SavedSearchDropdown from './components/SavedSearchDropdown';
+import { connect } from 'react-redux';
+import { fetchList as fetchListSavedSearch } from 'Duck/search';
+import OutsideClickDetectingDiv from 'Shared/OutsideClickDetectingDiv';
+import cn from 'classnames';
+import { list } from 'App/components/BugFinder/CustomFilters/filterModal.css';
+import stl from './SavedSearch.css';
+
+interface Props {
+ fetchListSavedSearch: () => void;
+ list: any;
+ savedSearch: any;
+}
+function SavedSearch(props) {
+ const { list } = props;
+ const { savedSearch } = props;
+ const [showMenu, setShowMenu] = useState(false)
+
+ useEffect(() => {
+ props.fetchListSavedSearch()
+ }, [])
+
+ return (
+ setShowMenu(false)}
+ >
+
+
+
+ { savedSearch.exists() && (
+
+
+ Viewing:
+ {savedSearch.name}
+
+ )}
+
+
+ { showMenu && (
+
+ setShowMenu(false)} />
+
+ )}
+
+
+ );
+}
+
+export default connect(state => ({
+ list: state.getIn([ 'search', 'list' ]),
+ savedSearch: state.getIn([ 'search', 'savedSearch' ])
+}), { fetchListSavedSearch })(SavedSearch);
\ No newline at end of file
diff --git a/frontend/app/components/shared/SavedSearch/components/SavedSearchDropdown/SavedSearchDropdown.css b/frontend/app/components/shared/SavedSearch/components/SavedSearchDropdown/SavedSearchDropdown.css
new file mode 100644
index 000000000..d4451d0bf
--- /dev/null
+++ b/frontend/app/components/shared/SavedSearch/components/SavedSearchDropdown/SavedSearchDropdown.css
@@ -0,0 +1,15 @@
+.wrapper {
+ position: relative;
+ display: inline-block;
+ z-index: 999;
+ display: flex;
+ flex-direction: column;
+ max-height: 250px;
+ overflow-y: auto;
+}
+
+.rowItem {
+ &:hover {
+ color: $teal;
+ }
+}
\ No newline at end of file
diff --git a/frontend/app/components/shared/SavedSearch/components/SavedSearchDropdown/SavedSearchDropdown.tsx b/frontend/app/components/shared/SavedSearch/components/SavedSearchDropdown/SavedSearchDropdown.tsx
new file mode 100644
index 000000000..9a3cf6ee7
--- /dev/null
+++ b/frontend/app/components/shared/SavedSearch/components/SavedSearchDropdown/SavedSearchDropdown.tsx
@@ -0,0 +1,72 @@
+import React from 'react';
+import stl from './SavedSearchDropdown.css';
+import cn from 'classnames';
+import { Icon } from 'UI';
+import { applySavedSearch, remove, edit } from 'Duck/search'
+import { connect } from 'react-redux';
+import { confirm } from 'UI/Confirmation';
+
+interface Props {
+ list: Array
+ applySavedSearch: (filter: any) => void
+ remove: (id: string) => Promise
+ onClose: () => void,
+ edit: (filter: any) => void,
+}
+
+function Row ({ name, isPublic, onClick, onClickEdit, onDelete }) {
+ return (
+
+
{name}
+
+ { isPublic &&
}
+ {/*
*/}
+ {/*
*/}
+
+
+ )
+}
+
+function SavedSearchDropdown(props: Props) {
+ const onClick = (item) => {
+ props.applySavedSearch(item)
+ // props.edit(item.filter)
+ props.onClose()
+ }
+
+ const onDelete = async (instance) => {
+ if (await confirm({
+ header: 'Confirm',
+ confirmButton: 'Yes, Delete',
+ confirmation: `Are you sure you want to permanently delete this search?`
+ })) {
+ props.remove(instance.alertId).then(() => {
+ // toggleForm(null, false);
+ });
+ }
+ }
+
+ const onClickEdit = (instance) => {
+ // toggleForm(instance);
+ }
+
+ return (
+
+ {props.list.map(item => (
+ onClick(item)}
+ onDelete={() => onDelete(item) }
+ onClickEdit={() => onClickEdit(item)}
+ isPublic={item.isPublic}
+ />
+ ))}
+
+ );
+}
+
+export default connect(null, { applySavedSearch, remove, edit })(SavedSearchDropdown);
\ No newline at end of file
diff --git a/frontend/app/components/shared/SavedSearch/components/SavedSearchDropdown/index.ts b/frontend/app/components/shared/SavedSearch/components/SavedSearchDropdown/index.ts
new file mode 100644
index 000000000..2fea67949
--- /dev/null
+++ b/frontend/app/components/shared/SavedSearch/components/SavedSearchDropdown/index.ts
@@ -0,0 +1 @@
+export { default } from './SavedSearchDropdown';
\ No newline at end of file
diff --git a/frontend/app/components/shared/SavedSearch/index.ts b/frontend/app/components/shared/SavedSearch/index.ts
new file mode 100644
index 000000000..71c14305d
--- /dev/null
+++ b/frontend/app/components/shared/SavedSearch/index.ts
@@ -0,0 +1 @@
+export { default } from './SavedSearch'
\ No newline at end of file
diff --git a/frontend/app/components/shared/SessionItem/Counter.tsx b/frontend/app/components/shared/SessionItem/Counter.tsx
index 7244d7035..5303974ae 100644
--- a/frontend/app/components/shared/SessionItem/Counter.tsx
+++ b/frontend/app/components/shared/SessionItem/Counter.tsx
@@ -1,5 +1,6 @@
import React, { useState, useEffect } from 'react'
import { Duration } from 'luxon';
+import { durationFormatted, formatTimeOrDate } from 'App/date';
interface Props {
startTime: any,
@@ -9,6 +10,7 @@ interface Props {
function Counter({ startTime, className }: Props) {
let intervalId;
const [duration, setDuration] = useState(new Date().getTime() - startTime)
+ const formattedDuration = durationFormatted(Duration.fromMillis(duration));
useEffect(() => {
if (!intervalId) {
@@ -21,7 +23,7 @@ function Counter({ startTime, className }: Props) {
return (
- {startTime && Duration.fromMillis(duration).toFormat('m:ss')}
+ {startTime && formattedDuration}
)
}
diff --git a/frontend/app/components/shared/SessionItem/SessionItem.js b/frontend/app/components/shared/SessionItem/SessionItem.js
index c1c20fe13..3abf12ca8 100644
--- a/frontend/app/components/shared/SessionItem/SessionItem.js
+++ b/frontend/app/components/shared/SessionItem/SessionItem.js
@@ -29,14 +29,6 @@ const Label = ({ label = '', color = 'color-gray-medium'}) => (
}), { toggleFavorite, setSessionPath })
@withRouter
export default class SessionItem extends React.PureComponent {
-
- replaySession = () => {
- const { history, session: { sessionId }, siteId, isAssist } = this.props;
- if (!isAssist) {
- this.props.setSessionPath(history.location.pathname)
- }
- history.push(withSiteId(sessionRoute(sessionId), siteId))
- }
// eslint-disable-next-line complexity
render() {
const {
@@ -61,7 +53,7 @@ export default class SessionItem extends React.PureComponent {
live
},
timezone,
- onUserClick,
+ onUserClick = () => null,
hasUserFilter = false,
disableUser = false
} = this.props;
@@ -122,9 +114,9 @@ export default class SessionItem extends React.PureComponent {
diff --git a/frontend/app/components/shared/SessionSearch/SessionSearch.tsx b/frontend/app/components/shared/SessionSearch/SessionSearch.tsx
new file mode 100644
index 000000000..46bdd845e
--- /dev/null
+++ b/frontend/app/components/shared/SessionSearch/SessionSearch.tsx
@@ -0,0 +1,94 @@
+import React from 'react';
+import { List } from 'immutable';
+import FilterList from 'Shared/Filters/FilterList';
+import FilterSelection from 'Shared/Filters/FilterSelection';
+import SaveFilterButton from 'Shared/SaveFilterButton';
+import { connect } from 'react-redux';
+import { IconButton, Button } from 'UI';
+import { edit, addFilter } from 'Duck/search';
+import SaveFunnelButton from '../SaveFunnelButton';
+
+interface Props {
+ appliedFilter: any;
+ edit: typeof edit;
+ addFilter: typeof addFilter;
+}
+function SessionSearch(props) {
+ const { appliedFilter } = props;
+ const hasEvents = appliedFilter.filters.filter(i => i.isEvent).size > 0;
+ const hasFilters = appliedFilter.filters.filter(i => !i.isEvent).size > 0;
+
+ const onAddFilter = (filter) => {
+ props.addFilter(filter);
+ // filter.value = [""]
+ // const newFilters = appliedFilter.filters.concat(filter);
+ // props.edit({
+ // ...appliedFilter.filter,
+ // filters: newFilters,
+ // });
+ }
+
+ const onUpdateFilter = (filterIndex, filter) => {
+ const newFilters = appliedFilter.filters.map((_filter, i) => {
+ if (i === filterIndex) {
+ return filter;
+ } else {
+ return _filter;
+ }
+ });
+
+ props.edit({
+ ...appliedFilter,
+ filters: newFilters,
+ });
+ }
+
+ const onRemoveFilter = (filterIndex) => {
+ const newFilters = appliedFilter.filters.filter((_filter, i) => {
+ return i !== filterIndex;
+ });
+
+ props.edit({
+ filters: newFilters,
+ });
+ }
+
+ const onChangeEventsOrder = (e, { name, value }) => {
+ props.edit({
+ eventsOrder: value,
+ });
+ }
+
+ return (hasEvents || hasFilters) ? (
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {/* */}
+
+
+
+ ) : <>>;
+}
+
+export default connect(state => ({
+ appliedFilter: state.getIn([ 'search', 'instance' ]),
+}), { edit, addFilter })(SessionSearch);
\ No newline at end of file
diff --git a/frontend/app/components/shared/SessionSearch/index.ts b/frontend/app/components/shared/SessionSearch/index.ts
new file mode 100644
index 000000000..d9c909f0d
--- /dev/null
+++ b/frontend/app/components/shared/SessionSearch/index.ts
@@ -0,0 +1 @@
+export { default } from './SessionSearch';
\ No newline at end of file
diff --git a/frontend/app/components/shared/SessionSearchField/SessionSearchField.css b/frontend/app/components/shared/SessionSearchField/SessionSearchField.css
new file mode 100644
index 000000000..6a3a268ba
--- /dev/null
+++ b/frontend/app/components/shared/SessionSearchField/SessionSearchField.css
@@ -0,0 +1,10 @@
+.searchField {
+ box-shadow: none !important;
+ & input {
+ box-shadow: none !important;
+ border-radius: 3 !important;
+ border: solid thin $gray-light !important;
+ height: 34px !important;
+ font-size: 16px;
+ }
+}
\ No newline at end of file
diff --git a/frontend/app/components/shared/SessionSearchField/SessionSearchField.tsx b/frontend/app/components/shared/SessionSearchField/SessionSearchField.tsx
new file mode 100644
index 000000000..a24e18ab0
--- /dev/null
+++ b/frontend/app/components/shared/SessionSearchField/SessionSearchField.tsx
@@ -0,0 +1,59 @@
+import React, { useState } from 'react';
+import { connect } from 'react-redux';
+import stl from './SessionSearchField.css';
+import { Input } from 'UI';
+import FilterModal from 'Shared/Filters/FilterModal';
+import { fetchFilterSearch } from 'Duck/search';
+import { debounce } from 'App/utils';
+import { edit as editFilter, addFilterByKeyAndValue } from 'Duck/search';
+
+interface Props {
+ fetchFilterSearch: (query: any) => void;
+ editFilter: typeof editFilter;
+ addFilterByKeyAndValue: (key: string, value: string) => void;
+}
+function SessionSearchField(props: Props) {
+ const debounceFetchFilterSearch = debounce(props.fetchFilterSearch, 1000)
+ const [showModal, setShowModal] = useState(false)
+ const [searchQuery, setSearchQuery] = useState('')
+
+ const onSearchChange = (e, { value }) => {
+ setSearchQuery(value)
+ debounceFetchFilterSearch({ q: value });
+ }
+
+ const onAddFilter = (filter) => {
+ props.addFilterByKeyAndValue(filter.key, filter.value)
+ }
+
+ return (
+
+
setShowModal(true) }
+ onBlur={ () => setTimeout(setShowModal, 200, false) }
+ onChange={ onSearchChange }
+ icon="search"
+ iconPosition="left"
+ placeholder={ 'Search sessions using any captured event (click, input, page, error...)'}
+ fluid
+ id="search"
+ type="search"
+ autoComplete="off"
+ />
+
+ { showModal && (
+
+
+
+ )}
+
+ );
+}
+
+export default connect(null, { fetchFilterSearch, editFilter, addFilterByKeyAndValue })(SessionSearchField);
\ No newline at end of file
diff --git a/frontend/app/components/shared/SessionSearchField/index.ts b/frontend/app/components/shared/SessionSearchField/index.ts
new file mode 100644
index 000000000..1f99e0c0b
--- /dev/null
+++ b/frontend/app/components/shared/SessionSearchField/index.ts
@@ -0,0 +1 @@
+export { default } from './SessionSearchField';
\ No newline at end of file
diff --git a/frontend/app/components/shared/SharePopup/SessionCopyLink/SessionCopyLink.tsx b/frontend/app/components/shared/SharePopup/SessionCopyLink/SessionCopyLink.tsx
new file mode 100644
index 000000000..296b411ab
--- /dev/null
+++ b/frontend/app/components/shared/SharePopup/SessionCopyLink/SessionCopyLink.tsx
@@ -0,0 +1,31 @@
+import React from 'react';
+import { IconButton } from 'UI';
+import copy from 'copy-to-clipboard';
+import { connectPlayer } from 'Player';
+
+interface Props {
+ content: string;
+ time: any;
+}
+function SessionCopyLink({ content = '', time }: Props) {
+ const [copied, setCopied] = React.useState(false)
+
+ const copyHandler = () => {
+ setCopied(true);
+ copy(window.location.origin + window.location.pathname + '?jumpto=' + Math.round(time));
+ setTimeout(() => {
+ setCopied(false);
+ }, 1000);
+ };
+
+ return (
+
+
+ { copied &&
Copied to Clipboard
}
+
+ )
+}
+
+export default connectPlayer(state => ({
+ time: state.time,
+}))(SessionCopyLink);
\ No newline at end of file
diff --git a/frontend/app/components/shared/SharePopup/SessionCopyLink/index.ts b/frontend/app/components/shared/SharePopup/SessionCopyLink/index.ts
new file mode 100644
index 000000000..c7c88f6de
--- /dev/null
+++ b/frontend/app/components/shared/SharePopup/SessionCopyLink/index.ts
@@ -0,0 +1 @@
+ export { default } from './SessionCopyLink';
\ No newline at end of file
diff --git a/frontend/app/components/shared/SharePopup/SharePopup.js b/frontend/app/components/shared/SharePopup/SharePopup.js
index 347a95733..43c960bba 100644
--- a/frontend/app/components/shared/SharePopup/SharePopup.js
+++ b/frontend/app/components/shared/SharePopup/SharePopup.js
@@ -5,6 +5,7 @@ import { Popup, Dropdown, Icon, IconButton } from 'UI';
import { pause } from 'Player';
import styles from './sharePopup.css';
import IntegrateSlackButton from '../IntegrateSlackButton/IntegrateSlackButton';
+import SessionCopyLink from './SessionCopyLink';
@connect(state => ({
channels: state.getIn([ 'slack', 'list' ]),
@@ -46,7 +47,7 @@ export default class SharePopup extends React.PureComponent {
changeChannel = (e, { value }) => this.setState({ channelId: value })
render() {
- const { trigger, loading, channels } = this.props;
+ const { trigger, loading, channels, showCopyLink = false } = this.props;
const { comment, isOpen, channelId } = this.state;
const options = channels.map(({ webhookId, name }) => ({ value: webhookId, text: name })).toJS();
@@ -62,9 +63,16 @@ export default class SharePopup extends React.PureComponent {
{ 'Comment' }
{ options.length === 0 ?
-
-
-
+ <>
+
+
+
+ { showCopyLink && (
+
+
+
+ )}
+ >
:
@@ -72,38 +80,43 @@ export default class SharePopup extends React.PureComponent {
name="message"
id="message"
cols="30"
- rows="6"
+ rows="4"
resize="none"
onChange={ this.editMessage }
value={ comment }
placeholder="Type here..."
className="p-4"
/>
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
}
}
on="click"
- position="top center"
+ position="top right"
className={ styles.popup }
hideOnScroll
/>
diff --git a/frontend/app/components/shared/SharePopup/sharePopup.css b/frontend/app/components/shared/SharePopup/sharePopup.css
index adbc29ff8..c28285457 100644
--- a/frontend/app/components/shared/SharePopup/sharePopup.css
+++ b/frontend/app/components/shared/SharePopup/sharePopup.css
@@ -35,13 +35,18 @@
border-radius: 3px;
resize: none;
}
+ margin-bottom: 14px;
}
.footer {
- display: flex;
- align-items: center;
- justify-content: space-between;
- padding: 10px 0;
+ /* display: flex; */
+ /* align-items: center; */
+ /* justify-content: space-between; */
+ /* padding: 10px 0; */
+ border-top: solid thin $gray-light;
+ margin: 0 -14px;
+ padding: 0 14px;
+ /* border-bottom: solid thin $gray-light; */
}
textarea {
diff --git a/frontend/app/components/shared/TrackingCodeModal/CopyButton/CopyButton.js b/frontend/app/components/shared/TrackingCodeModal/CopyButton/CopyButton.js
index d080b1a02..5e74e7b80 100644
--- a/frontend/app/components/shared/TrackingCodeModal/CopyButton/CopyButton.js
+++ b/frontend/app/components/shared/TrackingCodeModal/CopyButton/CopyButton.js
@@ -17,7 +17,7 @@ function CopyButton({ content, className }) {
className={ className }
onClick={ copyHandler }
>
- { copied ? 'copied' : 'copy' }
+ { copied ? 'Copied' : 'Copy' }
)
}
diff --git a/frontend/app/components/shared/TrackingCodeModal/TrackingCodeModal.js b/frontend/app/components/shared/TrackingCodeModal/TrackingCodeModal.js
index 26e2d709c..1a8eb17bf 100644
--- a/frontend/app/components/shared/TrackingCodeModal/TrackingCodeModal.js
+++ b/frontend/app/components/shared/TrackingCodeModal/TrackingCodeModal.js
@@ -41,7 +41,7 @@ class TrackingCodeModal extends React.PureComponent {
{ title } { subTitle && {subTitle}}
-
+
diff --git a/frontend/app/components/shared/WidgetSection/WidgetSection.js b/frontend/app/components/shared/WidgetSection/WidgetSection.js
index 3893ff392..45b339236 100644
--- a/frontend/app/components/shared/WidgetSection/WidgetSection.js
+++ b/frontend/app/components/shared/WidgetSection/WidgetSection.js
@@ -2,13 +2,13 @@ import React from 'react'
import cn from 'classnames'
import AddWidgets from '../AddWidgets';
-function WidgetSection({ className, title, children, description, type }) {
+function WidgetSection({ className, title, children, description, type, widgets = [] }) {
return (
{description &&
{description}
}
diff --git a/frontend/app/components/ui/Button/Button.js b/frontend/app/components/ui/Button/Button.js
index 7f2579114..b8599eab7 100644
--- a/frontend/app/components/ui/Button/Button.js
+++ b/frontend/app/components/ui/Button/Button.js
@@ -14,6 +14,7 @@ export default ({
success = false,
error = false,
minWidth,
+ disabled = false,
...props
}) => (
)
diff --git a/frontend/app/components/ui/IconButton/IconButton.js b/frontend/app/components/ui/IconButton/IconButton.js
index 53077f06c..eb708f21a 100644
--- a/frontend/app/components/ui/IconButton/IconButton.js
+++ b/frontend/app/components/ui/IconButton/IconButton.js
@@ -24,11 +24,13 @@ const IconButton = React.forwardRef(({
name,
disabled = false,
tooltip = false,
+ tooltipPosition = 'top',
compact = false,
...rest
}, ref) => (
{ list.map(item => (
- this.setActiveItem(item) }
- >
- { item.icon &&
}
-
{ item.name }
-
+ trigger={
+ !item.disabled && this.setActiveItem(item) }
+ >
+ { item.icon &&
}
+
{ item.name }
+
+ }
+ disabled={!item.disabled}
+ content={ `Coming soon` }
+ size="tiny"
+ inverted
+ position="top center"
+ />
))
}
diff --git a/frontend/app/components/ui/SegmentSelection/segmentSelection.css b/frontend/app/components/ui/SegmentSelection/segmentSelection.css
index 543016246..20007b010 100644
--- a/frontend/app/components/ui/SegmentSelection/segmentSelection.css
+++ b/frontend/app/components/ui/SegmentSelection/segmentSelection.css
@@ -3,7 +3,7 @@
align-items: center;
justify-content: space-around;
border: solid thin $gray-light;
- border-radius: 5px;
+ border-radius: 3px;
overflow: hidden;
& .item {
@@ -12,12 +12,13 @@
padding: 10px;
flex: 1;
text-align: center;
- border-right: solid thin $gray-light;
+ border-right: solid thin $teal;
cursor: pointer;
background-color: $gray-lightest;
display: flex;
align-items: center;
justify-content: center;
+ white-space: nowrap;
& span svg {
fill: $gray-medium;
@@ -61,4 +62,9 @@
.small .item {
padding: 4px 8px;
+}
+
+.extraSmall .item {
+ padding: 0 4px;
+ font-size: 12px;
}
\ No newline at end of file
diff --git a/frontend/app/components/ui/Tooltip/Tooltip.js b/frontend/app/components/ui/Tooltip/Tooltip.js
index 50e23647b..05a723f52 100644
--- a/frontend/app/components/ui/Tooltip/Tooltip.js
+++ b/frontend/app/components/ui/Tooltip/Tooltip.js
@@ -22,7 +22,7 @@ export default class Tooltip extends React.PureComponent {
}
render() {
- const { trigger, tooltip } = this.props;
+ const { trigger, tooltip, position } = this.props;
const { open } = this.state;
return (
{
+ return options.filter(option => keys.includes(option.key));
+};
+
+export const baseOperators = options.filter(({key}) => filterKeys.includes(key));
+export const stringOperators = options.filter(({key}) => stringFilterKeys.includes(key));
+export const targetOperators = options.filter(({key}) => targetFilterKeys.includes(key));
+export const booleanOperators = [
+ { key: 'true', text: 'true', value: 'true' },
+ { key: 'false', text: 'false', value: 'false' },
+]
+
+export const customOperators = [
+ { key: '=', text: '=', value: '=' },
+ { key: '<', text: '<', value: '<' },
+ { key: '>', text: '>', value: '>' },
+ { key: '<=', text: '<=', value: '<=' },
+ { key: '>=', text: '>=', value: '>=' },
+]
+
+export default {
+ options,
+ baseOperators,
+ stringOperators,
+ targetOperators,
+ booleanOperators,
+ customOperators,
+ getOperatorsByKeys,
+}
\ No newline at end of file
diff --git a/frontend/app/constants/index.js b/frontend/app/constants/index.js
index ba6f53cf0..239c7478a 100644
--- a/frontend/app/constants/index.js
+++ b/frontend/app/constants/index.js
@@ -10,6 +10,7 @@ export { default as alertConditions } from './alertConditions';
export { default as alertMetrics } from './alertMetrics';
export { default as regions } from './regions';
export { default as links } from './links';
+export { default as platformOptions } from './platformOptions';
export {
DAYS as SCHEDULE_DAYS,
HOURS as SCHEDULE_HOURS,
@@ -18,3 +19,4 @@ export {
SLACK as CHANNEL_SLACK,
WEBHOOK as CHANNEL_WEBHOOK
} from './schedule';
+export { default } from './filterOptions';
diff --git a/frontend/app/constants/platformOptions.js b/frontend/app/constants/platformOptions.js
new file mode 100644
index 000000000..46747ea2e
--- /dev/null
+++ b/frontend/app/constants/platformOptions.js
@@ -0,0 +1,5 @@
+export default [
+ { value: 'desktop', text: 'Desktop' },
+ { value: 'mobile', text: 'Mobile' },
+ { value: 'tablet', text: 'Tablet' },
+]
\ No newline at end of file
diff --git a/frontend/app/duck/alerts.js b/frontend/app/duck/alerts.js
index b93ff5878..3783474d4 100644
--- a/frontend/app/duck/alerts.js
+++ b/frontend/app/duck/alerts.js
@@ -1,9 +1,38 @@
import Alert from 'Types/alert';
+import { Map } from 'immutable';
import crudDuckGenerator from './tools/crudDuck';
+import withRequestState, { RequestTypes } from 'Duck/requestStateCreator';
+import { reduceDucks } from 'Duck/tools';
+const name = 'alert'
const idKey = 'alertId';
-const crudDuck = crudDuckGenerator('alert', Alert, { idKey: idKey });
+const crudDuck = crudDuckGenerator(name, Alert, { idKey: idKey });
export const { fetchList, init, edit, remove } = crudDuck.actions;
+const FETCH_TRIGGER_OPTIONS = new RequestTypes(`${name}/FETCH_TRIGGER_OPTIONS`);
+
+const initialState = Map({
+ definedPercent: 0,
+ triggerOptions: [],
+});
+
+const reducer = (state = initialState, action = {}) => {
+ switch (action.type) {
+ // case GENERATE_LINK.SUCCESS:
+ // return state.update(
+ // 'list',
+ // list => list
+ // .map(member => {
+ // if(member.id === action.id) {
+ // return Member({...member.toJS(), invitationLink: action.data.invitationLink })
+ // }
+ // return member
+ // })
+ // );
+ case FETCH_TRIGGER_OPTIONS.SUCCESS:
+ return state.set('triggerOptions', action.data.map(({ name, value }) => ({ text: name, value })));
+ }
+ return state;
+};
export function save(instance) {
return {
@@ -12,4 +41,12 @@ export function save(instance) {
};
}
-export default crudDuck.reducer;
+export function fetchTriggerOptions() {
+ return {
+ types: FETCH_TRIGGER_OPTIONS.toArray(),
+ call: client => client.get('/alerts/triggers'),
+ };
+}
+
+// export default crudDuck.reducer;
+export default reduceDucks(crudDuck, { initialState, reducer }).reducer;
diff --git a/frontend/app/duck/customField.js b/frontend/app/duck/customField.js
index c9b9607da..ad93c21c6 100644
--- a/frontend/app/duck/customField.js
+++ b/frontend/app/duck/customField.js
@@ -4,6 +4,9 @@ import { fetchListType, saveType, editType, initType, removeType } from './funcT
import { createItemInListUpdater, mergeReducers, success, array } from './funcTools/tools';
import { createEdit, createInit } from './funcTools/crud';
import { createRequestReducer } from './funcTools/request';
+import { addElementToFiltersMap, addElementToLiveFiltersMap } from 'Types/filter/newFilter';
+import { FilterCategory } from '../types/filter/filterType';
+import { refreshFilterOptions } from './search'
const name = "integration/variable";
const idKey = 'index';
@@ -25,7 +28,7 @@ const FETCH_SOURCES_SUCCESS = success(FETCH_SOURCES);
// const defaultMeta = [{key: 'user_id', index: 0}, {key: 'user_anonymous_id', index: 0}];
const initialState = Map({
- list: List([{key: 'user_id'}, {key: 'user_anonymous_id'}]),
+ list: List(),
instance: CustomField(),
sources: List(),
});
@@ -33,6 +36,10 @@ const initialState = Map({
const reducer = (state = initialState, action = {}) => {
switch(action.type) {
case FETCH_SUCCESS:
+ action.data.forEach(item => {
+ addElementToFiltersMap(FilterCategory.METADATA, item.key);
+ addElementToLiveFiltersMap(FilterCategory.METADATA, item.key);
+ });
return state.set('list', List(action.data).map(CustomField)) //.concat(defaultMeta))
case FETCH_SOURCES_SUCCESS:
return state.set('sources', List(action.data.map(({ value, ...item}) => ({label: value, key: value, ...item}))).map(CustomField))
@@ -53,13 +60,14 @@ const reducer = (state = initialState, action = {}) => {
export const edit = createEdit(name);
export const init = createInit(name);
-export const fetchList = (siteId) => {
- return {
+export const fetchList = (siteId) => (dispatch, getState) => {
+ return dispatch({
types: array(FETCH_LIST),
call: client => client.get(siteId ? `/${siteId}/metadata` : '/metadata'),
- }
+ }).then(() => {
+ dispatch(refreshFilterOptions());
+ });
}
-
export const fetchSources = () => {
return {
types: array(FETCH_SOURCES),
diff --git a/frontend/app/duck/customMetrics.js b/frontend/app/duck/customMetrics.js
new file mode 100644
index 000000000..f27429130
--- /dev/null
+++ b/frontend/app/duck/customMetrics.js
@@ -0,0 +1,242 @@
+import { List, Map } from 'immutable';
+import CustomMetric, { FilterSeries } from 'Types/customMetric'
+import { createFetch, fetchListType, fetchType, saveType, removeType, editType, createRemove, createEdit } from './funcTools/crud';
+import { createRequestReducer, ROOT_KEY } from './funcTools/request';
+import { array, request, success, failure, createListUpdater, mergeReducers } from './funcTools/tools';
+import Filter from 'Types/filter';
+import Session from 'Types/session';
+
+const name = "custom_metric";
+const idKey = "metricId";
+
+const FETCH_LIST = fetchListType(name);
+const FETCH_SESSION_LIST = fetchListType(`${name}/FETCH_SESSION_LIST`);
+const FETCH = fetchType(name);
+const SAVE = saveType(name);
+
+const ADD_SERIES = `${name}/ADD_SERIES`;
+const REMOVE_SERIES = `${name}/REMOVE_SERIES`;
+
+const ADD_SERIES_FILTER_FILTER = `${name}/ADD_SERIES_FILTER_FILTER`;
+const REMOVE_SERIES_FILTER_FILTER = `${name}/REMOVE_SERIES_FILTER_FILTER`;
+
+const EDIT_SERIES_FILTER = `${name}/EDIT_SERIES_FILTER`;
+const EDIT_SERIES_FILTER_FILTER = `${name}/EDIT_SERIES_FILTER_FILTER`;
+const UPDATE_ACTIVE_STATE = saveType(`${name}/UPDATE_ACTIVE_STATE`);
+const EDIT = editType(name);
+const INIT = `${name}/INIT`;
+const SET_ACTIVE_WIDGET = `${name}/SET_ACTIVE_WIDGET`;
+const REMOVE = removeType(name);
+const UPDATE_SERIES = `${name}/UPDATE_SERIES`;
+const SET_ALERT_METRIC_ID = `${name}/SET_ALERT_METRIC_ID`;
+
+function chartWrapper(chart = []) {
+ return chart.map(point => ({ ...point, count: Math.max(point.count, 0) }));
+}
+
+const updateItemInList = createListUpdater(idKey);
+const updateInstance = (state, instance) => state.getIn([ "instance", idKey ]) === instance[ idKey ]
+ ? state.mergeIn([ "instance" ], instance)
+ : state;
+
+const defaultInstance = CustomMetric({
+ name: 'New',
+ series: List([
+ {
+ name: 'Series 1',
+ filter: new Filter({ filters: List(), eventsOrder: 'then' }),
+ },
+ ])
+})
+
+const initialState = Map({
+ list: List(),
+ sessionList: List(),
+ alertMetricId: null,
+ instance: null,
+ activeWidget: null,
+});
+
+// Metric - Series - [] - filters
+function reducer(state = initialState, action = {}) {
+ switch (action.type) {
+ // Custom Metric
+ case INIT:
+ return state.set('instance', action.instance);
+ case EDIT:
+ return state.mergeIn([ 'instance' ], action.instance);
+ case ADD_SERIES:
+ const series = new FilterSeries(action.series);
+ return state.updateIn([ 'instance', 'series' ], list => list.push(series));
+ case REMOVE_SERIES:
+ return state.updateIn([ 'instance', 'series' ], list => list.delete(action.index));
+ case UPDATE_SERIES:
+ return state.mergeIn(['instance', 'series', action.index], action.series);
+
+ // Custom Metric - Series - Filters
+ case EDIT_SERIES_FILTER:
+ return state.mergeIn(['instance', 'series', action.seriesIndex, 'filter'], action.filter);
+
+ // Custom Metric - Series - Filter - Filters
+ case EDIT_SERIES_FILTER_FILTER:
+ return state.updateIn([ 'instance', 'series', action.seriesIndex, 'filter', 'filters' ], filters => filters.set(action.filterIndex, action.filter));
+ case ADD_SERIES_FILTER_FILTER:
+ return state.updateIn([ 'instance', 'series', action.seriesIndex, 'filter', 'filters' ], filters => filters.push(action.filter));
+ case REMOVE_SERIES_FILTER_FILTER:
+ return state.updateIn([ 'instance', 'series', action.seriesIndex, 'filter', 'filters' ], filters => filters.delete(action.index));
+
+
+
+ case success(SAVE):
+ return updateItemInList(updateInstance(state, action.data), action.data);
+ case success(REMOVE):
+ return state.update('list', list => list.filter(item => item.metricId !== action.id));
+ case success(FETCH):
+ return state.set("instance", CustomMetric(action.data));
+ case success(FETCH_LIST):
+ const { data } = action;
+ return state.set("list", List(data.map(CustomMetric)));
+ case success(FETCH_SESSION_LIST):
+ return state.set("sessionList", List(action.data.map(item => ({ ...item, sessions: item.sessions.map(Session) }))));
+ case SET_ACTIVE_WIDGET:
+ return state.set("activeWidget", action.widget).set('sessionList', List());
+ }
+ return state;
+}
+
+export default mergeReducers(
+ reducer,
+ createRequestReducer({
+ [ ROOT_KEY ]: FETCH_LIST,
+ fetch: FETCH,
+ save: SAVE,
+ fetchSessionList: FETCH_SESSION_LIST,
+ }),
+);
+
+export const edit = createEdit(name);
+export const remove = createRemove(name);
+
+export const updateSeries = (index, series) => ({
+ type: UPDATE_SERIES,
+ index,
+ series,
+});
+
+export function fetch(id) {
+ return {
+ id,
+ types: array(FETCH),
+ call: c => c.get(`/errors/${id}`),
+ }
+}
+
+export const save = (instance) => (dispatch, getState) => {
+ return dispatch({
+ types: SAVE.array,
+ call: client => client.post( `/${ instance.exists() ? name + 's/' + instance[idKey] : name + 's'}`, instance.toSaveData()),
+ }).then(() => {
+ dispatch(fetchList());
+ });
+};
+
+export function fetchList() {
+ return {
+ types: array(FETCH_LIST),
+ call: client => client.get(`/${name}s`),
+ };
+}
+
+export function setAlertMetricId(id) {
+ return {
+ type: SET_ALERT_METRIC_ID,
+ id,
+ };
+}
+
+export const addSeries = (series = null) => (dispatch, getState) => {
+ const instance = getState().getIn([ 'customMetrics', 'instance' ]);
+ const seriesIndex = instance.series.size;
+ const newSeries = series || {
+ name: `Series ${seriesIndex + 1}`,
+ filter: new Filter({ filters: [], eventsOrder: 'then' }),
+ };
+
+ dispatch({
+ type: ADD_SERIES,
+ series: newSeries,
+ });
+}
+
+export const removeSeries = (index) => (dispatch, getState) => {
+ dispatch({
+ type: REMOVE_SERIES,
+ index,
+ });
+}
+
+export const init = (instance = null, forceNull = false) => (dispatch, getState) => {
+ dispatch({
+ type: INIT,
+ instance: forceNull ? null : (instance || defaultInstance),
+ });
+}
+
+
+
+export const fetchSessionList = (params) => (dispatch, getState) => {
+ dispatch({
+ types: array(FETCH_SESSION_LIST),
+ call: client => client.post(`/custom_metrics/sessions`, { ...params }),
+ });
+}
+
+export const setActiveWidget = (widget) => (dispatch, getState) => {
+ return dispatch({
+ type: SET_ACTIVE_WIDGET,
+ widget,
+ });
+}
+
+export const updateActiveState = (metricId, state) => (dispatch, getState) => {
+ return dispatch({
+ types: UPDATE_ACTIVE_STATE.array,
+ call: client => client.post(`/custom_metrics/${metricId}/status`, { active: state }),
+ metricId
+ }).then(() => {
+ dispatch(fetchList());
+ });
+}
+
+export const editSeriesFilter = (seriesIndex, filter) => (dispatch, getState) => {
+ return dispatch({
+ type: EDIT_SERIES_FILTER,
+ seriesIndex,
+ filter,
+ });
+}
+
+export const addSeriesFilterFilter = (seriesIndex, filter) => (dispatch, getState) => {
+ return dispatch({
+ type: ADD_SERIES_FILTER_FILTER,
+ seriesIndex,
+ filter,
+ });
+}
+
+export const removeSeriesFilterFilter = (seriesIndex, filterIndex) => (dispatch, getState) => {
+ return dispatch({
+ type: REMOVE_SERIES_FILTER_FILTER,
+ seriesIndex,
+ index: filterIndex,
+ });
+}
+
+export const editSeriesFilterFilter = (seriesIndex, filterIndex, filter) => (dispatch, getState) => {
+ return dispatch({
+ type: EDIT_SERIES_FILTER_FILTER,
+ seriesIndex,
+ filterIndex,
+ filter,
+ });
+}
\ No newline at end of file
diff --git a/frontend/app/duck/filters.js b/frontend/app/duck/filters.js
index a06a0b6bc..132996797 100644
--- a/frontend/app/duck/filters.js
+++ b/frontend/app/duck/filters.js
@@ -7,8 +7,17 @@ import CustomFilter, { KEYS } from 'Types/filter/customFilter';
import withRequestState, { RequestTypes } from './requestStateCreator';
import { fetchList as fetchSessionList } from './sessions';
import { fetchList as fetchErrorsList } from './errors';
+import { fetchListType, fetchType, saveType, editType, initType, removeType } from './funcTools/crud/types';
import logger from 'App/logger';
+import { newFiltersList } from 'Types/filter'
+import NewFilter, { filtersMap } from 'Types/filter/newFilter';
+
+
+// for (var i = 0; i < newFiltersList.length; i++) {
+// filterOptions[newFiltersList[i].category] = newFiltersList.filter(filter => filter.category === newFiltersList[i].category)
+// }
+
const ERRORS_ROUTE = errorsRoute();
const FETCH_LIST = new RequestTypes('filters/FETCH_LIST');
@@ -16,6 +25,7 @@ const FETCH_FILTER_OPTIONS = new RequestTypes('filters/FETCH_FILTER_OPTIONS');
const SET_FILTER_OPTIONS = 'filters/SET_FILTER_OPTIONS';
const SAVE = new RequestTypes('filters/SAVE');
const REMOVE = new RequestTypes('filters/REMOVE');
+const EDIT = editType('funnel/EDIT');
const SET_SEARCH_QUERY = 'filters/SET_SEARCH_QUERY';
const SET_ACTIVE = 'filters/SET_ACTIVE';
@@ -35,7 +45,12 @@ const EDIT_ATTRIBUTE = 'filters/EDIT_ATTRIBUTE';
const REMOVE_ATTRIBUTE = 'filters/REMOVE_ATTRIBUTE';
const SET_ACTIVE_FLOW = 'filters/SET_ACTIVE_FLOW';
+const UPDATE_VALUE = 'filters/UPDATE_VALUE';
+
+const REFRESH_FILTER_OPTIONS = 'filters/REFRESH_FILTER_OPTIONS';
+
const initialState = Map({
+ instance: Filter(),
activeFilter: null,
list: List(),
appliedFilter: Filter(),
@@ -71,6 +86,8 @@ const updateList = (state, instance) => state.update('list', (list) => {
const reducer = (state = initialState, action = {}) => {
let optionsMap = null;
switch (action.type) {
+ case EDIT:
+ return state.mergeIn([ 'appliedFilter' ], action.instance);
case FETCH_FILTER_OPTIONS.SUCCESS:
optionsMap = state.getIn(['filterOptions', action.key]).map(i => i.value).toJS();
return state.mergeIn(['filterOptions', action.key], Set(action.data.filter(i => !optionsMap.includes(i.value))));
@@ -177,6 +194,8 @@ const reducer = (state = initialState, action = {}) => {
return state.removeIn([ 'appliedFilter', 'filters', action.index ]);
case SET_SEARCH_QUERY:
return state.set('searchQuery', action.query);
+ case UPDATE_VALUE:
+ return state.setIn([ 'appliedFilter', action.filterType, action.index, 'value' ], action.value);
default:
return state;
}
@@ -234,7 +253,7 @@ export function removeAttribute(index) {
export function fetchList(range) {
return {
types: FETCH_LIST.toArray(),
- call: client => client.get(`/flows${range ? '?range_value=' + range : ''}`),
+ call: client => client.get(`/saved_search`),
};
}
@@ -257,7 +276,7 @@ export function setFilterOption(key, filterOption) {
export function save(instance) {
return {
types: SAVE.toArray(),
- call: client => client.post('/filters', instance.toData()),
+ call: client => client.post('/saved_search', instance.toData()),
instance,
};
}
@@ -367,4 +386,21 @@ export function setSearchQuery(query) {
type: SET_SEARCH_QUERY,
query
}
+}
+
+export const edit = instance => {
+ return {
+ type: EDIT,
+ instance,
+ }
+};
+
+// filterType: 'events' or 'filters'
+export const updateValue = (filterType, index, value) => {
+ return {
+ type: UPDATE_VALUE,
+ filterType,
+ index,
+ value
+ }
}
\ No newline at end of file
diff --git a/frontend/app/duck/funnels.js b/frontend/app/duck/funnels.js
index 7dad8550a..a30916777 100644
--- a/frontend/app/duck/funnels.js
+++ b/frontend/app/duck/funnels.js
@@ -7,6 +7,7 @@ import { createItemInListUpdater, mergeReducers, success, array } from './funcTo
import { createRequestReducer } from './funcTools/request';
import { getDateRangeFromValue } from 'App/dateRange';
import { LAST_7_DAYS } from 'Types/app/period';
+import { filterMap as searchFilterMap } from './search';
const name = 'funnel';
const idKey = 'funnelId';
@@ -117,7 +118,7 @@ const reducer = (state = initialState, action = {}) => {
.set('issueTypesMap', tmpMap);
case FETCH_INSIGHTS_SUCCESS:
let stages = [];
- if (action.isRefresh) {
+ if (action.isRefresh) {
const activeStages = state.get('activeStages');
const oldInsights = state.get('insights');
const lastStage = action.data.stages[action.data.stages.length - 1]
@@ -265,15 +266,20 @@ export const fetchIssueTypes = () => {
}
}
-export const save = (instance) => {
- const url = instance.exists()
- ? `/funnels/${ instance[idKey] }`
+export const save = (instance) => (dispatch, getState) => {
+// export const save = (instance) => {
+ const filter = getState().getIn([ 'search', 'instance']).toData();
+ filter.filters = filter.filters.map(searchFilterMap);
+
+ const _instance = instance instanceof Funnel ? instance : Funnel(instance);
+ const url = _instance.exists()
+ ? `/funnels/${ _instance[idKey] }`
: `/funnels`;
- return {
- types: array(instance.exists() ? SAVE : UPDATE),
- call: client => client.post(url, instance.toData()),
- }
+ return dispatch({
+ types: array(_instance.exists() ? SAVE : UPDATE),
+ call: client => client.post(url, { ..._instance.toData(), filter }),
+ });
}
export const updateFunnelFilters = (funnelId, filter) => {
diff --git a/frontend/app/duck/index.js b/frontend/app/duck/index.js
index c8d7a7c65..5ad487c93 100644
--- a/frontend/app/duck/index.js
+++ b/frontend/app/duck/index.js
@@ -34,6 +34,9 @@ import errors from './errors';
import funnels from './funnels';
import config from './config';
import roles from './roles';
+import customMetrics from './customMetrics';
+import search from './search';
+import liveSearch from './liveSearch';
export default combineReducers({
jwt,
@@ -68,6 +71,9 @@ export default combineReducers({
funnels,
config,
roles,
+ customMetrics,
+ search,
+ liveSearch,
...integrations,
...sources,
});
diff --git a/frontend/app/duck/liveSearch.js b/frontend/app/duck/liveSearch.js
new file mode 100644
index 000000000..38f90d35b
--- /dev/null
+++ b/frontend/app/duck/liveSearch.js
@@ -0,0 +1,103 @@
+import { List, Map } from 'immutable';
+import { fetchType, editType } from './funcTools/crud';
+import { createRequestReducer } from './funcTools/request';
+import { mergeReducers } from './funcTools/tools';
+import Filter from 'Types/filter';
+import SavedFilter from 'Types/filter/savedFilter';
+import { fetchList as fetchSessionList } from './sessions';
+import { liveFiltersMap } from 'Types/filter/newFilter';
+import { filterMap, checkFilterValue, hasFilterApplied } from './search';
+import { FilterKey } from '../types/filter/filterType';
+
+const name = "liveSearch";
+const idKey = "searchId";
+
+const FETCH = fetchType(name);
+const EDIT = editType(name);
+const CLEAR_SEARCH = `${name}/CLEAR_SEARCH`;
+const APPLY = `${name}/APPLY`;
+const UPDATE_CURRENT_PAGE = `${name}/UPDATE_CURRENT_PAGE`;
+
+const initialState = Map({
+ list: List(),
+ instance: new Filter({ filters: [] }),
+ filterSearchList: {},
+ currentPage: 1,
+});
+
+
+function reducer(state = initialState, action = {}) {
+ switch (action.type) {
+ case EDIT:
+ return state.mergeIn(['instance'], action.instance);
+ case UPDATE_CURRENT_PAGE:
+ return state.set('currentPage', action.page);
+ }
+ return state;
+}
+
+export default mergeReducers(
+ reducer,
+ createRequestReducer({
+ fetch: FETCH,
+ }),
+);
+
+const reduceThenFetchResource = actionCreator => (...args) => (dispatch, getState) => {
+ dispatch(actionCreator(...args));
+ const filter = getState().getIn([ 'search', 'instance']).toData();
+ filter.filters = filter.filters.map(filterMap);
+
+ return dispatch(fetchSessionList(filter));
+};
+
+export const edit = reduceThenFetchResource((instance) => ({
+ type: EDIT,
+ instance,
+}));
+
+export const applyFilter = reduceThenFetchResource((filter, fromUrl=false) => ({
+ type: APPLY,
+ filter,
+ fromUrl,
+}));
+
+export const fetchSessions = (filter) => (dispatch, getState) => {
+ const _filter = filter ? filter : getState().getIn([ 'search', 'instance']);
+ return dispatch(applyFilter(_filter));
+};
+
+export const clearSearch = () => (dispatch, getState) => {
+ dispatch(edit(new Filter({ filters: [] })));
+ return dispatch({
+ type: CLEAR_SEARCH,
+ });
+}
+
+export const addFilter = (filter) => (dispatch, getState) => {
+ filter.value = checkFilterValue(filter.value);
+ const instance = getState().getIn([ 'liveSearch', 'instance']);
+
+ if (hasFilterApplied(instance.filters, filter)) {
+ // const index = instance.filters.findIndex(f => f.key === filter.key);
+ // const oldFilter = instance.filters.get(index);
+ // oldFilter.value = oldFilter.value.concat(filter.value);
+ // return dispatch(edit(instance.setIn(['filters', index], oldFilter)));
+ } else {
+ const filters = instance.filters.push(filter);
+ return dispatch(edit(instance.set('filters', filters)));
+ }
+}
+
+export const addFilterByKeyAndValue = (key, value) => (dispatch, getState) => {
+ let defaultFilter = liveFiltersMap[key];
+ defaultFilter.value = value;
+ dispatch(addFilter(defaultFilter));
+}
+
+export function updateCurrentPage(page) {
+ return {
+ type: UPDATE_CURRENT_PAGE,
+ page,
+ };
+}
\ No newline at end of file
diff --git a/frontend/app/duck/search.js b/frontend/app/duck/search.js
new file mode 100644
index 000000000..6cec66a27
--- /dev/null
+++ b/frontend/app/duck/search.js
@@ -0,0 +1,250 @@
+import { List, Map } from 'immutable';
+import { fetchListType, fetchType, saveType, removeType, editType, createRemove } from './funcTools/crud';
+import { createRequestReducer, ROOT_KEY } from './funcTools/request';
+import { array, request, success, failure, createListUpdater, mergeReducers } from './funcTools/tools';
+import Filter from 'Types/filter';
+import SavedFilter from 'Types/filter/savedFilter';
+import { errors as errorsRoute, isRoute } from "App/routes";
+import { fetchList as fetchSessionList } from './sessions';
+import { fetchList as fetchErrorsList } from './errors';
+import { FilterCategory, FilterKey } from '../types/filter/filterType';
+import { filtersMap, liveFiltersMap, generateFilterOptions, generateLiveFilterOptions } from 'Types/filter/newFilter';
+
+const ERRORS_ROUTE = errorsRoute();
+
+const name = "search";
+const idKey = "searchId";
+
+const FETCH_LIST = fetchListType(name);
+const FETCH_FILTER_SEARCH = fetchListType(`${name}/FILTER_SEARCH`);
+const FETCH = fetchType(name);
+const SAVE = saveType(name);
+const EDIT = editType(name);
+const EDIT_SAVED_SEARCH = editType(`${name}/SAVED_SEARCH`);
+const REMOVE = removeType(name);
+const ADD_FILTER = `${name}/ADD_FILTER`;
+const APPLY_SAVED_SEARCH = `${name}/APPLY_SAVED_SEARCH`;
+const CLEAR_SEARCH = `${name}/CLEAR_SEARCH`;
+const UPDATE = `${name}/UPDATE`;
+const APPLY = `${name}/APPLY`;
+const SET_ALERT_METRIC_ID = `${name}/SET_ALERT_METRIC_ID`;
+
+const REFRESH_FILTER_OPTIONS = 'filters/REFRESH_FILTER_OPTIONS';
+
+function chartWrapper(chart = []) {
+ return chart.map(point => ({ ...point, count: Math.max(point.count, 0) }));
+}
+
+const savedSearchIdKey = 'searchId'
+const updateItemInList = createListUpdater(savedSearchIdKey);
+const updateInstance = (state, instance) => state.getIn([ "savedSearch", savedSearchIdKey ]) === instance[savedSearchIdKey]
+ ? state.mergeIn([ "savedSearch" ], instance)
+ : state;
+
+const initialState = Map({
+ filterList: generateFilterOptions(filtersMap),
+ filterListLive: generateLiveFilterOptions(liveFiltersMap),
+ list: List(),
+ alertMetricId: null,
+ instance: new Filter({ filters: [] }),
+ savedSearch: new SavedFilter({}),
+ filterSearchList: {},
+});
+
+// Metric - Series - [] - filters
+function reducer(state = initialState, action = {}) {
+ switch (action.type) {
+ case REFRESH_FILTER_OPTIONS:
+ return state.set('filterList', generateFilterOptions(filtersMap))
+ .set('filterListLive', generateLiveFilterOptions(liveFiltersMap));
+ case EDIT:
+ return state.mergeIn(['instance'], action.instance);
+ case APPLY:
+ return action.fromUrl
+ ? state.set('instance', Filter(action.filter))
+ : state.mergeIn(['instance'], action.filter);
+ case success(SAVE):
+ return updateItemInList(updateInstance(state, action.data), action.data);
+ case success(REMOVE):
+ return state.update('list', list => list.filter(item => item.searchId !== action.id));
+ case success(FETCH):
+ return state.set("instance", action.data);
+ case success(FETCH_LIST):
+ const { data } = action;
+ return state.set("list", List(data.map(SavedFilter)));
+ case success(FETCH_FILTER_SEARCH):
+ const groupedList = action.data.reduce((acc, item) => {
+ const { projectId, type, value } = item;
+ const key = type;
+ if (!acc[key]) {
+ acc[key] = [];
+ }
+ acc[key].push({ projectId, value });
+ return acc;
+ }, {});
+ return state.set('filterSearchList', groupedList);
+ case APPLY_SAVED_SEARCH:
+ return state.set('savedSearch', action.filter);
+ case EDIT_SAVED_SEARCH:
+ return state.mergeIn([ 'savedSearch' ], action.instance);
+ }
+ return state;
+}
+
+export default mergeReducers(
+ reducer,
+ createRequestReducer({
+ [ ROOT_KEY ]: FETCH_LIST,
+ fetch: FETCH,
+ fetchFilterSearch: FETCH_FILTER_SEARCH
+ }),
+);
+
+const checkValues = (key, value) => {
+ if (key === FilterKey.DURATION) {
+ return value[0] === '' || value[0] === null ? [0, value[1]] : value;
+ }
+ return value.filter(i => i !== '' && i !== null);
+}
+
+export const checkFilterValue = (value) => {
+ return Array.isArray(value) ? (value.length === 0 ? [""] : value) : [value];
+}
+
+export const filterMap = ({category, value, key, operator, sourceOperator, source, custom, isEvent }) => ({
+ value: checkValues(key, value),
+ custom,
+ type: category === FilterCategory.METADATA ? FilterKey.METADATA : key,
+ operator,
+ source: category === FilterCategory.METADATA ? key : source,
+ sourceOperator,
+ isEvent
+});
+
+const reduceThenFetchResource = actionCreator => (...args) => (dispatch, getState) => {
+ dispatch(actionCreator(...args));
+ const filter = getState().getIn([ 'search', 'instance']).toData();
+ filter.filters = filter.filters.map(filterMap);
+
+ return isRoute(ERRORS_ROUTE, window.location.pathname)
+ ? dispatch(fetchErrorsList(filter))
+ : dispatch(fetchSessionList(filter));
+};
+
+export const edit = reduceThenFetchResource((instance) => ({
+ type: EDIT,
+ instance,
+}));
+
+export const remove = createRemove(name, (id) => `/saved_search/${id}`);
+
+export const applyFilter = reduceThenFetchResource((filter, fromUrl=false) => ({
+ type: APPLY,
+ filter,
+ fromUrl,
+}));
+
+export const applySavedSearch = (filter) => (dispatch, getState) => {
+ dispatch(edit({ filters: filter ? filter.filter.filters : [] }));
+ return dispatch({
+ type: APPLY_SAVED_SEARCH,
+ filter,
+ })
+};
+
+export const fetchSessions = (filter) => (dispatch, getState) => {
+ const _filter = filter ? filter : getState().getIn([ 'search', 'instance']);
+ return dispatch(applyFilter(_filter));
+};
+
+export const updateSeries = (index, series) => ({
+ type: UPDATE,
+ index,
+ series,
+});
+
+export function fetch(id) {
+ return {
+ id,
+ types: array(FETCH),
+ call: c => c.get(`/errors/${id}`),
+ }
+}
+
+export const save = (id) => (dispatch, getState) => {
+// export function save(id) {
+ const filter = getState().getIn([ 'search', 'instance']).toData();
+ filter.filters = filter.filters.map(filterMap);
+
+ const instance = getState().getIn([ 'search', 'savedSearch']).toData();
+ // instance = instance instanceof SavedFilter ? instance : new SavedFilter(instance);
+ return dispatch({
+ types: SAVE.array,
+ call: client => client.post(!id ? '/saved_search' : `/saved_search/${id}`, { ...instance, filter })
+ });
+}
+
+export function fetchList() {
+ return {
+ types: array(FETCH_LIST),
+ call: client => client.get(`/saved_search`),
+ };
+}
+
+export function setAlertMetricId(id) {
+ return {
+ type: SET_ALERT_METRIC_ID,
+ id,
+ };
+}
+
+export function fetchFilterSearch(params) {
+ return {
+ types: FETCH_FILTER_SEARCH.array,
+ call: client => client.get('/events/search', params),
+ params,
+ };
+}
+
+export const clearSearch = () => (dispatch, getState) => {
+ dispatch(applySavedSearch(new SavedFilter({})));
+ dispatch(edit(new Filter({ filters: [] })));
+ return dispatch({
+ type: CLEAR_SEARCH,
+ });
+}
+
+export const hasFilterApplied = (filters, filter) => {
+ return !filter.isEvent && filters.some(f => f.key === filter.key);
+}
+
+export const addFilter = (filter) => (dispatch, getState) => {
+ filter.value = checkFilterValue(filter.value);
+ const instance = getState().getIn([ 'search', 'instance']);
+
+ if (hasFilterApplied(instance.filters, filter)) {
+
+ } else {
+ const filters = instance.filters.push(filter);
+ return dispatch(edit(instance.set('filters', filters)));
+ }
+}
+
+export const addFilterByKeyAndValue = (key, value) => (dispatch, getState) => {
+ let defaultFilter = filtersMap[key];
+ defaultFilter.value = value;
+ dispatch(addFilter(defaultFilter));
+}
+
+export const editSavedSearch = instance => {
+ return {
+ type: EDIT_SAVED_SEARCH,
+ instance,
+ }
+};
+
+export const refreshFilterOptions = () => {
+ return {
+ type: REFRESH_FILTER_OPTIONS
+ }
+}
\ No newline at end of file
diff --git a/frontend/app/duck/sessions.js b/frontend/app/duck/sessions.js
index 977af85bb..2ab1e5a5a 100644
--- a/frontend/app/duck/sessions.js
+++ b/frontend/app/duck/sessions.js
@@ -74,7 +74,6 @@ const reducer = (state = initialState, action = {}) => {
case FETCH_ERROR_STACK.SUCCESS:
return state.set('errorStack', List(action.data.trace).map(ErrorStack)).set('sourcemapUploaded', action.data.sourcemapUploaded)
case FETCH_LIVE_LIST.SUCCESS:
- // const { sessions, total } = action.data;
const liveList = List(action.data).map(s => new Session({...s, live: true}));
return state
.set('liveSessions', liveList)
@@ -284,6 +283,13 @@ export const fetchList = (params = {}, clear = false, live = false) => (dispatch
})
}
+// export const fetchLiveList = (id) => (dispatch, getState) => {
+// return dispatch({
+// types: FETCH_LIVE_LIST.toArray(),
+// call: client => client.get('/assist/sessions'),
+// })
+// }
+
export function fetchErrorStackList(sessionId, errorId) {
return {
types: FETCH_ERROR_STACK.toArray(),
diff --git a/frontend/app/duck/user.js b/frontend/app/duck/user.js
index 13ff44af5..857fcd9e5 100644
--- a/frontend/app/duck/user.js
+++ b/frontend/app/duck/user.js
@@ -178,7 +178,7 @@ export function resendEmailVerification(email) {
export function updateAppearance(appearance) {
return {
types: UPDATE_APPEARANCE.toArray(),
- call: client => client.post('/account', {
+ call: client => client.post('/account/appearance', {
appearance: Record.isRecord(appearance) ? appearance.toData() : appearance
}),
appearance,
diff --git a/frontend/app/player/MessageDistributor/MessageDistributor.ts b/frontend/app/player/MessageDistributor/MessageDistributor.ts
index 69e4b4836..ed3c49cce 100644
--- a/frontend/app/player/MessageDistributor/MessageDistributor.ts
+++ b/frontend/app/player/MessageDistributor/MessageDistributor.ts
@@ -181,7 +181,6 @@ export default class MessageDistributor extends StatedScreen {
while (r.hasNext()) {
const next = r.next();
if (next != null) {
- this.lastMessageTime = next[0].time;
this.distributeMessage(next[0], next[1]);
msgs.push(next[0]);
}
@@ -326,6 +325,8 @@ export default class MessageDistributor extends StatedScreen {
/* Binded */
distributeMessage = (msg: Message, index: number): void => {
+ this.lastMessageTime = msg.time;
+
if ([
"mouse_move",
"mouse_click",
diff --git a/frontend/app/player/MessageDistributor/managers/AssistManager.ts b/frontend/app/player/MessageDistributor/managers/AssistManager.ts
index 2b0ac4e63..0b09469a1 100644
--- a/frontend/app/player/MessageDistributor/managers/AssistManager.ts
+++ b/frontend/app/player/MessageDistributor/managers/AssistManager.ts
@@ -1,5 +1,6 @@
+import type { Socket } from 'socket.io-client';
import type Peer from 'peerjs';
-import type { DataConnection, MediaConnection } from 'peerjs';
+import type { MediaConnection } from 'peerjs';
import type MessageDistributor from '../MessageDistributor';
import type { Message } from '../messages'
import store from 'App/store';
@@ -11,11 +12,12 @@ import MStreamReader from '../messages/MStreamReader';;
import JSONRawMessageReader from '../messages/JSONRawMessageReader'
export enum CallingState {
- Reconnecting,
+ NoCall,
+ Connecting,
Requesting,
- True,
- False,
-};
+ Reconnecting,
+ OnCall,
+}
export enum ConnectionStatus {
Connecting,
@@ -24,7 +26,13 @@ export enum ConnectionStatus {
Inactive,
Disconnected,
Error,
-};
+}
+
+export enum RemoteControlStatus {
+ Disabled = 0,
+ Requesting,
+ Enabled,
+}
export function getStatusText(status: ConnectionStatus): string {
@@ -47,13 +55,13 @@ export function getStatusText(status: ConnectionStatus): string {
export interface State {
calling: CallingState,
peerConnectionStatus: ConnectionStatus,
- remoteControl: boolean,
+ remoteControl: RemoteControlStatus,
}
export const INITIAL_STATE: State = {
- calling: CallingState.False,
+ calling: CallingState.NoCall,
peerConnectionStatus: ConnectionStatus.Connecting,
- remoteControl: false,
+ remoteControl: RemoteControlStatus.Disabled,
}
const MAX_RECONNECTION_COUNT = 4;
@@ -85,201 +93,121 @@ export default class AssistManager {
return `${this.session.projectKey}-${this.session.sessionId}`
}
- private peer: Peer | null = null;
- connectionAttempts: number = 0;
- private peeropened: boolean = false;
- connect() {
- if (this.peer != null) {
- console.error("AssistManager: trying to connect more than once");
- return;
+ private onVisChange = () => {
+ let inactiveTimeout: ReturnType | undefined
+ if (document.hidden) {
+ inactiveTimeout = setTimeout(() => {
+ if (document.hidden && getState().calling === CallingState.NoCall) {
+ this.socket?.close()
+ }
+ }, 15000)
+ } else {
+ inactiveTimeout && clearTimeout(inactiveTimeout)
+ this.socket?.open()
}
- this.setStatus(ConnectionStatus.Connecting)
- // @ts-ignore
- const urlObject = new URL(window.ENV.API_EDP)
- import('peerjs').then(({ default: Peer }) => {
- if (this.closed) {return}
- const _config = {
- host: urlObject.hostname,
- path: '/assist',
- port: urlObject.port === "" ? (location.protocol === 'https:' ? 443 : 80 ): parseInt(urlObject.port),
- }
-
- if (this.config) {
- _config['config'] = {
- iceServers: this.config,
- sdpSemantics: 'unified-plan',
- iceTransportPolicy: 'relay',
- };
- }
-
- const peer = new Peer(_config);
- this.peer = peer;
- peer.on('error', e => {
- if (e.type !== 'peer-unavailable') {
- console.warn("AssistManager PeerJS peer error: ", e.type, e)
- }
- if (['peer-unavailable', 'network', 'webrtc'].includes(e.type)) {
- if (this.peer) {
- this.setStatus(this.connectionAttempts++ < MAX_RECONNECTION_COUNT
- ? ConnectionStatus.Connecting
- : ConnectionStatus.Disconnected);
- this.connectToPeer();
- }
- } else {
- console.error(`PeerJS error (on peer). Type ${e.type}`, e);
- this.setStatus(ConnectionStatus.Error)
- }
- })
- peer.on("open", () => {
- if (this.peeropened) { return; }
- this.peeropened = true;
- this.connectToPeer();
- });
- });
}
- private connectToPeer() {
- if (!this.peer) { return; }
- this.setStatus(ConnectionStatus.Connecting);
- const id = this.peerID;
- const conn = this.peer.connect(id, { serialization: "json", reliable: true});
- conn.on('open', () => {
- window.addEventListener("beforeunload", ()=>conn.open &&conn.send("unload"));
+ private socket: Socket | null = null
+ connect() {
+ const jmr = new JSONRawMessageReader()
+ const reader = new MStreamReader(jmr)
+ let waitingForMessages = true
+ let showDisconnectTimeout: ReturnType | undefined
+ import('socket.io-client').then(({ default: io }) => {
+ if (this.cleaned) { return }
+ if (this.socket) { this.socket.close() } // TODO: single socket connection
+ // @ts-ignore
+ const urlObject = new URL(window.ENV.API_EDP) // does it handle ssl automatically?
- //console.log("peer connected")
+ // @ts-ignore WTF, socket.io ???
+ const socket: Socket = this.socket = io(urlObject.origin, {
+ path: '/ws-assist/socket',
+ query: {
+ peerId: this.peerID,
+ identity: "agent",
+ //agentInfo: JSON.stringify({})
+ }
+ })
+ //socket.onAny((...args) => console.log(...args))
+ socket.on("connect", () => {
+ waitingForMessages = true
+ this.setStatus(ConnectionStatus.WaitingMessages)
+ })
+ socket.on("disconnect", () => {
+ this.toggleRemoteControl(false)
+ })
+ socket.on('messages', messages => {
+ showDisconnectTimeout && clearTimeout(showDisconnectTimeout);
+ jmr.append(messages) // as RawMessage[]
-
- if (getState().calling === CallingState.Reconnecting) {
- this._call()
- }
-
- let firstMessage = true;
-
- this.setStatus(ConnectionStatus.WaitingMessages)
-
- const jmr = new JSONRawMessageReader()
- const reader = new MStreamReader(jmr)
-
- conn.on('data', (data) => {
- this.disconnectTimeout && clearTimeout(this.disconnectTimeout);
-
-
- if (Array.isArray(data)) {
- jmr.append(data) // as RawMessage[]
- } else if (data instanceof ArrayBuffer) {
- //rawMessageReader.append(new Uint8Array(data))
- } else { return this.handleCommand(data); }
-
- if (firstMessage) {
- firstMessage = false;
+ if (waitingForMessages) {
+ waitingForMessages = false // TODO: more explicit
this.setStatus(ConnectionStatus.Connected)
+
+ // Call State
+ if (getState().calling === CallingState.Reconnecting) {
+ this._call() // reconnecting call (todo improve code separation)
+ }
}
for (let msg = reader.readNext();msg !== null;msg = reader.readNext()) {
//@ts-ignore
- this.md.distributeMessage(msg, msg._index);
+ this.md.distributeMessage(msg, msg._index)
}
- });
- });
+ })
+ socket.on("control_granted", id => {
+ this.toggleRemoteControl(id === socket.id)
+ })
+ socket.on("control_rejected", id => {
+ id === socket.id && this.toggleRemoteControl(false)
+ })
+ socket.on('SESSION_DISCONNECTED', e => {
+ waitingForMessages = true
+ showDisconnectTimeout = setTimeout(() => {
+ if (this.cleaned) { return }
+ this.setStatus(ConnectionStatus.Disconnected)
+ }, 12000)
+ if (getState().remoteControl === RemoteControlStatus.Requesting) {
+ this.toggleRemoteControl(false)
+ }
- const onDataClose = () => {
- this.onCallDisconnect()
- this.connectToPeer();
- }
+ // Call State
+ if (getState().calling === CallingState.OnCall) {
+ update({ calling: CallingState.Reconnecting })
+ }
+ })
+ socket.on('error', e => {
+ console.warn("Socket error: ", e )
+ this.setStatus(ConnectionStatus.Error);
+ this.toggleRemoteControl(false)
+ })
+ socket.on('call_end', this.onRemoteCallEnd)
+
+ document.addEventListener('visibilitychange', this.onVisChange)
- conn.on('close', onDataClose);// What case does it work ?
- conn.on("error", (e) => {
- this.setStatus(ConnectionStatus.Error);
})
}
-
- private get dataConnection(): DataConnection | undefined {
- return this.peer?.connections[this.peerID]?.find(c => c.type === 'data' && c.open);
- }
- private get callConnection(): MediaConnection | undefined {
- return this.peer?.connections[this.peerID]?.find(c => c.type === 'media' && c.open);
- }
- private send(data: any) {
- this.dataConnection?.send(data);
- }
-
-
- private forceCallEnd() {
- this.callConnection?.close();
- }
- private notifyCallEnd() {
- const dataConn = this.dataConnection;
- if (dataConn) {
- dataConn.send("call_end");
- }
- }
- private initiateCallEnd = () => {
- this.forceCallEnd();
- this.notifyCallEnd();
- this.localCallData && this.localCallData.onCallEnd();
- }
-
- private onTrackerCallEnd = () => {
- console.log('onTrackerCallEnd')
- this.forceCallEnd();
- if (getState().calling === CallingState.Requesting) {
- this.localCallData && this.localCallData.onReject();
- }
- this.localCallData && this.localCallData.onCallEnd();
- }
-
- private onCallDisconnect = () => {
- if (getState().calling === CallingState.True) {
- update({ calling: CallingState.Reconnecting });
- }
- }
-
-
- private disconnectTimeout: ReturnType | undefined;
- private closeDataConnectionTimeout: ReturnType | undefined;
- private handleCommand(command: string) {
- console.log("Data command", command)
- switch (command) {
- case "unload":
- //this.onTrackerCallEnd();
- this.closeDataConnectionTimeout = setTimeout(() => {
- this.onCallDisconnect()
- this.dataConnection?.close();
- }, 1500);
- this.disconnectTimeout = setTimeout(() => {
- this.onTrackerCallEnd();
- this.setStatus(ConnectionStatus.Disconnected);
- }, 15000); // TODO: more convenient way
- return;
- case "call_end":
- this.onTrackerCallEnd();
- return;
- case "call_error":
- this.onTrackerCallEnd();
- this.setStatus(ConnectionStatus.Error);
- return;
- }
- }
+ /* ==== Remote Control ==== */
private onMouseMove = (e: MouseEvent): void => {
- const data = this.md.getInternalCoordinates(e);
- this.send({ x: Math.round(data.x), y: Math.round(data.y) });
+ if (!this.socket) { return }
+ const data = this.md.getInternalCoordinates(e)
+ this.socket.emit("move", [ Math.round(data.x), Math.round(data.y) ])
}
-
private onWheel = (e: WheelEvent): void => {
e.preventDefault()
+ if (!this.socket) { return }
//throttling makes movements less smooth, so it is omitted
//this.onMouseMove(e)
- this.send({ type: "scroll", delta: [ e.deltaX, e.deltaY ]})
+ this.socket.emit("scroll", [ e.deltaX, e.deltaY ])
}
private onMouseClick = (e: MouseEvent): void => {
- const conn = this.dataConnection;
- if (!conn) { return; }
- const data = this.md.getInternalCoordinates(e);
+ if (!this.socket) { return; }
+ const data = this.md.getInternalViewportCoordinates(e);
// const el = this.md.getElementFromPoint(e); // requires requestiong node_id from domManager
const el = this.md.getElementFromInternalPoint(data)
if (el instanceof HTMLElement) {
@@ -287,25 +215,114 @@ export default class AssistManager {
el.oninput = e => e.preventDefault();
el.onkeydown = e => e.preventDefault();
}
- conn.send({ type: "click", x: Math.round(data.x), y: Math.round(data.y) });
+ this.socket.emit("click", [ Math.round(data.x), Math.round(data.y) ]);
}
- private toggleRemoteControl = (flag?: boolean) => {
- const state = getState().remoteControl;
- const newState = typeof flag === 'boolean' ? flag : !state;
- if (state === newState) { return }
+ private toggleRemoteControl(newState: boolean){
if (newState) {
- this.md.overlay.addEventListener("click", this.onMouseClick);
+ this.md.overlay.addEventListener("mousemove", this.onMouseMove)
+ this.md.overlay.addEventListener("click", this.onMouseClick)
this.md.overlay.addEventListener("wheel", this.onWheel)
- update({ remoteControl: true })
+ update({ remoteControl: RemoteControlStatus.Enabled })
} else {
- this.md.overlay.removeEventListener("click", this.onMouseClick);
- this.md.overlay.removeEventListener("wheel", this.onWheel);
- update({ remoteControl: false })
+ this.md.overlay.removeEventListener("mousemove", this.onMouseMove)
+ this.md.overlay.removeEventListener("click", this.onMouseClick)
+ this.md.overlay.removeEventListener("wheel", this.onWheel)
+ update({ remoteControl: RemoteControlStatus.Disabled })
}
}
- private localCallData: {
+ requestReleaseRemoteControl = () => {
+ if (!this.socket) { return }
+ const remoteControl = getState().remoteControl
+ if (remoteControl === RemoteControlStatus.Requesting) { return }
+ if (remoteControl === RemoteControlStatus.Disabled) {
+ update({ remoteControl: RemoteControlStatus.Requesting })
+ this.socket.emit("request_control")
+ // setTimeout(() => {
+ // if (getState().remoteControl !== RemoteControlStatus.Requesting) { return }
+ // this.socket?.emit("release_control")
+ // update({ remoteControl: RemoteControlStatus.Disabled })
+ // }, 8000)
+ } else {
+ this.socket.emit("release_control")
+ this.toggleRemoteControl(false)
+ }
+ }
+
+
+ /* ==== PeerJS Call ==== */
+
+ private _peer: Peer | null = null
+ private connectionAttempts: number = 0
+ private callConnection: MediaConnection | null = null
+ private getPeer(): Promise {
+ if (this._peer && !this._peer.disconnected) { return Promise.resolve(this._peer) }
+
+ // @ts-ignore
+ const urlObject = new URL(window.ENV.API_EDP)
+ return import('peerjs').then(({ default: Peer }) => {
+ if (this.cleaned) {return Promise.reject("Already cleaned")}
+ const peerOpts = {
+ host: urlObject.hostname,
+ path: '/assist',
+ port: urlObject.port === "" ? (location.protocol === 'https:' ? 443 : 80 ): parseInt(urlObject.port),
+ }
+ if (this.config) {
+ peerOpts['config'] = {
+ iceServers: this.config,
+ sdpSemantics: 'unified-plan',
+ iceTransportPolicy: 'relay',
+ };
+ }
+ const peer = this._peer = new Peer(peerOpts)
+ peer.on('error', e => {
+ if (e.type === 'disconnected') {
+ return peer.reconnect()
+ } else if (e.type !== 'peer-unavailable') {
+ console.error(`PeerJS error (on peer). Type ${e.type}`, e);
+ }
+
+ //call-reconnection connected
+ // if (['peer-unavailable', 'network', 'webrtc'].includes(e.type)) {
+ // this.setStatus(this.connectionAttempts++ < MAX_RECONNECTION_COUNT
+ // ? ConnectionStatus.Connecting
+ // : ConnectionStatus.Disconnected);
+ // Reconnect...
+ })
+
+ return new Promise(resolve => {
+ peer.on("open", () => resolve(peer))
+ })
+ });
+
+ }
+
+
+ private handleCallEnd() {
+ this.callArgs && this.callArgs.onCallEnd()
+ this.callConnection && this.callConnection.close()
+ update({ calling: CallingState.NoCall })
+ this.callArgs = null
+ }
+
+ private initiateCallEnd = () => {
+ this.socket?.emit("call_end")
+ this.handleCallEnd()
+ }
+
+ private onRemoteCallEnd = () => {
+ if (getState().calling === CallingState.Requesting) {
+ this.callArgs && this.callArgs.onReject()
+ this.callConnection && this.callConnection.close()
+ update({ calling: CallingState.NoCall })
+ this.callArgs = null
+ } else {
+ this.handleCallEnd()
+ }
+ }
+
+ private callArgs: {
localStream: LocalStream,
onStream: (s: MediaStream)=>void,
onCallEnd: () => void,
@@ -313,79 +330,79 @@ export default class AssistManager {
onError?: ()=> void
} | null = null
- call(localStream: LocalStream, onStream: (s: MediaStream)=>void, onCallEnd: () => void, onReject: () => void, onError?: ()=> void): { end: Function, toggleRemoteControl: Function } {
- this.localCallData = {
+ call(
+ localStream: LocalStream,
+ onStream: (s: MediaStream)=>void,
+ onCallEnd: () => void,
+ onReject: () => void,
+ onError?: ()=> void): { end: Function } {
+ this.callArgs = {
localStream,
onStream,
- onCallEnd: () => {
- onCallEnd();
- this.toggleRemoteControl(false);
- this.md.overlay.removeEventListener("mousemove", this.onMouseMove);
- this.md.overlay.removeEventListener("click", this.onMouseClick);
- update({ calling: CallingState.False });
- this.localCallData = null;
- },
+ onCallEnd,
onReject,
onError,
}
this._call()
return {
end: this.initiateCallEnd,
- toggleRemoteControl: this.toggleRemoteControl,
}
}
private _call() {
- if (!this.peer || !this.localCallData || ![CallingState.False, CallingState.Reconnecting].includes(getState().calling)) { return null; }
-
- update({ calling: CallingState.Requesting });
+ if (![CallingState.NoCall, CallingState.Reconnecting].includes(getState().calling)) { return }
+ update({ calling: CallingState.Connecting })
+ this.getPeer().then(peer => {
+ if (!this.callArgs) { return console.log("No call Args. Must not happen.") }
+ update({ calling: CallingState.Requesting })
- //console.log('calling...', this.localCallData.localStream)
-
- const call = this.peer.call(this.peerID, this.localCallData.localStream.stream);
- this.localCallData.localStream.onVideoTrack(vTrack => {
- const sender = call.peerConnection.getSenders().find(s => s.track?.kind === "video")
- if (!sender) {
- //logger.warn("No video sender found")
- return
- }
- //logger.log("sender found:", sender)
- sender.replaceTrack(vTrack)
- })
+ // TODO: in a proper way
+ this.socket && this.socket.emit("_agent_name", store.getState().getIn([ 'user', 'account', 'name']))
+
+ const call = this.callConnection = peer.call(this.peerID, this.callArgs.localStream.stream)
+ this.callArgs.localStream.onVideoTrack(vTrack => {
+ const sender = call.peerConnection.getSenders().find(s => s.track?.kind === "video")
+ if (!sender) {
+ console.warn("No video sender found")
+ return
+ }
+ //logger.log("sender found:", sender)
+ sender.replaceTrack(vTrack)
+ })
- call.on('stream', stream => {
- update({ calling: CallingState.True });
- this.localCallData && this.localCallData.onStream(stream);
- this.send({
- name: store.getState().getIn([ 'user', 'account', 'name']),
+ call.on('stream', stream => {
+ update({ calling: CallingState.OnCall })
+ this.callArgs && this.callArgs.onStream(stream)
+ });
+ //call.peerConnection.addEventListener("track", e => console.log('newtrack',e.track))
+
+ call.on("close", this.onRemoteCallEnd)
+ call.on("error", (e) => {
+ console.error("PeerJS error (on call):", e)
+ this.initiateCallEnd();
+ this.callArgs && this.callArgs.onError && this.callArgs.onError();
});
- this.md.overlay.addEventListener("mousemove", this.onMouseMove)
- this.md.overlay.addEventListener("click", this.onMouseClick)
- });
- //call.peerConnection.addEventListener("track", e => console.log('newtrack',e.track))
-
- call.on("close", this.localCallData.onCallEnd);
- call.on("error", (e) => {
- console.error("PeerJS error (on call):", e)
- this.initiateCallEnd();
- this.localCallData && this.localCallData.onError && this.localCallData.onError();
- });
-
- window.addEventListener("beforeunload", this.initiateCallEnd)
+ })
}
- closed = false
+
+ /* ==== Cleaning ==== */
+ private cleaned: boolean = false
clear() {
- this.closed =true
+ this.cleaned = true // sometimes cleaned before modules loaded
this.initiateCallEnd();
- if (this.peer) {
+ if (this._peer) {
console.log("destroying peer...")
- const peer = this.peer; // otherwise it calls reconnection on data chan close
- this.peer = null;
+ const peer = this._peer; // otherwise it calls reconnection on data chan close
+ this._peer = null;
peer.disconnect();
peer.destroy();
}
+ if (this.socket) {
+ this.socket.close()
+ document.removeEventListener('visibilitychange', this.onVisChange)
+ }
}
}
diff --git a/frontend/app/player/MessageDistributor/managers/AssistManager_old.ts b/frontend/app/player/MessageDistributor/managers/AssistManager_old.ts
deleted file mode 100644
index b901dc076..000000000
--- a/frontend/app/player/MessageDistributor/managers/AssistManager_old.ts
+++ /dev/null
@@ -1,486 +0,0 @@
-// import type Peer from 'peerjs';
-// import type { DataConnection, MediaConnection } from 'peerjs';
-// import type MessageDistributor from '../MessageDistributor';
-// import type { Message } from '../messages'
-// import store from 'App/store';
-// import type { LocalStream } from './LocalStream';
-// import { update, getState } from '../../store';
-// import { iceServerConfigFromString } from 'App/utils'
-
-
-// export enum CallingState {
-// Reconnecting,
-// Requesting,
-// True,
-// False,
-// };
-
-// export enum ConnectionStatus {
-// Connecting,
-// WaitingMessages,
-// Connected,
-// Inactive,
-// Disconnected,
-// Error,
-// };
-
-
-// export function getStatusText(status: ConnectionStatus): string {
-// switch(status) {
-// case ConnectionStatus.Connecting:
-// return "Connecting...";
-// case ConnectionStatus.Connected:
-// return "";
-// case ConnectionStatus.Inactive:
-// return "Client tab is inactive";
-// case ConnectionStatus.Disconnected:
-// return "Disconnected";
-// case ConnectionStatus.Error:
-// return "Something went wrong. Try to reload the page.";
-// case ConnectionStatus.WaitingMessages:
-// return "Connected. Waiting for the data... (The tab might be inactive)"
-// }
-// }
-
-// export interface State {
-// calling: CallingState,
-// peerConnectionStatus: ConnectionStatus,
-// remoteControl: boolean,
-// }
-
-// export const INITIAL_STATE: State = {
-// calling: CallingState.False,
-// peerConnectionStatus: ConnectionStatus.Connecting,
-// remoteControl: false,
-// }
-
-// const MAX_RECONNECTION_COUNT = 4;
-
-
-// function resolveURL(baseURL: string, relURL: string): string {
-// if (relURL.startsWith('#') || relURL === "") {
-// return relURL;
-// }
-// return new URL(relURL, baseURL).toString();
-// }
-
-
-// var match = /bar/.exec("foobar");
-// const re1 = /url\(("[^"]*"|'[^']*'|[^)]*)\)/g
-// const re2 = /@import "(.*?)"/g
-// function cssUrlsIndex(css: string): Array<[number, number]> {
-// const idxs: Array<[number, number]> = [];
-// const i1 = css.matchAll(re1);
-// // @ts-ignore
-// for (let m of i1) {
-// // @ts-ignore
-// const s: number = m.index + m[0].indexOf(m[1]);
-// const e: number = s + m[1].length;
-// idxs.push([s, e]);
-// }
-// const i2 = css.matchAll(re2);
-// // @ts-ignore
-// for (let m of i2) {
-// // @ts-ignore
-// const s = m.index + m[0].indexOf(m[1]);
-// const e = s + m[1].length;
-// idxs.push([s, e])
-// }
-// return idxs;
-// }
-// function unquote(str: string): [string, string] {
-// str = str.trim();
-// if (str.length <= 2) {
-// return [str, ""]
-// }
-// if (str[0] == '"' && str[str.length-1] == '"') {
-// return [ str.substring(1, str.length-1), "\""];
-// }
-// if (str[0] == '\'' && str[str.length-1] == '\'') {
-// return [ str.substring(1, str.length-1), "'" ];
-// }
-// return [str, ""]
-// }
-// function rewriteCSSLinks(css: string, rewriter: (rawurl: string) => string): string {
-// for (let idx of cssUrlsIndex(css)) {
-// const f = idx[0]
-// const t = idx[1]
-// const [ rawurl, q ] = unquote(css.substring(f, t));
-// css = css.substring(0,f) + q + rewriter(rawurl) + q + css.substring(t);
-// }
-// return css
-// }
-
-// function resolveCSS(baseURL: string, css: string): string {
-// return rewriteCSSLinks(css, rawurl => resolveURL(baseURL, rawurl));
-// }
-
-// export default class AssistManager {
-// constructor(private session, private md: MessageDistributor, private config) {}
-
-// private setStatus(status: ConnectionStatus) {
-// if (status === ConnectionStatus.Connecting) {
-// this.md.setMessagesLoading(true);
-// } else {
-// this.md.setMessagesLoading(false);
-// }
-// if (status === ConnectionStatus.Connected) {
-// this.md.display(true);
-// } else {
-// this.md.display(false);
-// }
-// update({ peerConnectionStatus: status });
-// }
-
-// private get peerID(): string {
-// return `${this.session.projectKey}-${this.session.sessionId}`
-// }
-
-// private peer: Peer | null = null;
-// connectionAttempts: number = 0;
-// private peeropened: boolean = false;
-// connect() {
-// if (this.peer != null) {
-// console.error("AssistManager: trying to connect more than once");
-// return;
-// }
-// this.setStatus(ConnectionStatus.Connecting)
-// import('peerjs').then(({ default: Peer }) => {
-// const _config = {
-// // @ts-ignore
-// host: new URL(window.ENV.API_EDP).host,
-// path: '/assist',
-// port: location.protocol === 'https:' ? 443 : 80,
-// }
-
-// if (this.config) {
-// _config['config'] = {
-// iceServers: this.config,
-// sdpSemantics: 'unified-plan',
-// iceTransportPolicy: 'relay',
-// };
-// }
-
-// const peer = new Peer(_config);
-// this.peer = peer;
-// peer.on('error', e => {
-// if (e.type !== 'peer-unavailable') {
-// console.warn("AssistManager PeerJS peer error: ", e.type, e)
-// }
-// if (['peer-unavailable', 'network', 'webrtc'].includes(e.type)) {
-// if (this.peer && this.connectionAttempts++ < MAX_RECONNECTION_COUNT) {
-// this.setStatus(ConnectionStatus.Connecting);
-// this.connectToPeer();
-// } else {
-// this.setStatus(ConnectionStatus.Disconnected);
-// this.dataCheckIntervalID && clearInterval(this.dataCheckIntervalID);
-// }
-// } else {
-// console.error(`PeerJS error (on peer). Type ${e.type}`, e);
-// this.setStatus(ConnectionStatus.Error)
-// }
-// })
-// peer.on("open", () => {
-// if (this.peeropened) { return; }
-// this.peeropened = true;
-// this.connectToPeer();
-// });
-// });
-// }
-
-// private dataCheckIntervalID: ReturnType | undefined;
-// private connectToPeer() {
-// if (!this.peer) { return; }
-// this.setStatus(ConnectionStatus.Connecting);
-// const id = this.peerID;
-// const conn = this.peer.connect(id, { serialization: 'json', reliable: true});
-// conn.on('open', () => {
-// window.addEventListener("beforeunload", ()=>conn.open &&conn.send("unload"));
-
-// //console.log("peer connected")
-
-
-// if (getState().calling === CallingState.Reconnecting) {
-// this._call()
-// }
-
-// let i = 0;
-// let firstMessage = true;
-
-// this.setStatus(ConnectionStatus.WaitingMessages)
-
-// conn.on('data', (data) => {
-// if (!Array.isArray(data)) { return this.handleCommand(data); }
-// this.disconnectTimeout && clearTimeout(this.disconnectTimeout);
-// if (firstMessage) {
-// firstMessage = false;
-// this.setStatus(ConnectionStatus.Connected)
-// }
-
-// let time = 0;
-// let ts0 = 0;
-// (data as Array).forEach(msg => {
-
-// // TODO: more appropriate way to do it.
-// if (msg._id === 60) {
-// // @ts-ignore
-// if (msg.name === 'src' || msg.name === 'href') {
-// // @ts-ignore
-// msg.value = resolveURL(msg.baseURL, msg.value);
-// // @ts-ignore
-// } else if (msg.name === 'style') {
-// // @ts-ignore
-// msg.value = resolveCSS(msg.baseURL, msg.value);
-// }
-// msg._id = 12;
-// } else if (msg._id === 61) { // "SetCSSDataURLBased"
-// // @ts-ignore
-// msg.data = resolveCSS(msg.baseURL, msg.data);
-// msg._id = 15;
-// } else if (msg._id === 67) { // "insert_rule"
-// // @ts-ignore
-// msg.rule = resolveCSS(msg.baseURL, msg.rule);
-// msg._id = 37;
-// }
-
-
-// msg.tp = ID_TP_MAP[msg._id]; // _id goes from tracker
-
-// if (msg.tp === "timestamp") {
-// ts0 = ts0 || msg.timestamp
-// time = msg.timestamp - ts0;
-// return;
-// }
-// const tMsg: TimedMessage = Object.assign(msg, {
-// time,
-// _index: i,
-// });
-// this.md.distributeMessage(tMsg, i++);
-// });
-// });
-// });
-
-
-// const onDataClose = () => {
-// this.onCallDisconnect()
-// //console.log('closed peer conn. Reconnecting...')
-// this.connectToPeer();
-// }
-
-// // this.dataCheckIntervalID = setInterval(() => {
-// // if (!this.dataConnection && getState().peerConnectionStatus === ConnectionStatus.Connected) {
-// // onDataClose();
-// // }
-// // }, 3000);
-// conn.on('close', onDataClose);// Does it work ?
-// conn.on("error", (e) => {
-// this.setStatus(ConnectionStatus.Error);
-// })
-// }
-
-
-// private get dataConnection(): DataConnection | undefined {
-// return this.peer?.connections[this.peerID]?.find(c => c.type === 'data' && c.open);
-// }
-
-// private get callConnection(): MediaConnection | undefined {
-// return this.peer?.connections[this.peerID]?.find(c => c.type === 'media' && c.open);
-// }
-
-// private send(data: any) {
-// this.dataConnection?.send(data);
-// }
-
-
-// private forceCallEnd() {
-// this.callConnection?.close();
-// }
-// private notifyCallEnd() {
-// const dataConn = this.dataConnection;
-// if (dataConn) {
-// dataConn.send("call_end");
-// }
-// }
-// private initiateCallEnd = () => {
-// this.forceCallEnd();
-// this.notifyCallEnd();
-// this.localCallData && this.localCallData.onCallEnd();
-// }
-
-// private onTrackerCallEnd = () => {
-// console.log('onTrackerCallEnd')
-// this.forceCallEnd();
-// if (getState().calling === CallingState.Requesting) {
-// this.localCallData && this.localCallData.onReject();
-// }
-// this.localCallData && this.localCallData.onCallEnd();
-// }
-
-// private onCallDisconnect = () => {
-// if (getState().calling === CallingState.True) {
-// update({ calling: CallingState.Reconnecting });
-// }
-// }
-
-
-// private disconnectTimeout: ReturnType | undefined;
-// private handleCommand(command: string) {
-// console.log("Data command", command)
-// switch (command) {
-// case "unload":
-// //this.onTrackerCallEnd();
-// this.onCallDisconnect()
-// this.dataConnection?.close();
-// this.disconnectTimeout = setTimeout(() => {
-// this.onTrackerCallEnd();
-// this.setStatus(ConnectionStatus.Disconnected);
-// }, 15000); // TODO: more convenient way
-// //this.dataConnection?.close();
-// return;
-// case "call_end":
-// this.onTrackerCallEnd();
-// return;
-// case "call_error":
-// this.onTrackerCallEnd();
-// this.setStatus(ConnectionStatus.Error);
-// return;
-// }
-// }
-
-// // private mmtid?:ReturnType
-// private onMouseMove = (e: MouseEvent): void => {
-// // this.mmtid && clearTimeout(this.mmtid)
-// // this.mmtid = setTimeout(() => {
-// const data = this.md.getInternalCoordinates(e);
-// this.send({ x: Math.round(data.x), y: Math.round(data.y) });
-// // }, 5)
-// }
-
-
-// // private wtid?: ReturnType
-// // private scrollDelta: [number, number] = [0,0]
-// private onWheel = (e: WheelEvent): void => {
-// e.preventDefault()
-// //throttling makes movements less smooth
-// // this.wtid && clearTimeout(this.wtid)
-// // this.scrollDelta[0] += e.deltaX
-// // this.scrollDelta[1] += e.deltaY
-// // this.wtid = setTimeout(() => {
-// this.send({ type: "scroll", delta: [ e.deltaX, e.deltaY ]})//this.scrollDelta });
-// this.onMouseMove(e)
-// // this.scrollDelta = [0,0]
-// // }, 20)
-// }
-
-// private onMouseClick = (e: MouseEvent): void => {
-// const conn = this.dataConnection;
-// if (!conn) { return; }
-// const data = this.md.getInternalCoordinates(e);
-// // const el = this.md.getElementFromPoint(e); // requires requestiong node_id from domManager
-// const el = this.md.getElementFromInternalPoint(data)
-// if (el instanceof HTMLElement) {
-// el.focus()
-// el.oninput = e => e.preventDefault();
-// el.onkeydown = e => e.preventDefault();
-// }
-// conn.send({ type: "click", x: Math.round(data.x), y: Math.round(data.y) });
-// }
-
-// private toggleRemoteControl = (flag?: boolean) => {
-// const state = getState().remoteControl;
-// const newState = typeof flag === 'boolean' ? flag : !state;
-// if (state === newState) { return }
-// if (newState) {
-// this.md.overlay.addEventListener("click", this.onMouseClick);
-// this.md.overlay.addEventListener("wheel", this.onWheel)
-// update({ remoteControl: true })
-// } else {
-// this.md.overlay.removeEventListener("click", this.onMouseClick);
-// this.md.overlay.removeEventListener("wheel", this.onWheel);
-// update({ remoteControl: false })
-// }
-// }
-
-// private localCallData: {
-// localStream: LocalStream,
-// onStream: (s: MediaStream)=>void,
-// onCallEnd: () => void,
-// onReject: () => void,
-// onError?: ()=> void
-// } | null = null
-
-// call(localStream: LocalStream, onStream: (s: MediaStream)=>void, onCallEnd: () => void, onReject: () => void, onError?: ()=> void): { end: Function, toggleRemoteControl: Function } {
-// this.localCallData = {
-// localStream,
-// onStream,
-// onCallEnd: () => {
-// onCallEnd();
-// this.toggleRemoteControl(false);
-// this.md.overlay.removeEventListener("mousemove", this.onMouseMove);
-// this.md.overlay.removeEventListener("click", this.onMouseClick);
-// update({ calling: CallingState.False });
-// this.localCallData = null;
-// },
-// onReject,
-// onError,
-// }
-// this._call()
-// return {
-// end: this.initiateCallEnd,
-// toggleRemoteControl: this.toggleRemoteControl,
-// }
-// }
-
-// private _call() {
-// if (!this.peer || !this.localCallData || ![CallingState.False, CallingState.Reconnecting].includes(getState().calling)) { return null; }
-
-// update({ calling: CallingState.Requesting });
-
-// //console.log('calling...', this.localCallData.localStream)
-
-// const call = this.peer.call(this.peerID, this.localCallData.localStream.stream);
-// this.localCallData.localStream.onVideoTrack(vTrack => {
-// const sender = call.peerConnection.getSenders().find(s => s.track?.kind === "video")
-// if (!sender) {
-// //logger.warn("No video sender found")
-// return
-// }
-// //logger.log("sender found:", sender)
-// sender.replaceTrack(vTrack)
-// })
-
-// call.on('stream', stream => {
-// update({ calling: CallingState.True });
-// this.localCallData && this.localCallData.onStream(stream);
-// this.send({
-// name: store.getState().getIn([ 'user', 'account', 'name']),
-// });
-
-// this.md.overlay.addEventListener("mousemove", this.onMouseMove)
-// // this.md.overlay.addEventListener("click", this.onMouseClick)
-// });
-// //call.peerConnection.addEventListener("track", e => console.log('newtrack',e.track))
-
-// call.on("close", this.localCallData.onCallEnd);
-// call.on("error", (e) => {
-// console.error("PeerJS error (on call):", e)
-// this.initiateCallEnd();
-// this.localCallData && this.localCallData.onError && this.localCallData.onError();
-// });
-
-// window.addEventListener("beforeunload", this.initiateCallEnd)
-// }
-
-// clear() {
-// this.initiateCallEnd();
-// this.dataCheckIntervalID && clearInterval(this.dataCheckIntervalID);
-// if (this.peer) {
-// //console.log("destroying peer...")
-// const peer = this.peer; // otherwise it calls reconnection on data chan close
-// this.peer = null;
-// peer.destroy();
-// }
-// }
-// }
-
-
diff --git a/frontend/app/player/Player.ts b/frontend/app/player/Player.ts
index f99434be5..67875b530 100644
--- a/frontend/app/player/Player.ts
+++ b/frontend/app/player/Player.ts
@@ -87,7 +87,7 @@ export default class Player extends MessageDistributor {
const diffTime = messagesLoading || cssLoading || disconnected
? 0
- : Math.max(animationCurrentTime - animationPrevTime, 0) * speed;
+ : Math.max(animationCurrentTime - animationPrevTime, 0) * (live ? 1 : speed);
let time = prevTime + diffTime;
diff --git a/frontend/app/player/singletone.js b/frontend/app/player/singletone.js
index 9d811023e..177bb0388 100644
--- a/frontend/app/player/singletone.js
+++ b/frontend/app/player/singletone.js
@@ -69,6 +69,7 @@ export const markElement = initCheck((...args) => instance.marker && instance.ma
export const scale = initCheck(() => instance.scale());
export const toggleInspectorMode = initCheck((...args) => instance.toggleInspectorMode(...args));
export const callPeer = initCheck((...args) => instance.assistManager.call(...args))
+export const requestReleaseRemoteControl = initCheck((...args) => instance.assistManager.requestReleaseRemoteControl(...args))
export const markTargets = initCheck((...args) => instance.markTargets(...args))
export const activeTarget = initCheck((...args) => instance.activeTarget(...args))
diff --git a/frontend/app/styles/main.css b/frontend/app/styles/main.css
index c77533f3c..f27155e36 100644
--- a/frontend/app/styles/main.css
+++ b/frontend/app/styles/main.css
@@ -106,4 +106,15 @@
opacity: .5;
}
+.form-group {
+ margin-bottom: 25px;
+ & label {
+ display: inline-block;
+ margin-bottom: 5px;
+ }
+}
+.disabled {
+ opacity: 0.4;
+ pointer-events: none;
+}
\ No newline at end of file
diff --git a/frontend/app/styles/semantic.css b/frontend/app/styles/semantic.css
index cf6034861..7fe14933b 100644
--- a/frontend/app/styles/semantic.css
+++ b/frontend/app/styles/semantic.css
@@ -313,4 +313,27 @@ a:hover {
.ui.toggle.checkbox {
min-height: 20px !important;
+}
+
+.ui.search.dropdown>input.search {
+ bottom: 0 !important;
+}
+
+.filterDropdown.ui.search.dropdown.active>input.search,
+.filterDropdown.ui.search.dropdown.visible>input.search {
+ border: solid thin $teal !important;
+ border-radius: 3px;
+ height: 26px;
+ border-top-left-radius: 3px;
+ border-bottom-left-radius: 3px;
+ border-top-right-radius: 0;
+ border-bottom-right-radius: 0;
+ whitespace: nowrap;
+}
+
+.filterDropdown.ui.search.dropdown>.text {
+ white-space: nowrap;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ margin-right: 15px;
}
\ No newline at end of file
diff --git a/frontend/app/svg/icons/close.svg b/frontend/app/svg/icons/close.svg
index e1a7e4bc2..ddb768e16 100644
--- a/frontend/app/svg/icons/close.svg
+++ b/frontend/app/svg/icons/close.svg
@@ -1,3 +1,4 @@
-