Merge pull request #229 from openreplay/dev

v1.3.6
This commit is contained in:
Shekar Siri 2021-12-04 03:06:18 +05:30 committed by GitHub
commit bf2c901733
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
212 changed files with 5524 additions and 3532 deletions

View file

@ -47,7 +47,15 @@ jobs:
#
# Getting the images to build
#
git diff --name-only HEAD HEAD~1 | grep backend/services | grep -vE ^ee/ | cut -d '/' -f3 | uniq > backend/images_to_build.txt
{
git diff --name-only HEAD HEAD~1 | grep backend/services | grep -vE ^ee/ | cut -d '/' -f3
git diff --name-only HEAD HEAD~1 | grep backend/pkg | grep -vE ^ee/ | cut -d '/' -f3 | uniq | while read -r pkg_name ; do
grep -rl "pkg/$pkg_name" backend/services | cut -d '/' -f3
done
} | uniq > backend/images_to_build.txt
[[ $(cat backend/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 0)
#
# Pushing image to registry

View file

@ -54,7 +54,8 @@
"S3_SECRET": "",
"invitation_link": "/api/users/invitation?token=%s",
"change_password_link": "/reset-password?invitation=%s&&pass=%s",
"version_number": "1.3.5"
"iosBucket": "openreplay-ios-images",
"version_number": "1.3.6"
},
"lambda_timeout": 150,
"lambda_memory_size": 400,

View file

@ -7,7 +7,7 @@ from chalicelib.blueprints import bp_authorizers
from chalicelib.blueprints import bp_core, bp_core_crons
from chalicelib.blueprints.app import v1_api
from chalicelib.blueprints import bp_core_dynamic, bp_core_dynamic_crons
from chalicelib.blueprints.subs import bp_dashboard,bp_insights
from chalicelib.blueprints.subs import bp_dashboard
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.helper import environ
@ -106,5 +106,4 @@ app.register_blueprint(bp_core_crons.app)
app.register_blueprint(bp_core_dynamic.app)
app.register_blueprint(bp_core_dynamic_crons.app)
app.register_blueprint(bp_dashboard.app)
app.register_blueprint(bp_insights.app)
app.register_blueprint(v1_api.app)

View file

@ -1,5 +1,3 @@
from chalicelib.utils.helper import environ
from chalice import Blueprint
from chalice import Response
@ -11,9 +9,10 @@ from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assig
log_tool_stackdriver, reset_password, sessions_favorite_viewed, \
log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, errors, sessions, \
log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, \
assist, heatmaps
assist, heatmaps, mobile
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import email_helper
from chalicelib.utils.helper import environ
app = Blueprint(__name__)
_overrides.chalice_app(app)
@ -897,3 +896,14 @@ def sessions_live_search(projectId, context):
def get_heatmaps_by_url(projectId, context):
data = app.current_request.json_body
return {"data": heatmaps.get_by_url(project_id=projectId, data=data)}
@app.route('/general_stats', methods=['GET'], authorizer=None)
def get_general_stats():
    """Unauthenticated endpoint returning the total number of recorded sessions.

    Returns:
        {"data": {"sessions": <int>}}
    """
    # Fix: the response key was "sessions:" (stray trailing colon inside the
    # string literal), which made the payload awkward for consumers.
    return {"data": {"sessions": sessions.count_all()}}
@app.route('/{projectId}/mobile/{sessionId}/urls', methods=['POST'])
def mobile_signe(projectId, sessionId, context):
    """Return pre-signed URLs for the requested iOS session asset keys."""
    body = app.current_request.json_body
    signed = mobile.sign_keys(project_id=projectId, session_id=sessionId, keys=body["keys"])
    return {"data": signed}

View file

@ -11,6 +11,8 @@ from chalicelib.core import signup
from chalicelib.core import tenants
from chalicelib.core import users
from chalicelib.core import webhook
from chalicelib.core import license
from chalicelib.core import assist
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import captcha
from chalicelib.utils import helper

View file

@ -1,69 +0,0 @@
from chalice import Blueprint
from chalicelib.utils import helper
from chalicelib import _overrides
from chalicelib.core import dashboard, insights
from chalicelib.core import metadata
app = Blueprint(__name__)
_overrides.chalice_app(app)
#
# @app.route('/{projectId}/dashboard/metadata', methods=['GET'])
# def get_metadata_map(projectId, context):
# metamap = []
# for m in metadata.get(project_id=projectId):
# metamap.append({"name": m["key"], "key": f"metadata{m['index']}"})
# return {"data": metamap}
#
#
@app.route('/{projectId}/insights/journey', methods=['GET', 'POST'])
def get_insights_journey(projectId, context):
    """User-journey graph for a project; merges JSON body with query-string args."""
    payload = app.current_request.json_body
    if payload is None:
        payload = {}
    query_args = dashboard.dashboard_args(app.current_request.query_params)
    return {"data": insights.get_journey(project_id=projectId, **{**payload, **query_args})}
@app.route('/{projectId}/insights/users_retention', methods=['GET', 'POST'])
def get_users_retention(projectId, context):
    """Weekly user-retention data for a project; merges JSON body with query args."""
    payload = app.current_request.json_body
    if payload is None:
        payload = {}
    query_args = dashboard.dashboard_args(app.current_request.query_params)
    return {"data": insights.get_retention(project_id=projectId, **{**payload, **query_args})}
#
#
# @app.route('/{projectId}/dashboard/{widget}/search', methods=['GET'])
# def get_dashboard_autocomplete(projectId, widget, context):
# params = app.current_request.query_params
# if params is None or params.get('q') is None or len(params.get('q')) == 0:
# return {"data": []}
# params['q'] = '^' + params['q']
#
# if widget in ['performance']:
# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
# platform=params.get('platform', None), performance=True)
# elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render',
# 'impacted_sessions_by_slow_pages', 'pages_response_time']:
# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
# platform=params.get('platform', None), pages_only=True)
# elif widget in ['resources_loading_time']:
# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
# platform=params.get('platform', None), performance=False)
# elif widget in ['time_between_events', 'events']:
# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
# platform=params.get('platform', None), performance=False, events_only=True)
# elif widget in ['metadata']:
# data = dashboard.search(params.get('q', ''), None, project_id=projectId,
# platform=params.get('platform', None), metadata=True, key=params.get("key"))
# else:
# return {"errors": [f"unsupported widget: {widget}"]}
# return {'data': data}

View file

@ -1,5 +1,6 @@
import requests
from chalicelib.core import projects, sessions, sessions_metas
from chalicelib.utils import pg_client, helper
from chalicelib.core import projects, sessions, sessions_metas
from chalicelib.utils import pg_client, helper
from chalicelib.utils.helper import environ

View file

@ -1,211 +0,0 @@
from chalicelib.core import sessions_metas
from chalicelib.utils import args_transformer
from chalicelib.utils import helper, dev
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
import math
from chalicelib.core.dashboard import __get_constraints, __get_constraint_values
def __transform_journey(rows):
nodes = []
links = []
for r in rows:
source = r["source_event"][r["source_event"].index("_"):]
target = r["target_event"][r["target_event"].index("_"):]
if source not in nodes:
nodes.append(source)
if target not in nodes:
nodes.append(target)
links.append({"source": nodes.index(source), "target": nodes.index(target), "value": r["value"]})
return {"nodes": nodes, "links": sorted(links, key=lambda x: x["value"], reverse=True)}
# Maximum number of sequential events kept per session when building journeys.
JOURNEY_DEPTH = 5
# Maps a journey event type to the table/column used to extract its label;
# "table_id" is the per-row identifier column of that table.
JOURNEY_TYPES = {
    "PAGES": {"table": "events.pages", "column": "base_path", "table_id": "message_id"},
    "CLICK": {"table": "events.clicks", "column": "label", "table_id": "message_id"},
    "VIEW": {"table": "events_ios.views", "column": "name", "table_id": "seq_index"},
    "EVENT": {"table": "events_common.customs", "column": "name", "table_id": "seq_index"}
}
@dev.timed
def get_journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args):
    # Build a user-journey edge list (source_event -> target_event with counts)
    # for a project, and shape it into a sankey graph via __transform_journey.
    #
    # NOTE(review): the TimeUTC.now() defaults are evaluated once at import
    # time (not per call), and filters=[] is a shared mutable default —
    # callers that rely on "now" should pass timestamps explicitly; confirm
    # and consider fixing.
    pg_sub_query_subset = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
                                            time_constraint=True)
    event_start = None
    # Default journey source: page visits.
    event_table = JOURNEY_TYPES["PAGES"]["table"]
    event_column = JOURNEY_TYPES["PAGES"]["column"]
    event_table_id = JOURNEY_TYPES["PAGES"]["table_id"]
    extra_values = {}
    for f in filters:
        if f["type"] == "START_POINT":
            # Journeys are truncated so they begin at the first event whose
            # value matches this filter (see the "mark"/"max" SQL wrappers).
            event_start = f["value"]
        elif f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
            # Switch the event source (PAGES/CLICK/VIEW/EVENT).
            # NOTE(review): event_table_id is NOT updated here, so VIEW/EVENT
            # still use "message_id" instead of "seq_index" — confirm.
            event_table = JOURNEY_TYPES[f["value"]]["table"]
            event_column = JOURNEY_TYPES[f["value"]]["column"]
        elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
            pg_sub_query_subset.append(f"sessions.user_id = %(user_id)s")
            extra_values["user_id"] = f["value"]
    with pg_client.PostgresClient() as cur:
        # Inner-to-outer: extract events with a LAG-based "new_session" flag,
        # number events per session_rank, optionally cut everything before the
        # START_POINT, pair consecutive events with LAG, then aggregate edges.
        pg_query = f"""SELECT source_event,
                              target_event,
                              MAX(target_id) max_target_id,
                              MAX(source_id) max_source_id,
                              count(*) AS value
                       FROM (SELECT event_number || '_' || value as target_event,
                                    message_id AS target_id,
                                    LAG(event_number || '_' || value, 1) OVER ( PARTITION BY session_rank ) AS source_event,
                                    LAG(message_id, 1) OVER ( PARTITION BY session_rank ) AS source_id
                             FROM (SELECT value,
                                          session_rank,
                                          message_id,
                                          ROW_NUMBER() OVER ( PARTITION BY session_rank ORDER BY timestamp ) AS event_number
                                   {f"FROM (SELECT * FROM (SELECT *, MIN(mark) OVER ( PARTITION BY session_id , session_rank ORDER BY timestamp ) AS max FROM (SELECT *, CASE WHEN value = %(event_start)s THEN timestamp ELSE NULL END as mark"
                                   if event_start else ""}
                                   FROM (SELECT session_id,
                                                message_id,
                                                timestamp,
                                                value,
                                                SUM(new_session) OVER (ORDER BY session_id, timestamp) AS session_rank
                                         FROM (SELECT *,
                                                      CASE
                                                          WHEN source_timestamp IS NULL THEN 1
                                                          ELSE 0 END AS new_session
                                               FROM (SELECT session_id,
                                                            {event_table_id} AS message_id,
                                                            timestamp,
                                                            {event_column} AS value,
                                                            LAG(timestamp)
                                                            OVER (PARTITION BY session_id ORDER BY timestamp) AS source_timestamp
                                                     FROM {event_table} INNER JOIN public.sessions USING (session_id)
                                                     WHERE {" AND ".join(pg_sub_query_subset)}
                                                    ) AS related_events) AS ranked_events) AS processed
                                   {") AS marked) AS maxed WHERE timestamp >= max) AS filtered" if event_start else ""}
                                  ) AS sorted_events
                             WHERE event_number <= %(JOURNEY_DEPTH)s) AS final
                       WHERE source_event IS NOT NULL
                         and target_event IS NOT NULL
                       GROUP BY source_event, target_event
                       ORDER BY value DESC
                       LIMIT 20;"""
        params = {"project_id": project_id, "startTimestamp": startTimestamp,
                  "endTimestamp": endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH,
                  **__get_constraint_values(args), **extra_values}
        # print(cur.mogrify(pg_query, params))
        cur.execute(cur.mogrify(pg_query, params))
        rows = cur.fetchall()
    return __transform_journey(rows)
def __compute_retention_percentage(rows):
if rows is None or len(rows) == 0:
return rows
t = -1
for r in rows:
if r["week"] == 0:
t = r["usersCount"]
r["percentage"] = r["usersCount"] / t
return rows
def __complete_retention(rows, start_date, end_date=None):
    # Fill gaps in the retention matrix so every cohort-week up to 10 weeks
    # (or until end_date) has a row; missing cells get a zero "neutral" row.
    # Mutates `rows` in place via insert/append and returns it.
    # NOTE(review): correctness depends on rows being ordered by
    # (firstConnexionWeek, week) as produced by get_retention — confirm.
    if rows is None or len(rows) == 0:
        return rows
    max_week = 10
    week = 0
    delta_date = 0
    while max_week > 0:
        start_date += TimeUTC.MS_WEEK
        if end_date is not None and start_date >= end_date:
            break
        delta = 0
        # Case 1: the whole cohort starting at `start_date` is missing —
        # insert neutral rows for every remaining week of that cohort.
        if delta_date + week >= len(rows) \
                or delta_date + week < len(rows) and rows[delta_date + week]["firstConnexionWeek"] > start_date:
            for i in range(max_week):
                if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date:
                    break
                neutral = {
                    "firstConnexionWeek": start_date,
                    "week": i,
                    "usersCount": 0,
                    "connectedUsers": [],
                    "percentage": 0
                }
                rows.insert(delta_date + week + i, neutral)
                delta = i
        # Case 2: the cohort exists but has holes — patch individual weeks.
        else:
            for i in range(max_week):
                if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date:
                    break
                neutral = {
                    "firstConnexionWeek": start_date,
                    "week": i,
                    "usersCount": 0,
                    "connectedUsers": [],
                    "percentage": 0
                }
                if delta_date + week + i < len(rows) \
                        and i != rows[delta_date + week + i]["week"]:
                    rows.insert(delta_date + week + i, neutral)
                elif delta_date + week + i >= len(rows):
                    rows.append(neutral)
                delta = i
        week += delta
        max_week -= 1
        delta_date += 1
    return rows
@dev.timed
def get_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[],
                  **args):
    """Weekly user-retention matrix: for each cohort (week of first connexion)
    count distinct users seen in each subsequent week (up to 10 weeks).

    NOTE(review): TimeUTC.now() defaults are evaluated at import time and
    filters=[] is a shared mutable default — callers should pass explicit
    timestamps; endTimestamp is recomputed below in any case.
    """
    startTimestamp = TimeUTC.trunc_week(startTimestamp)
    endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK
    pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
                                     time_constraint=True)
    with pg_client.PostgresClient() as cur:
        pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week,
                              FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week,
                              COUNT(DISTINCT connexions_list.user_id) AS users_count,
                              ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users
                       FROM (SELECT DISTINCT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week
                             FROM sessions
                             WHERE {" AND ".join(pg_sub_query)}
                               AND user_id IS NOT NULL
                               AND NOT EXISTS((SELECT 1
                                               FROM sessions AS bsess
                                               WHERE bsess.start_ts<EXTRACT('EPOCH' FROM DATE_TRUNC('week', to_timestamp(%(startTimestamp)s / 1000))) * 1000
                                                 AND project_id = %(project_id)s
                                                 AND bsess.user_id = sessions.user_id
                                               LIMIT 1))
                             GROUP BY user_id) AS users_list
                                LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week,
                                                          user_id
                                                   FROM sessions
                                                   WHERE users_list.user_id = sessions.user_id
                                                     AND first_connexion_week <=
                                                         DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp)
                                                     AND sessions.project_id = %(project_id)s
                                                     AND sessions.start_ts < (%(endTimestamp)s - 1)
                                                   GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE)
                       GROUP BY first_connexion_week, week
                       ORDER BY first_connexion_week, week;"""
        # Fix: the LATERAL subquery hard-coded `sessions.project_id = 1`,
        # leaking project 1's sessions into every other project's retention.
        # Every other constraint already used the %(project_id)s parameter.
        params = {"project_id": project_id, "startTimestamp": startTimestamp,
                  "endTimestamp": endTimestamp, **__get_constraint_values(args)}
        # print(cur.mogrify(pg_query, params))
        cur.execute(cur.mogrify(pg_query, params))
        rows = cur.fetchall()
        rows = __compute_retention_percentage(helper.list_to_camel_case(rows))
    return __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now())

View file

@ -0,0 +1,13 @@
from chalicelib.core import projects
from chalicelib.utils import s3
from chalicelib.utils.helper import environ
def sign_keys(project_id, session_id, keys):
    """Return a pre-signed (1-hour) sharing URL for each requested iOS asset key.

    Keys are namespaced under "<project_key>/<session_id>/" in the iOS bucket.
    """
    project_key = projects.get_project_key(project_id)
    one_hour = 60 * 60
    return [
        s3.get_presigned_url_for_sharing(bucket=environ["iosBucket"],
                                         key=f"{project_key}/{session_id}/{k}",
                                         expires_in=one_hour)
        for k in keys
    ]

View file

@ -1,5 +1,5 @@
from chalicelib.utils import email_helper, captcha, helper
from chalicelib.core import users
from chalicelib.utils import email_helper, captcha, helper
def reset(data):
@ -10,7 +10,8 @@ def reset(data):
return {"errors": ["Invalid captcha."]}
if "email" not in data:
return {"errors": ["email not found in body"]}
if not helper.has_smtp():
return {"errors": ["no SMTP configuration found, you can ask your admin to reset your password"]}
a_users = users.get_by_email_only(data["email"])
if len(a_users) > 1:
print(f"multiple users found for [{data['email']}] please contact our support")

View file

@ -79,10 +79,6 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id,
session_id=session_id)
data['mobsUrl'] = sessions_mobs.get_ios(sessionId=session_id)
data["socket"] = socket_ios.start_replay(project_id=project_id, session_id=session_id,
device=data["userDevice"],
os_version=data["userOsVersion"],
mob_url=data["mobsUrl"])
else:
data['events'] = events.get_by_sessionId2_pg(project_id=project_id, session_id=session_id,
group_clickrage=True)
@ -162,6 +158,7 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False
"projectId": project_id,
"userId": user_id}
with pg_client.PostgresClient() as cur:
ss_constraints = []
extra_constraints = [
cur.mogrify("s.project_id = %(project_id)s", {"project_id": project_id}),
cur.mogrify("s.duration IS NOT NULL", {})
@ -173,7 +170,96 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False
extra_constraints.append(cur.mogrify("fs.user_id = %(userId)s", {"userId": user_id}))
events_query_part = ""
if "filters" in data:
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
for f in data["filters"]:
if not isinstance(f.get("value"), list):
f["value"] = [f.get("value")]
if len(f["value"]) == 0 or f["value"][0] is None:
continue
filter_type = f["type"].upper()
f["value"] = __get_sql_value_multiple(f["value"])
if filter_type == sessions_metas.meta_type.USERBROWSER:
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(cur.mogrify(f's.user_browser {op} %(value)s', {"value": f["value"]}))
ss_constraints.append(cur.mogrify(f'ms.user_browser {op} %(value)s', {"value": f["value"]}))
elif filter_type in [sessions_metas.meta_type.USEROS, sessions_metas.meta_type.USEROS_IOS]:
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(cur.mogrify(f's.user_os {op} %(value)s', {"value": f["value"]}))
ss_constraints.append(cur.mogrify(f'ms.user_os {op} %(value)s', {"value": f["value"]}))
elif filter_type in [sessions_metas.meta_type.USERDEVICE, sessions_metas.meta_type.USERDEVICE_IOS]:
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(cur.mogrify(f's.user_device {op} %(value)s', {"value": f["value"]}))
ss_constraints.append(cur.mogrify(f'ms.user_device {op} %(value)s', {"value": f["value"]}))
elif filter_type in [sessions_metas.meta_type.USERCOUNTRY, sessions_metas.meta_type.USERCOUNTRY_IOS]:
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(cur.mogrify(f's.user_country {op} %(value)s', {"value": f["value"]}))
ss_constraints.append(cur.mogrify(f'ms.user_country {op} %(value)s', {"value": f["value"]}))
elif filter_type == "duration".upper():
if len(f["value"]) > 0 and f["value"][0] is not None:
extra_constraints.append(
cur.mogrify("s.duration >= %(minDuration)s", {"minDuration": f["value"][0]}))
ss_constraints.append(
cur.mogrify("ms.duration >= %(minDuration)s", {"minDuration": f["value"][0]}))
if len(f["value"]) > 1 and f["value"][1] is not None and f["value"][1] > 0:
extra_constraints.append(
cur.mogrify("s.duration <= %(maxDuration)s", {"maxDuration": f["value"][1]}))
ss_constraints.append(
cur.mogrify("ms.duration <= %(maxDuration)s", {"maxDuration": f["value"][1]}))
elif filter_type == sessions_metas.meta_type.REFERRER:
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(
cur.mogrify(f"p.base_referrer {op} %(referrer)s", {"referrer": f["value"]}))
elif filter_type == events.event_type.METADATA.ui_type:
op = __get_sql_operator(f["operator"])
if f.get("key") in meta_keys.keys():
extra_constraints.append(
cur.mogrify(f"s.{metadata.index_to_colname(meta_keys[f['key']])} {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)}))
ss_constraints.append(
cur.mogrify(f"ms.{metadata.index_to_colname(meta_keys[f['key']])} {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)}))
elif filter_type in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
op = __get_sql_operator(f["operator"])
extra_constraints.append(
cur.mogrify(f"s.user_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
ss_constraints.append(
cur.mogrify(f"ms.user_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
elif filter_type in [sessions_metas.meta_type.USERANONYMOUSID,
sessions_metas.meta_type.USERANONYMOUSID_IOS]:
op = __get_sql_operator(f["operator"])
extra_constraints.append(
cur.mogrify(f"s.user_anonymous_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
ss_constraints.append(
cur.mogrify(f"ms.user_anonymous_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
elif filter_type in [sessions_metas.meta_type.REVID, sessions_metas.meta_type.REVID_IOS]:
op = __get_sql_operator(f["operator"])
extra_constraints.append(
cur.mogrify(f"s.rev_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
ss_constraints.append(
cur.mogrify(f"ms.rev_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
# ---------------------------------------------------------------------------
if len(data.get("events", [])) > 0:
ss_constraints = [s.decode('UTF-8') for s in ss_constraints]
events_query_from = []
event_index = 0
@ -268,7 +354,8 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False
else:
continue
if event_index == 0:
event_where += ss_constraints
if is_not:
if event_index == 0:
events_query_from.append(cur.mogrify(f"""\
@ -316,73 +403,6 @@ def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False
else:
data["events"] = []
# ---------------------------------------------------------------------------
if "filters" in data:
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
for f in data["filters"]:
if not isinstance(f.get("value"), list):
f["value"] = [f.get("value")]
if len(f["value"]) == 0 or f["value"][0] is None:
continue
filter_type = f["type"].upper()
f["value"] = __get_sql_value_multiple(f["value"])
if filter_type == sessions_metas.meta_type.USERBROWSER:
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(
cur.mogrify(f's.user_browser {op} %(value)s', {"value": f["value"]}))
elif filter_type in [sessions_metas.meta_type.USEROS, sessions_metas.meta_type.USEROS_IOS]:
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(cur.mogrify(f's.user_os {op} %(value)s', {"value": f["value"]}))
elif filter_type in [sessions_metas.meta_type.USERDEVICE, sessions_metas.meta_type.USERDEVICE_IOS]:
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(cur.mogrify(f's.user_device {op} %(value)s', {"value": f["value"]}))
elif filter_type in [sessions_metas.meta_type.USERCOUNTRY, sessions_metas.meta_type.USERCOUNTRY_IOS]:
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(cur.mogrify(f's.user_country {op} %(value)s', {"value": f["value"]}))
elif filter_type == "duration".upper():
if len(f["value"]) > 0 and f["value"][0] is not None:
extra_constraints.append(
cur.mogrify("s.duration >= %(minDuration)s", {"minDuration": f["value"][0]}))
if len(f["value"]) > 1 and f["value"][1] is not None and f["value"][1] > 0:
extra_constraints.append(
cur.mogrify("s.duration <= %(maxDuration)s", {"maxDuration": f["value"][1]}))
elif filter_type == sessions_metas.meta_type.REFERRER:
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"
op = __get_sql_operator_multiple(f["operator"])
extra_constraints.append(
cur.mogrify(f"p.base_referrer {op} %(referrer)s", {"referrer": f["value"]}))
elif filter_type == events.event_type.METADATA.ui_type:
op = __get_sql_operator(f["operator"])
if f.get("key") in meta_keys.keys():
extra_constraints.append(
cur.mogrify(f"s.{metadata.index_to_colname(meta_keys[f['key']])} {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
elif filter_type in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
op = __get_sql_operator(f["operator"])
extra_constraints.append(
cur.mogrify(f"s.user_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
elif filter_type in [sessions_metas.meta_type.USERANONYMOUSID,
sessions_metas.meta_type.USERANONYMOUSID_IOS]:
op = __get_sql_operator(f["operator"])
extra_constraints.append(
cur.mogrify(f"s.user_anonymous_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
elif filter_type in [sessions_metas.meta_type.REVID, sessions_metas.meta_type.REVID_IOS]:
op = __get_sql_operator(f["operator"])
extra_constraints.append(
cur.mogrify(f"s.rev_id {op} %(value)s",
{"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
)
# ---------------------------------------------------------------------------
if data.get("startDate") is not None:
@ -741,3 +761,9 @@ def delete_sessions_by_user_ids(project_id, user_ids):
cur.execute(query=query)
return True
def count_all():
    """Total number of rows in public.sessions.

    Fix: the original assigned the return value of ``cur.execute(...)`` —
    which is None for DB-API cursors — and then called ``.get()`` on it,
    raising AttributeError at runtime. Fetch the result row instead.
    """
    with pg_client.PostgresClient() as cur:
        cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions")
        row = cur.fetchone()
    return row.get("count", 0) if row else 0

View file

@ -9,6 +9,9 @@ from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.helper import environ
from chalicelib.core import tenants, assist
import secrets
def __generate_invitation_token():
return secrets.token_urlsafe(64)
@ -438,7 +441,7 @@ def change_password(tenant_id, user_id, email, old_password, new_password):
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
stack_integrations=True)
c["smtp"] = helper.has_smtp()
c["iceServers"]: assist.get_ice_servers()
c["iceServers"]= assist.get_ice_servers()
return {
'jwt': r.pop('jwt'),
'data': {
@ -466,7 +469,7 @@ def set_password_invitation(user_id, new_password):
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
stack_integrations=True)
c["smtp"] = helper.has_smtp()
c["iceServers"]: assist.get_ice_servers()
c["iceServers"]= assist.get_ice_servers()
return {
'jwt': r.pop('jwt'),
'data': {

View file

@ -1,4 +1,5 @@
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.helper import environ
from chalicelib.utils.helper import get_issue_title
@ -30,7 +31,11 @@ def edit_config(user_id, weekly_report):
def cron():
with pg_client.PostgresClient() as cur:
cur.execute("""\
params = {"3_days_ago": TimeUTC.midnight(delta_days=-3),
"1_week_ago": TimeUTC.midnight(delta_days=-7),
"2_week_ago": TimeUTC.midnight(delta_days=-14),
"5_week_ago": TimeUTC.midnight(delta_days=-35)}
cur.execute(cur.mogrify("""\
SELECT project_id,
name AS project_name,
users.emails AS emails,
@ -44,7 +49,7 @@ def cron():
SELECT sessions.project_id
FROM public.sessions
WHERE sessions.project_id = projects.project_id
AND start_ts >= (EXTRACT(EPOCH FROM now() - INTERVAL '3 days') * 1000)::BIGINT
AND start_ts >= %(3_days_ago)s
LIMIT 1) AS recently_active USING (project_id)
INNER JOIN LATERAL (
SELECT COALESCE(ARRAY_AGG(email), '{}') AS emails
@ -54,14 +59,14 @@ def cron():
AND users.weekly_report
) AS users ON (TRUE)
LEFT JOIN LATERAL (
SELECT COUNT(issues.*) AS count
SELECT COUNT(1) AS count
FROM events_common.issues
INNER JOIN public.sessions USING (session_id)
WHERE sessions.project_id = projects.project_id
AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
) AS week_0_issues ON (TRUE)
LEFT JOIN LATERAL (
SELECT COUNT(issues.*) AS count
SELECT COUNT(1) AS count
FROM events_common.issues
INNER JOIN public.sessions USING (session_id)
WHERE sessions.project_id = projects.project_id
@ -69,16 +74,17 @@ def cron():
AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '2 week') * 1000)::BIGINT
) AS week_1_issues ON (TRUE)
LEFT JOIN LATERAL (
SELECT COUNT(issues.*) AS count
SELECT COUNT(1) AS count
FROM events_common.issues
INNER JOIN public.sessions USING (session_id)
WHERE sessions.project_id = projects.project_id
AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 week') * 1000)::BIGINT
) AS month_1_issues ON (TRUE)
WHERE projects.deleted_at ISNULL;""")
WHERE projects.deleted_at ISNULL;"""), params)
projects_data = cur.fetchall()
for p in projects_data:
params["project_id"] = p["project_id"]
print(f"checking {p['project_name']} : {p['project_id']}")
if len(p["emails"]) == 0 \
or p["this_week_issues_count"] + p["past_week_issues_count"] + p["past_month_issues_count"] == 0:
@ -104,7 +110,7 @@ def cron():
DATE_TRUNC('day', now()) - INTERVAL '1 day',
'1 day'::INTERVAL
) AS timestamp_i
ORDER BY timestamp_i;""", {"project_id": p["project_id"]}))
ORDER BY timestamp_i;""", params))
days_partition = cur.fetchall()
max_days_partition = max(x['issues_count'] for x in days_partition)
for d in days_partition:
@ -120,7 +126,7 @@ def cron():
AND timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '7 days') * 1000)::BIGINT
GROUP BY type
ORDER BY count DESC, type
LIMIT 4;""", {"project_id": p["project_id"]}))
LIMIT 4;""", params))
issues_by_type = cur.fetchall()
max_issues_by_type = sum(i["count"] for i in issues_by_type)
for i in issues_by_type:
@ -149,7 +155,7 @@ def cron():
'1 day'::INTERVAL
) AS timestamp_i
GROUP BY timestamp_i
ORDER BY timestamp_i;""", {"project_id": p["project_id"]}))
ORDER BY timestamp_i;""", params))
issues_breakdown_by_day = cur.fetchall()
for i in issues_breakdown_by_day:
i["sum"] = sum(x["count"] for x in i["partition"])
@ -195,7 +201,7 @@ def cron():
WHERE mi.project_id = %(project_id)s AND sessions.project_id = %(project_id)s AND sessions.duration IS NOT NULL
AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
GROUP BY type
ORDER BY issue_count DESC;""", {"project_id": p["project_id"]}))
ORDER BY issue_count DESC;""", params))
issues_breakdown_list = cur.fetchall()
if len(issues_breakdown_list) > 4:
others = {"type": "Others",

View file

@ -115,6 +115,13 @@ class TimeUTC:
def get_utc_offset():
return int((datetime.now(pytz.utc).now() - datetime.now(pytz.utc).replace(tzinfo=None)).total_seconds() * 1000)
    @staticmethod
    def trunc_day(timestamp):
        # Truncate a millisecond timestamp to 00:00:00.000 of its day and
        # return it as a millisecond timestamp again.
        # NOTE(review): .replace() runs before .astimezone(pytz.utc), so the
        # day boundary depends on the tzinfo of the datetime returned by
        # TimeUTC.from_ms_timestamp — confirm it is already UTC.
        dt = TimeUTC.from_ms_timestamp(timestamp)
        return TimeUTC.datetime_to_timestamp(dt
                                             .replace(hour=0, minute=0, second=0, microsecond=0)
                                             .astimezone(pytz.utc))
@staticmethod
def trunc_week(timestamp):
dt = TimeUTC.from_ms_timestamp(timestamp)

View file

@ -1,8 +1,8 @@
import math
import random
import re
import string
import math
import requests
local_prefix = 'local-'
@ -367,3 +367,7 @@ def get_internal_project_id(project_id64):
def has_smtp():
return environ["EMAIL_HOST"] is not None and len(environ["EMAIL_HOST"]) > 0
def get_edition():
return "foss" if is_free_open_source_edition() else "ee"

View file

@ -0,0 +1,120 @@
import re
from urllib.parse import urlparse
def style(url):
    """ Determine 'style' of a given S3 url
    >>> style("s3://my-bucket/my-key/")
    's3'
    >>> style("s3://user@my-bucket/my-key/")
    's3-credential'
    >>> style("https://my-bucket.s3.amazonaws.com/my-key/")
    'bucket-in-netloc'
    >>> style("https://s3.amazonaws.com/my-bucket/my-key/")
    'bucket-in-path'
    """
    parsed = urlparse(url)
    # s3:// scheme: presence of "user@" marks a credential-style url.
    if parsed.scheme == 's3':
        return 's3-credential' if '@' in parsed.netloc else 's3'
    # http(s) urls: decide by whether the bucket lives in the host or the path.
    if re.search(r'^s3[.-](\w{2}-\w{4,9}-\d\.)?amazonaws\.com', parsed.netloc):
        return 'bucket-in-path'
    if re.search(r'\.s3[.-](\w{2}-\w{4,9}-\d\.)?amazonaws\.com', parsed.netloc):
        return 'bucket-in-netloc'
    raise ValueError(f'Unknown url style: {url}')
def build_url(url_type, bucket, key=None, region=None, credential_name=None):
    """ Construct an S3 URL
    Args:
        url_type: one of 's3', 's3-credential', 'bucket-in-path', 'bucket-in-netloc'
        bucket: S3 bucket name
        key: Key within bucket (optional)
        region: S3 region name (optional)
        credential_name: user/credential name to use in S3 scheme url (optional)
    Returns:
        (string) S3 URL
    Raises:
        ValueError: for an unknown url_type.
    """
    # Fix: an omitted key rendered as the literal string "None" in the
    # http-style branches; normalize it to "" for every branch.
    key_part = key or ""
    # Fix: style() can return 's3-credential', but build_url had no branch
    # for it and raised ValueError; treat it like 's3' (credential_name
    # controls whether the "user@" part is emitted).
    if url_type in ('s3', 's3-credential'):
        credential = f'{credential_name}@' if credential_name else ""
        return f's3://{credential}{bucket}/{key_part}'
    if url_type == 'bucket-in-path':
        return f'https://s3{"-" if region else ""}{region or ""}.amazonaws.com/{bucket}/{key_part}'
    if url_type == 'bucket-in-netloc':
        return f'https://{bucket}.s3.amazonaws.com/{key_part}'
    raise ValueError(f'Invalid url_type: {url_type}')
def parse_s3_credential_url(url):
    """ Parse S3 scheme url containing a user/credential name

    >>> parse_s3_credential_url("s3://user@my-bucket/my-key")
    {'bucket': 'my-bucket', 'key': 'my-key', 'credential_name': 'user'}
    """
    o = urlparse(url)
    cred_name, bucket = o.netloc.split('@')
    # Strip the single leading '/' urlparse leaves on the path; an empty
    # path (no key) now yields key == '' instead of raising IndexError.
    key = o.path[1:] if o.path.startswith('/') else o.path
    return {'bucket': bucket, 'key': key, 'credential_name': cred_name}
def parse_s3_url(url):
    """ Parse S3 scheme url

    >>> parse_s3_url("s3://my-bucket/my-key")
    {'bucket': 'my-bucket', 'key': 'my-key'}
    """
    o = urlparse(url)
    bucket = o.netloc
    # Strip the single leading '/' urlparse leaves on the path; an empty
    # path (no key) now yields key == '' instead of raising IndexError.
    key = o.path[1:] if o.path.startswith('/') else o.path
    return {'bucket': bucket, 'key': key}
def parse_bucket_in_path_url(url):
    """ Parse url with bucket name path

    >>> parse_bucket_in_path_url("https://s3-eu-west-1.amazonaws.com/my-bucket/my-key/")
    {'bucket': 'my-bucket', 'key': 'my-key/'}
    """
    # Path looks like "/<bucket>/<key...>": the first segment is empty,
    # the second is the bucket, everything after is the key.
    segments = urlparse(url).path.split('/')
    return {'bucket': segments[1], 'key': '/'.join(segments[2:])}
def parse_bucket_in_netloc_url(url):
    """ Parse url with bucket name in host/netloc

    >>> parse_bucket_in_netloc_url("https://my-bucket.s3.amazonaws.com/my-key/")
    {'bucket': 'my-bucket', 'key': 'my-key/'}
    """
    o = urlparse(url)
    # The bucket is the leftmost host label: "<bucket>.s3[.-region].amazonaws.com".
    bucket = o.netloc.split('.')[0]
    # Strip the single leading '/' urlparse leaves on the path; an empty
    # path (no key) now yields key == '' instead of raising IndexError.
    key = o.path[1:] if o.path.startswith('/') else o.path
    return {'bucket': bucket, 'key': key}
def parse_url(url):
    """Parse any supported S3 url by dispatching on its detected style.

    Returns a dict with 'bucket' and 'key' (plus 'credential_name' for
    's3-credential' urls). Unknown styles raise ValueError from style().
    """
    parsers = {
        's3-credential': parse_s3_credential_url,
        's3': parse_s3_url,
        'bucket-in-path': parse_bucket_in_path_url,
        'bucket-in-netloc': parse_bucket_in_netloc_url,
    }
    return parsers[style(url)](url)

View file

@ -28,11 +28,11 @@ ENV TZ=UTC \
BEACON_SIZE_LIMIT=7000000 \
KAFKA_USE_SSL=true \
REDIS_STREAMS_MAX_LEN=3000 \
TOPIC_RAW=raw \
TOPIC_RAW_WEB=raw \
TOPIC_RAW_IOS=raw-ios \
TOPIC_CACHE=cache \
TOPIC_ANALYTICS=analytics \
TOPIC_TRIGGER=trigger \
TOPIC_EVENTS=events \
GROUP_SINK=sink \
GROUP_STORAGE=storage \
GROUP_DB=db \
@ -41,7 +41,7 @@ ENV TZ=UTC \
AWS_REGION_WEB=eu-central-1 \
AWS_REGION_IOS=eu-west-1 \
AWS_REGION_ASSETS=eu-central-1 \
CACHE_ASSETS=false \
CACHE_ASSETS=true \
ASSETS_SIZE_LIMIT=6291456 \
FS_CLEAN_HRS=72

View file

@ -29,11 +29,11 @@ ENV TZ=UTC \
BEACON_SIZE_LIMIT=1000000 \
KAFKA_USE_SSL=true \
REDIS_STREAMS_MAX_LEN=3000 \
TOPIC_RAW=raw \
TOPIC_RAW_WEB=raw \
TOPIC_RAW_IOS=raw-ios \
TOPIC_CACHE=cache \
TOPIC_ANALYTICS=analytics \
TOPIC_TRIGGER=trigger \
TOPIC_EVENTS=events \
GROUP_SINK=sink \
GROUP_STORAGE=storage \
GROUP_DB=db \

View file

@ -8,7 +8,7 @@ require (
github.com/Masterminds/squirrel v1.5.0
github.com/aws/aws-sdk-go v1.35.23
github.com/btcsuite/btcutil v1.0.2
github.com/confluentinc/confluent-kafka-go v1.5.2 // indirect
github.com/confluentinc/confluent-kafka-go v1.7.0 // indirect
github.com/elastic/go-elasticsearch/v7 v7.13.1
github.com/go-redis/redis v6.15.9+incompatible
github.com/google/uuid v1.1.2
@ -24,6 +24,6 @@ require (
github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce
github.com/ua-parser/uap-go v0.0.0-20200325213135-e1c09f13e2fe
google.golang.org/api v0.50.0
gopkg.in/confluentinc/confluent-kafka-go.v1 v1.5.2
gopkg.in/confluentinc/confluent-kafka-go.v1 v1.7.0
)

View file

@ -28,3 +28,52 @@ func (c *PGCache) InsertIssueEvent(sessionID uint64, crash *IssueEvent) error {
}
return c.Conn.InsertIssueEvent(sessionID, session.ProjectID, crash)
}
// InsertUserID persists an iOS user id for the session and mirrors it
// into the cached Session so later reads see the updated value.
// NOTE(review): the DB insert happens before the cache lookup, so a
// missing cached session leaves the row inserted but the cache stale.
func (c *PGCache) InsertUserID(sessionID uint64, userID *IOSUserID) error {
if err := c.Conn.InsertIOSUserID(sessionID, userID); err != nil {
return err
}
session, err := c.GetSession(sessionID)
if err != nil {
return err
}
session.UserID = &userID.Value
return nil
}
// InsertUserAnonymousID persists an iOS anonymous user id for the session
// and mirrors it into the cached Session (same write-then-cache pattern
// as InsertUserID).
func (c *PGCache) InsertUserAnonymousID(sessionID uint64, userAnonymousID *IOSUserAnonymousID) error {
if err := c.Conn.InsertIOSUserAnonymousID(sessionID, userAnonymousID); err != nil {
return err
}
session, err := c.GetSession(sessionID)
if err != nil {
return err
}
session.UserAnonymousID = &userAnonymousID.Value
return nil
}
// InsertMetadata resolves the metadata key to its per-project slot number,
// writes the value to the DB, and updates the cached session.
func (c *PGCache) InsertMetadata(sessionID uint64, metadata *Metadata) error {
session, err := c.GetSession(sessionID)
if err != nil {
return err
}
project, err := c.GetProject(session.ProjectID)
if err != nil {
return err
}
keyNo := project.GetMetadataNo(metadata.Key)
if keyNo == 0 {
// insert project metadata
// NOTE(review): this branch is an unimplemented stub — an unknown key
// falls through and is stored under slot 0; confirm intended behavior.
}
if err := c.Conn.InsertMetadata(sessionID, keyNo, metadata.Value); err != nil {
return err
}
session.SetMetadata(keyNo, metadata.Value)
return nil
}

View file

@ -22,6 +22,7 @@ func (c *PGCache) InsertIOSSessionStart(sessionID uint64, s *IOSSessionStart) er
UserOSVersion: s.UserOSVersion,
UserDevice: s.UserDevice,
UserCountry: s.UserCountry,
UserDeviceType: s.UserDeviceType,
}
if err := c.Conn.InsertSessionStart(sessionID, c.sessions[ sessionID ]); err != nil {
c.sessions[ sessionID ] = nil
@ -95,46 +96,3 @@ func (c *PGCache) InsertIOSIssueEvent(sessionID uint64, issueEvent *IOSIssueEven
return nil
}
func (c *PGCache) InsertUserID(sessionID uint64, userID *IOSUserID) error {
if err := c.Conn.InsertIOSUserID(sessionID, userID); err != nil {
return err
}
session, err := c.GetSession(sessionID)
if err != nil {
return err
}
session.UserID = &userID.Value
return nil
}
func (c *PGCache) InsertUserAnonymousID(sessionID uint64, userAnonymousID *IOSUserAnonymousID) error {
if err := c.Conn.InsertIOSUserAnonymousID(sessionID, userAnonymousID); err != nil {
return err
}
session, err := c.GetSession(sessionID)
if err != nil {
return err
}
session.UserAnonymousID = &userAnonymousID.Value
return nil
}
func (c *PGCache) InsertMetadata(sessionID uint64, metadata *Metadata) error {
session, err := c.GetSession(sessionID)
if err != nil {
return err
}
project, err := c.GetProject(session.ProjectID)
if err != nil {
return err
}
keyNo := project.GetMetadataNo(metadata.Key)
if err := c.Conn.InsertMetadata(sessionID, keyNo, metadata.Value); err != nil {
return err
}
session.SetMetadata(keyNo, metadata.Value)
return nil
}

View file

@ -2,6 +2,7 @@ package cache
import (
"time"
"sync"
"openreplay/backend/pkg/db/postgres"
. "openreplay/backend/pkg/db/types"
@ -20,8 +21,8 @@ type ProjectMeta struct {
type PGCache struct {
*postgres.Conn
sessions map[uint64]*Session
projects map[uint32]*ProjectMeta
projectsByKeys map[string]*ProjectMeta
projects map[uint32]*ProjectMeta
projectsByKeys sync.Map // map[string]*ProjectMeta
projectExpirationTimeout time.Duration
}
@ -31,7 +32,7 @@ func NewPGCache(pgConn *postgres.Conn, projectExpirationTimeoutMs int64) *PGCach
Conn: pgConn,
sessions: make(map[uint64]*Session),
projects: make(map[uint32]*ProjectMeta),
projectsByKeys: make(map[string]*ProjectMeta),
//projectsByKeys: make(map[string]*ProjectMeta),
projectExpirationTimeout: time.Duration(1000 * projectExpirationTimeoutMs),
}
}

View file

@ -6,16 +6,21 @@ import (
)
func (c *PGCache) GetProjectByKey(projectKey string) (*Project, error) {
if c.projectsByKeys[ projectKey ] != nil &&
time.Now().Before(c.projectsByKeys[ projectKey ].expirationTime) {
return c.projectsByKeys[ projectKey ].Project, nil
pmInterface, found := c.projectsByKeys.Load(projectKey)
if found {
if pm, ok := pmInterface.(*ProjectMeta); ok {
if time.Now().Before(pm.expirationTime) {
return pm.Project, nil
}
}
}
p, err := c.Conn.GetProjectByKey(projectKey)
if p == nil {
if err != nil {
return nil, err
}
c.projectsByKeys[ projectKey ] = &ProjectMeta{ p, time.Now().Add(c.projectExpirationTimeout) }
c.projects[ p.ProjectID ] = c.projectsByKeys[ projectKey ]
//c.projects[ p.ProjectID ] = &ProjectMeta{ p, time.Now().Add(c.projectExpirationTimeout) }
c.projectsByKeys.Store(projectKey, p)
return p, nil
}
@ -27,11 +32,11 @@ func (c *PGCache) GetProject(projectID uint32) (*Project, error) {
return c.projects[ projectID ].Project, nil
}
p, err := c.Conn.GetProject(projectID)
if p == nil {
if err != nil {
return nil, err
}
c.projects[ projectID ] = &ProjectMeta{ p, time.Now().Add(c.projectExpirationTimeout) }
c.projectsByKeys[ p.ProjectKey ] = c.projects[ projectID ]
//c.projectsByKeys.Store(p.ProjectKey, c.projects[ projectID ])
return p, nil
}

View file

@ -2,15 +2,17 @@ package postgres
import (
"errors"
"github.com/jackc/pgx/v4"
"github.com/jackc/pgconn"
"github.com/jackc/pgerrcode"
)
func IsPkeyViolation(err error) bool {
var pgErr *pgconn.PgError
if errors.As(err, &pgErr) && pgErr.Code == pgerrcode.UniqueViolation {
return true
}
return false
}
return errors.As(err, &pgErr) && pgErr.Code == pgerrcode.UniqueViolation
}
// IsNoRowsErr reports whether err is pgx's "no rows in result set" sentinel.
// NOTE(review): uses == rather than errors.Is, so wrapped errors won't match.
func IsNoRowsErr(err error) bool {
return err == pgx.ErrNoRows
}

View file

@ -1,7 +1,6 @@
package postgres
import (
"github.com/jackc/pgx/v4"
. "openreplay/backend/pkg/db/types"
)
@ -14,9 +13,6 @@ func (conn *Conn) GetProjectByKey(projectKey string) (*Project, error) {
`,
projectKey,
).Scan(&p.MaxSessionDuration, &p.SampleRate, &p.ProjectID); err != nil {
if err == pgx.ErrNoRows {
err = nil
}
return nil, err
}
return p, nil
@ -36,9 +32,6 @@ func (conn *Conn) GetProject(projectID uint32) (*Project, error) {
).Scan(&p.ProjectKey,&p.MaxSessionDuration,
&p.Metadata1, &p.Metadata2, &p.Metadata3, &p.Metadata4, &p.Metadata5,
&p.Metadata6, &p.Metadata7, &p.Metadata8, &p.Metadata9, &p.Metadata10); err != nil {
if err == pgx.ErrNoRows {
err = nil
}
return nil, err
}
return p, nil

View file

@ -30,6 +30,14 @@ func ReadBatch(b []byte, callback func(Message)) error {
timestamp = m.Timestamp
isBatchMeta = true
// continue readLoop
case *IOSBatchMeta:
if index != 0 { // Might be several 0-0 BatchMeta in a row without a error though
return errors.New("Batch Meta found at the end of the batch")
}
index = m.FirstIndex
timestamp = int64(m.Timestamp)
isBatchMeta = true
// continue readLoop
case *Timestamp:
timestamp = int64(m.Timestamp) // TODO(?): replace timestamp type to int64 everywhere (including encoding part in tracker)
// No skipping here for making it easy to encode back the same sequence of message

View file

@ -3,9 +3,9 @@ package messages
func IsReplayerType(id uint64) bool {
return 0 == id || 2 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 22 == id || 37 == id || 38 == id || 39 == id || 40 == id || 41 == id || 44 == id || 45 == id || 46 == id || 47 == id || 48 == id || 49 == id || 54 == id || 55 == id || 59 == id || 69 == id || 70 == id || 90 == id || 93 == id || 100 == id || 102 == id || 103 == id || 105 == id
return 0 == id || 2 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 22 == id || 37 == id || 38 == id || 39 == id || 40 == id || 41 == id || 44 == id || 45 == id || 46 == id || 47 == id || 48 == id || 49 == id || 54 == id || 55 == id || 59 == id || 69 == id || 70 == id || 90 == id || 93 == id || 96 == id || 100 == id || 102 == id || 103 == id || 105 == id
}
func IsIOSType(id uint64) bool {
return 90 == id || 91 == id || 92 == id || 93 == id || 94 == id || 95 == id || 96 == id || 97 == id || 98 == id || 99 == id || 100 == id || 101 == id || 102 == id || 103 == id || 104 == id || 105 == id || 110 == id || 111 == id
return 107 == id || 90 == id || 91 == id || 92 == id || 93 == id || 94 == id || 95 == id || 96 == id || 97 == id || 98 == id || 99 == id || 100 == id || 101 == id || 102 == id || 103 == id || 104 == id || 105 == id || 110 == id || 111 == id
}

View file

@ -0,0 +1,65 @@
// Auto-generated, do not edit
package messages
// GetTimestamp returns the message's own Timestamp field for every iOS
// message type that carries one; for all other message types it falls
// back to the (meta) batch timestamp.
// Auto-generated (see file header) — keep edits to comments only.
func GetTimestamp(message Message) uint64 {
switch msg := message.(type) {
case *IOSBatchMeta:
return msg.Timestamp
case *IOSSessionStart:
return msg.Timestamp
case *IOSSessionEnd:
return msg.Timestamp
case *IOSMetadata:
return msg.Timestamp
case *IOSCustomEvent:
return msg.Timestamp
case *IOSUserID:
return msg.Timestamp
case *IOSUserAnonymousID:
return msg.Timestamp
case *IOSScreenChanges:
return msg.Timestamp
case *IOSCrash:
return msg.Timestamp
case *IOSScreenEnter:
return msg.Timestamp
case *IOSScreenLeave:
return msg.Timestamp
case *IOSClickEvent:
return msg.Timestamp
case *IOSInputEvent:
return msg.Timestamp
case *IOSPerformanceEvent:
return msg.Timestamp
case *IOSLog:
return msg.Timestamp
case *IOSInternalError:
return msg.Timestamp
case *IOSNetworkCall:
return msg.Timestamp
case *IOSIssueEvent:
return msg.Timestamp
}
// Non-iOS messages: meta Timestamp is an int64, converted to uint64 here.
return uint64(message.Meta().Timestamp)
}

View file

@ -1192,6 +1192,22 @@ p = WriteUint(msg.ID, buf, p)
return buf[:p]
}
type IOSBatchMeta struct {
*meta
Timestamp uint64
Length uint64
FirstIndex uint64
}
func (msg *IOSBatchMeta) Encode() []byte{
buf := make([]byte, 31 )
buf[0] = 107
p := 1
p = WriteUint(msg.Timestamp, buf, p)
p = WriteUint(msg.Length, buf, p)
p = WriteUint(msg.FirstIndex, buf, p)
return buf[:p]
}
type IOSSessionStart struct {
*meta
Timestamp uint64
@ -1305,14 +1321,22 @@ p = WriteString(msg.Value, buf, p)
type IOSScreenChanges struct {
*meta
Timestamp uint64
SkipData []byte
Length uint64
X uint64
Y uint64
Width uint64
Height uint64
}
func (msg *IOSScreenChanges) Encode() []byte{
buf := make([]byte, 21 + len(msg.SkipData))
buf := make([]byte, 61 )
buf[0] = 96
p := 1
p = WriteUint(msg.Timestamp, buf, p)
p = WriteData(msg.SkipData, buf, p)
p = WriteUint(msg.Length, buf, p)
p = WriteUint(msg.X, buf, p)
p = WriteUint(msg.Y, buf, p)
p = WriteUint(msg.Width, buf, p)
p = WriteUint(msg.Height, buf, p)
return buf[:p]
}

View file

@ -49,7 +49,7 @@ func ReadUint(reader io.Reader) (uint64, error) {
}
if b < 0x80 {
if i > 9 || i == 9 && b > 1 {
return x, errors.New("overflow")
return x, errors.New("uint overflow")
}
return x | uint64(b)<<s, nil
}

View file

@ -532,6 +532,13 @@ if msg.Selector, err = ReadString(reader); err != nil { return nil, err }
if msg.ID, err = ReadUint(reader); err != nil { return nil, err }
return msg, nil
case 107:
msg := &IOSBatchMeta{ meta: &meta{ TypeID: 107} }
if msg.Timestamp, err = ReadUint(reader); err != nil { return nil, err }
if msg.Length, err = ReadUint(reader); err != nil { return nil, err }
if msg.FirstIndex, err = ReadUint(reader); err != nil { return nil, err }
return msg, nil
case 90:
msg := &IOSSessionStart{ meta: &meta{ TypeID: 90} }
if msg.Timestamp, err = ReadUint(reader); err != nil { return nil, err }
@ -584,7 +591,11 @@ if msg.Value, err = ReadString(reader); err != nil { return nil, err }
case 96:
msg := &IOSScreenChanges{ meta: &meta{ TypeID: 96} }
if msg.Timestamp, err = ReadUint(reader); err != nil { return nil, err }
if msg.SkipData, err = ReadData(reader); err != nil { return nil, err }
if msg.Length, err = ReadUint(reader); err != nil { return nil, err }
if msg.X, err = ReadUint(reader); err != nil { return nil, err }
if msg.Y, err = ReadUint(reader); err != nil { return nil, err }
if msg.Width, err = ReadUint(reader); err != nil { return nil, err }
if msg.Height, err = ReadUint(reader); err != nil { return nil, err }
return msg, nil
case 97:

View file

@ -8,7 +8,7 @@ import (
)
func getSessionKey(sessionID uint64) string {
// Based on timestamp, changes once per week. Check out utils/flacker for understanding sessionID
// Based on timestamp, changes once per week. Check pkg/flakeid for understanding sessionID
return strconv.FormatUint(sessionID>>50, 10)
}

View file

@ -15,12 +15,11 @@ import (
"openreplay/backend/services/assets/cacher"
)
func main() {
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
GROUP_CACHE := env.String("GROUP_CACHE")
TOPIC_TRIGGER := env.String("TOPIC_TRIGGER")
TOPIC_CACHE := env.String("TOPIC_CACHE")
cacher := cacher.NewCacher(
env.String("AWS_REGION"),
@ -31,7 +30,7 @@ func main() {
consumer := queue.NewMessageConsumer(
GROUP_CACHE,
[]string{ TOPIC_TRIGGER },
[]string{ TOPIC_CACHE },
func(sessionID uint64, message messages.Message, e *types.Meta) {
switch msg := message.(type) {
case *messages.AssetCache:

View file

@ -17,7 +17,6 @@ import (
"openreplay/backend/services/db/heuristics"
)
var pg *cache.PGCache
func main() {
@ -32,13 +31,13 @@ func main() {
consumer := queue.NewMessageConsumer(
env.String("GROUP_DB"),
[]string{
//env.String("TOPIC_RAW"),
env.String("TOPIC_RAW_IOS"),
env.String("TOPIC_TRIGGER"),
},
func(sessionID uint64, msg messages.Message, _ *types.Meta) {
if err := insertMessage(sessionID, msg); err != nil {
if !postgres.IsPkeyViolation(err) {
log.Printf("Message Insertion Error %v, Message %v", err, msg)
log.Printf("Message Insertion Error %v, SessionID: %v, Message: %v", err,sessionID, msg)
}
return
}
@ -46,13 +45,13 @@ func main() {
session, err := pg.GetSession(sessionID)
if err != nil {
// Might happen due to the assets-related message TODO: log only if session is necessary for this kind of message
log.Printf("Error on session retrieving from cache: %v, Message %v, sessionID %v", err, msg, sessionID)
log.Printf("Error on session retrieving from cache: %v, SessionID: %v, Message: %v", err, sessionID, msg)
return;
}
err = insertStats(session, msg)
if err != nil {
log.Printf("Stats Insertion Error %v; Session:%v, Message: %v", err, session, msg)
log.Printf("Stats Insertion Error %v; Session: %v, Message: %v", err, session, msg)
}
heurFinder.HandleMessage(session, msg)
@ -60,14 +59,14 @@ func main() {
// TODO: DRY code (carefully with the return statement logic)
if err := insertMessage(sessionID, msg); err != nil {
if !postgres.IsPkeyViolation(err) {
log.Printf("Message Insertion Error %v, Message %v", err, msg)
log.Printf("Message Insertion Error %v; Session: %v, Message %v", err, session, msg)
}
return
}
err = insertStats(session, msg)
if err != nil {
log.Printf("Stats Insertion Error %v", err)
log.Printf("Stats Insertion Error %v; Session: %v, Message %v", err, session, msg)
}
})
},

View file

@ -108,11 +108,11 @@ func (b *builder) buildInputEvent() {
}
func (b *builder) handleMessage(message Message, messageID uint64) {
timestamp := uint64(message.Meta().Timestamp)
if b.timestamp <= timestamp { // unnecessary. TODO: test and remove
timestamp := GetTimestamp(message)
if b.timestamp <= timestamp { // unnecessary? TODO: test and remove
b.timestamp = timestamp
}
// Before the first timestamp.
// Might happen before the first timestamp.
switch msg := message.(type) {
case *SessionStart,
*Metadata,

View file

@ -16,7 +16,6 @@ import (
"openreplay/backend/services/ender/builder"
)
func main() {
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
@ -30,7 +29,8 @@ func main() {
consumer := queue.NewMessageConsumer(
GROUP_EVENTS,
[]string{
env.String("TOPIC_RAW"),
env.String("TOPIC_RAW_WEB"),
env.String("TOPIC_RAW_IOS"),
},
func(sessionID uint64, msg messages.Message, meta *types.Meta) {
lastTs = meta.Timestamp

View file

@ -7,7 +7,7 @@ import (
func sendAssetForCache(sessionID uint64, baseURL string, relativeURL string) {
if fullURL, cacheable := assets.GetFullCachableURL(baseURL, relativeURL); cacheable {
producer.Produce(TOPIC_TRIGGER, sessionID, messages.Encode(&messages.AssetCache{
producer.Produce(TOPIC_CACHE, sessionID, messages.Encode(&messages.AssetCache{
URL: fullURL,
}))
}

View file

@ -1,287 +1,41 @@
package main
import (
"encoding/json"
"errors"
"io"
"io/ioutil"
"log"
"math/rand"
"net/http"
"strconv"
"time"
gzip "github.com/klauspost/pgzip"
"openreplay/backend/pkg/db/postgres"
. "openreplay/backend/pkg/messages"
"openreplay/backend/pkg/token"
)
const JSON_SIZE_LIMIT int64 = 1e3 // 1Kb
func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) {
type request struct {
Token string `json:"token"`
UserUUID *string `json:"userUUID"`
RevID string `json:"revID"`
Timestamp uint64 `json:"timestamp"`
TrackerVersion string `json:"trackerVersion"`
IsSnippet bool `json:"isSnippet"`
DeviceMemory uint64 `json:"deviceMemory"`
JsHeapSizeLimit uint64 `json:"jsHeapSizeLimit"`
ProjectKey *string `json:"projectKey"`
Reset bool `json:"reset"`
}
type response struct {
Timestamp int64 `json:"timestamp"`
Delay int64 `json:"delay"`
Token string `json:"token"`
UserUUID string `json:"userUUID"`
SessionID string `json:"sessionID"`
BeaconSizeLimit int64 `json:"beaconSizeLimit"`
}
startTime := time.Now()
req := &request{}
body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT) // what if Body == nil?? // use r.ContentLength to return specific error?
//defer body.Close()
if err := json.NewDecoder(body).Decode(req); err != nil {
responseWithError(w, http.StatusBadRequest, err)
return
}
if req.ProjectKey == nil {
responseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required"))
return
}
p, err := pgconn.GetProjectByKey(*req.ProjectKey)
if p == nil {
if err == nil {
responseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or is not active"))
} else {
responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
}
return
}
userUUID := getUUID(req.UserUUID)
tokenData, err := tokenizer.Parse(req.Token)
if err != nil || req.Reset { // Starting the new one
dice := byte(rand.Intn(100)) // [0, 100)
if dice >= p.SampleRate {
responseWithError(w, http.StatusForbidden, errors.New("cancel"))
return
}
ua := uaParser.ParseFromHTTPRequest(r)
if ua == nil {
responseWithError(w, http.StatusForbidden, errors.New("browser not recognized"))
return
}
sessionID, err := flaker.Compose(uint64(startTime.UnixNano() / 1e6))
if err != nil {
responseWithError(w, http.StatusInternalServerError, err)
return
}
// TODO: if EXPIRED => send message for two sessions association
expTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond)
tokenData = &token.TokenData{sessionID, expTime.UnixNano() / 1e6}
country := geoIP.ExtractISOCodeFromHTTPRequest(r)
producer.Produce(TOPIC_RAW, tokenData.ID, Encode(&SessionStart{
Timestamp: req.Timestamp,
ProjectID: uint64(p.ProjectID),
TrackerVersion: req.TrackerVersion,
RevID: req.RevID,
UserUUID: userUUID,
UserAgent: r.Header.Get("User-Agent"),
UserOS: ua.OS,
UserOSVersion: ua.OSVersion,
UserBrowser: ua.Browser,
UserBrowserVersion: ua.BrowserVersion,
UserDevice: ua.Device,
UserDeviceType: ua.DeviceType,
UserCountry: country,
UserDeviceMemorySize: req.DeviceMemory,
UserDeviceHeapSize: req.JsHeapSizeLimit,
}))
}
//delayDuration := time.Now().Sub(startTime)
responseWithJSON(w, &response{
//Timestamp: startTime.UnixNano() / 1e6,
//Delay: delayDuration.Nanoseconds() / 1e6,
Token: tokenizer.Compose(*tokenData),
UserUUID: userUUID,
SessionID: strconv.FormatUint(tokenData.ID, 10),
BeaconSizeLimit: BEACON_SIZE_LIMIT,
})
}
func pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64) {
func pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64, topicName string) {
body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT)
//defer body.Close()
var reader io.ReadCloser
var err error
switch r.Header.Get("Content-Encoding") {
case "gzip":
reader, err := gzip.NewReader(body)
log.Println("Gzip", reader)
reader, err = gzip.NewReader(body)
if err != nil {
responseWithError(w, http.StatusInternalServerError, err) // TODO: stage-dependent responce
return
}
log.Println("Gzip reader init", reader)
defer reader.Close()
default:
reader = body
}
log.Println("Reader after switch:", reader)
buf, err := ioutil.ReadAll(reader)
if err != nil {
responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
return
}
producer.Produce(TOPIC_RAW, sessionID, buf) // What if not able to send?
w.WriteHeader(http.StatusOK)
}
func pushMessagesHandler(w http.ResponseWriter, r *http.Request) {
sessionData, err := tokenizer.ParseFromHTTPRequest(r)
if err != nil {
responseWithError(w, http.StatusUnauthorized, err)
return
}
pushMessages(w, r, sessionData.ID)
}
func pushMessagesSeparatelyHandler(w http.ResponseWriter, r *http.Request) {
sessionData, err := tokenizer.ParseFromHTTPRequest(r)
if err != nil {
responseWithError(w, http.StatusUnauthorized, err)
return
}
body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT)
//defer body.Close()
buf, err := ioutil.ReadAll(body)
if err != nil {
responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
return
}
//log.Printf("Sending batch...")
//startTime := time.Now()
// analyticsMessages := make([]Message, 0, 200)
rewritenBuf, err := RewriteBatch(buf, func(msg Message) Message {
switch m := msg.(type) {
case *SetNodeAttributeURLBased:
if m.Name == "src" || m.Name == "href" {
msg = &SetNodeAttribute{
ID: m.ID,
Name: m.Name,
Value: handleURL(sessionData.ID, m.BaseURL, m.Value),
}
} else if m.Name == "style" {
msg = &SetNodeAttribute{
ID: m.ID,
Name: m.Name,
Value: handleCSS(sessionData.ID, m.BaseURL, m.Value),
}
}
case *SetCSSDataURLBased:
msg = &SetCSSData{
ID: m.ID,
Data: handleCSS(sessionData.ID, m.BaseURL, m.Data),
}
case *CSSInsertRuleURLBased:
msg = &CSSInsertRule{
ID: m.ID,
Index: m.Index,
Rule: handleCSS(sessionData.ID, m.BaseURL, m.Rule),
}
}
// switch msg.(type) {
// case *BatchMeta, // TODO: watchout! Meta().Index'es are changed here (though it is still unique for the topic-session pair)
// *SetPageLocation,
// *PageLoadTiming,
// *PageRenderTiming,
// *PerformanceTrack,
// *SetInputTarget,
// *SetInputValue,
// *MouseClick,
// *RawErrorEvent,
// *JSException,
// *ResourceTiming,
// *RawCustomEvent,
// *CustomIssue,
// *Fetch,
// *StateAction,
// *GraphQL,
// *CreateElementNode,
// *CreateTextNode,
// *RemoveNode,
// *CreateDocument,
// *RemoveNodeAttribute,
// *MoveNode,
// *SetCSSData,
// *CSSInsertRule,
// *CSSDeleteRule:
// analyticsMessages = append(analyticsMessages, msg)
//}
return msg
})
if err != nil {
responseWithError(w, http.StatusForbidden, err)
return
}
producer.Produce(TOPIC_RAW, sessionData.ID, rewritenBuf)
//producer.Produce(TOPIC_ANALYTICS, sessionData.ID, WriteBatch(analyticsMessages))
//duration := time.Now().Sub(startTime)
//log.Printf("Sended batch within %v nsec; %v nsek/byte", duration.Nanoseconds(), duration.Nanoseconds()/int64(len(buf)))
w.WriteHeader(http.StatusOK)
}
func notStartedHandler(w http.ResponseWriter, r *http.Request) {
type request struct {
ProjectKey *string `json:"projectKey"`
TrackerVersion string `json:"trackerVersion"`
DoNotTrack bool `json:"DoNotTrack"`
// RevID string `json:"revID"`
}
req := &request{}
body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT)
defer body.Close()
if err := json.NewDecoder(body).Decode(req); err != nil {
responseWithError(w, http.StatusBadRequest, err)
return
}
if req.ProjectKey == nil {
responseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required"))
return
}
ua := uaParser.ParseFromHTTPRequest(r) // TODO?: insert anyway
if ua == nil {
responseWithError(w, http.StatusForbidden, errors.New("browser not recognized"))
return
}
country := geoIP.ExtractISOCodeFromHTTPRequest(r)
err := pgconn.InsertUnstartedSession(postgres.UnstartedSession{
ProjectKey: *req.ProjectKey,
TrackerVersion: req.TrackerVersion,
DoNotTrack: req.DoNotTrack,
Platform: "web",
UserAgent: r.Header.Get("User-Agent"),
UserOS: ua.OS,
UserOSVersion: ua.OSVersion,
UserBrowser: ua.Browser,
UserBrowserVersion: ua.BrowserVersion,
UserDevice: ua.Device,
UserDeviceType: ua.DeviceType,
UserCountry: country,
})
if err != nil {
log.Printf("Unable to insert Unstarted Session: %v\n", err)
}
producer.Produce(topicName, sessionID, buf) // What if not able to send?
w.WriteHeader(http.StatusOK)
}

View file

@ -1,145 +1,199 @@
package main
// const FILES_SIZE_LIMIT int64 = 1e8 // 100Mb
import (
"encoding/json"
"net/http"
"errors"
"time"
"math/rand"
"strconv"
"log"
// func startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) {
// type request struct {
// // SessionID *string
// EncodedProjectID *uint64 `json:"projectID"`
// TrackerVersion string `json:"trackerVersion"`
// RevID string `json:"revID"`
// UserUUID *string `json:"userUUID"`
// //UserOS string `json"userOS"` //hardcoded 'MacOS'
// UserOSVersion string `json:"userOSVersion"`
// UserDevice string `json:"userDevice"`
// Timestamp uint64 `json:"timestamp"`
// // UserDeviceType uint 0:phone 1:pad 2:tv 3:carPlay 5:mac
// // “performances”:{
// // “activeProcessorCount”:8,
// // “isLowPowerModeEnabled”:0,
// // “orientation”:0,
// // “systemUptime”:585430,
// // “batteryState”:0,
// // “thermalState”:0,
// // “batteryLevel”:0,
// // “processorCount”:8,
// // “physicalMemory”:17179869184
// // },
// }
// type response struct {
// Token string `json:"token"`
// ImagesHashList []string `json:"imagesHashList"`
// UserUUID string `json:"userUUID"`
// SESSION_ID uint64 `json:"SESSION_ID"` ///TEMP
// }
// startTime := time.Now()
// req := &request{}
// body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT)
// //defer body.Close()
// if err := json.NewDecoder(body).Decode(req); err != nil {
// responseWithError(w, http.StatusBadRequest, err)
// return
// }
"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/token"
. "openreplay/backend/pkg/messages"
)
// if req.EncodedProjectID == nil {
// responseWithError(w, http.StatusForbidden, errors.New("ProjectID value required"))
// return
// }
// projectID := decodeProjectID(*(req.EncodedProjectID))
// if projectID == 0 {
// responseWithError(w, http.StatusUnprocessableEntity, errors.New("ProjectID value is invalid"))
// return
// }
// p, err := pgconn.GetProject(uint32(projectID))
// if p == nil {
// if err == nil {
// responseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or is not active"))
// } else {
// responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
// }
// return
// }
// sessionID, err := flaker.Compose(req.Timestamp)
// if err != nil {
// responseWithError(w, http.StatusInternalServerError, err)
// return
// }
// userUUID := getUUID(req.UserUUID)
// country := geoIP.ExtractISOCodeFromHTTPRequest(r)
// expirationTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond)
const FILES_SIZE_LIMIT int64 = 1e7 // 10Mb
// imagesHashList, err := s3.GetFrequentlyUsedKeys(*(req.EncodedProjectID)) // TODO: reuse index: ~ frequency * size
// if err != nil {
// responseWithError(w, http.StatusInternalServerError, err)
// return
// }
// startSessionHandlerIOS handles POST /v1/ios/start. It validates the project
// key, either resumes an existing session (valid token in the payload) or
// starts a new one (emitting an IOSSessionStart message to TOPIC_RAW_IOS),
// and replies with the session token, session ID and beacon size limit.
func startSessionHandlerIOS(w http.ResponseWriter, r *http.Request) {
// Payload sent by the iOS tracker on startup.
type request struct {
Token string `json:"token"`
ProjectKey *string `json:"projectKey"`
TrackerVersion string `json:"trackerVersion"`
RevID string `json:"revID"`
UserUUID *string `json:"userUUID"`
//UserOS string `json"userOS"` //hardcoded 'MacOS'
UserOSVersion string `json:"userOSVersion"`
UserDevice string `json:"userDevice"`
Timestamp uint64 `json:"timestamp"`
// UserDeviceType uint 0:phone 1:pad 2:tv 3:carPlay 5:mac
// “performances”:{
// “activeProcessorCount”:8,
// “isLowPowerModeEnabled”:0,
// “orientation”:0,
// “systemUptime”:585430,
// “batteryState”:0,
// “thermalState”:0,
// “batteryLevel”:0,
// “processorCount”:8,
// “physicalMemory”:17179869184
// },
}
// Body returned to the tracker.
type response struct {
Token string `json:"token"`
ImagesHashList []string `json:"imagesHashList"`
UserUUID string `json:"userUUID"`
BeaconSizeLimit int64 `json:"beaconSizeLimit"`
SessionID string `json:"sessionID"`
}
startTime := time.Now()
req := &request{}
// Cap the request body so oversized payloads cannot exhaust memory.
body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT)
//defer body.Close()
if err := json.NewDecoder(body).Decode(req); err != nil {
responseWithError(w, http.StatusBadRequest, err)
return
}
// responseWithJSON(w, &response{
// Token: tokenizer.Compose(sessionID, uint64(expirationTime.UnixNano()/1e6)),
// ImagesHashList: imagesHashList,
// UserUUID: userUUID,
// //TEMP:
// SESSION_ID: sessionID,
// })
// producer.Produce(topicRaw, sessionID, messages.Encode(&messages.IOSSessionStart{
// Timestamp: req.Timestamp,
// ProjectID: projectID,
// TrackerVersion: req.TrackerVersion,
// RevID: req.RevID,
// UserUUID: userUUID,
// UserOS: "MacOS",
// UserOSVersion: req.UserOSVersion,
// UserDevice: MapIOSDevice(req.UserDevice),
// UserDeviceType: GetIOSDeviceType(req.UserDevice), // string `json:"userDeviceType"` // From UserDevice; ENUM ?
// UserCountry: country,
// }))
// }
if req.ProjectKey == nil {
responseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required"))
return
}
p, err := pgconn.GetProjectByKey(*req.ProjectKey)
if err != nil {
if postgres.IsNoRowsErr(err) {
responseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or is not active"))
} else {
responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
}
return
}
userUUID := getUUID(req.UserUUID)
// A valid token means this is a continuation of an existing session;
// only on parse failure do we create a brand new session below.
tokenData, err := tokenizer.Parse(req.Token)
if err != nil { // Starting the new one
// Project-level sampling: drop (100 - SampleRate)% of new sessions.
dice := byte(rand.Intn(100)) // [0, 100)
if dice >= p.SampleRate {
responseWithError(w, http.StatusForbidden, errors.New("cancel"))
return
}
// NOTE(review): rejecting on an unrecognized browser UA looks odd for an
// iOS client — confirm whether this check is intentional here.
ua := uaParser.ParseFromHTTPRequest(r)
if ua == nil {
responseWithError(w, http.StatusForbidden, errors.New("browser not recognized"))
return
}
// Session IDs are flake IDs derived from the server-side start time (ms).
sessionID, err := flaker.Compose(uint64(startTime.UnixNano() / 1e6))
if err != nil {
responseWithError(w, http.StatusInternalServerError, err)
return
}
// TODO: if EXPIRED => send message for two sessions association
expTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond)
tokenData = &token.TokenData{sessionID, expTime.UnixNano() / 1e6}
country := geoIP.ExtractISOCodeFromHTTPRequest(r)
// The difference with web is mostly here:
producer.Produce(TOPIC_RAW_IOS, tokenData.ID, Encode(&IOSSessionStart{
Timestamp: req.Timestamp,
ProjectID: uint64(p.ProjectID),
TrackerVersion: req.TrackerVersion,
RevID: req.RevID,
UserUUID: userUUID,
UserOS: "IOS",
UserOSVersion: req.UserOSVersion,
UserDevice: MapIOSDevice(req.UserDevice),
UserDeviceType: GetIOSDeviceType(req.UserDevice),
UserCountry: country,
}))
}
// imagesHashList, err := s3.GetFrequentlyUsedKeys(*(req.EncodedProjectID)) // TODO: reuse index: ~ frequency * size
// if err != nil {
// responseWithError(w, http.StatusInternalServerError, err)
// return
// }
responseWithJSON(w, &response{
// ImagesHashList: imagesHashList,
Token: tokenizer.Compose(*tokenData),
UserUUID: userUUID,
SessionID: strconv.FormatUint(tokenData.ID, 10),
BeaconSizeLimit: BEACON_SIZE_LIMIT,
})
}
// func pushLateMessagesHandler(w http.ResponseWriter, r *http.Request) {
// sessionData, err := tokenizer.ParseFromHTTPRequest(r)
// if err != nil && err != token.EXPIRED {
// responseWithError(w, http.StatusUnauthorized, err)
// return
// }
// // Check timestamps here?
// pushMessages(w, r, sessionData.ID)
// }
// pushMessagesHandlerIOS handles POST /v1/ios/i: it authorizes the request by
// its session token and forwards the message batch to the raw iOS topic.
func pushMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) {
	session, authErr := tokenizer.ParseFromHTTPRequest(r)
	if authErr != nil {
		responseWithError(w, http.StatusUnauthorized, authErr)
		return
	}
	pushMessages(w, r, session.ID, TOPIC_RAW_IOS)
}
// func iosImagesUploadHandler(w http.ResponseWriter, r *http.Request) {
// r.Body = http.MaxBytesReader(w, r.Body, FILES_SIZE_LIMIT)
// // defer r.Body.Close()
// err := r.ParseMultipartForm(1e5) // 100Kb
// if err == http.ErrNotMultipart || err == http.ErrMissingBoundary {
// responseWithError(w, http.StatusUnsupportedMediaType, err)
// // } else if err == multipart.ErrMessageTooLarge // if non-files part exceeds 10 MB
// } else if err != nil {
// responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
// }
// if len(r.MultipartForm.Value["projectID"]) == 0 {
// responseWithError(w, http.StatusBadRequest, errors.New("projectID parameter required")) // status for missing/wrong parameter?
// return
// }
// // encodedProjectID, err := strconv.ParseUint(r.MultipartForm.Value["projectID"][0], 10, 64)
// // projectID := decodeProjectID(encodedProjectID)
// // if projectID == 0 || err != nil {
// // responseWithError(w, http.StatusUnprocessableEntity, errors.New("projectID value is incorrect"))
// // return
// // }
// prefix := r.MultipartForm.Value["projectID"][0] + "/" //strconv.FormatUint(uint64(projectID), 10) + "/"
// pushLateMessagesHandlerIOS handles POST /v1/ios/late. Unlike the regular
// push endpoint it also accepts batches carrying an EXPIRED session token,
// so data buffered on the device after session end is not lost.
func pushLateMessagesHandlerIOS(w http.ResponseWriter, r *http.Request) {
	session, authErr := tokenizer.ParseFromHTTPRequest(r)
	if authErr != nil && authErr != token.EXPIRED {
		responseWithError(w, http.StatusUnauthorized, authErr)
		return
	}
	// Check timestamps here?
	pushMessages(w, r, session.ID, TOPIC_RAW_IOS)
}
// for _, fileHeaderList := range r.MultipartForm.File {
// for _, fileHeader := range fileHeaderList {
// file, err := fileHeader.Open()
// if err != nil {
// continue // TODO: send server error or accumulate successful files
// }
// key := prefix + fileHeader.Filename // TODO: Malicious image put: use jwt?
// go s3.Upload(file, key, "image/png", false)
// }
// }
// w.WriteHeader(http.StatusOK)
// }
// imagesUploadHandlerIOS handles POST /v1/ios/images: it authorizes the
// request by its session token, parses the multipart payload and uploads each
// screen image to S3 under "<projectKey>/<sessionID>/<filename>".
//
// Fixes over the previous version: the handler now returns immediately after
// writing an error response for multipart-parse failures and for a nil
// r.MultipartForm — previously it fell through, double-writing the response
// and dereferencing a nil MultipartForm (panic).
func imagesUploadHandlerIOS(w http.ResponseWriter, r *http.Request) {
	log.Printf("received image request")
	sessionData, err := tokenizer.ParseFromHTTPRequest(r)
	if err != nil { // Should accept expired token?
		responseWithError(w, http.StatusUnauthorized, err)
		return
	}
	// Cap the total upload size before parsing.
	r.Body = http.MaxBytesReader(w, r.Body, FILES_SIZE_LIMIT)
	// defer r.Body.Close()
	err = r.ParseMultipartForm(1e6) // ~1Mb kept in memory; the rest spills to disk
	if err == http.ErrNotMultipart || err == http.ErrMissingBoundary {
		responseWithError(w, http.StatusUnsupportedMediaType, err)
		return
	} else if err != nil {
		responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
		return
	}
	if r.MultipartForm == nil {
		responseWithError(w, http.StatusInternalServerError, errors.New("Multipart not parsed"))
		return
	}
	if len(r.MultipartForm.Value["projectKey"]) == 0 {
		responseWithError(w, http.StatusBadRequest, errors.New("projectKey parameter missing")) // status for missing/wrong parameter?
		return
	}
	// S3 key prefix: "<projectKey>/<sessionID>/"
	prefix := r.MultipartForm.Value["projectKey"][0] + "/" + strconv.FormatUint(sessionData.ID, 10) + "/"
	for _, fileHeaderList := range r.MultipartForm.File {
		for _, fileHeader := range fileHeaderList {
			file, err := fileHeader.Open()
			if err != nil {
				continue // TODO: send server error or accumulate successful files
			}
			key := prefix + fileHeader.Filename
			log.Printf("Uploading image... %v", key)
			// Upload asynchronously; errors are logged only, the client still
			// gets 200 (best-effort, matching previous behavior).
			go func() { //TODO: mime type from header
				if err := s3.Upload(file, key, "image/jpeg", false); err != nil {
					log.Printf("Upload ios screen error. %v", err)
				}
			}()
		}
	}
	w.WriteHeader(http.StatusOK)
}

View file

@ -0,0 +1,249 @@
package main
import (
"encoding/json"
"errors"
"io/ioutil"
"log"
"math/rand"
"net/http"
"strconv"
"time"
"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/token"
. "openreplay/backend/pkg/messages"
)
// startSessionHandlerWeb handles POST /v1/web/start. It validates the project
// key, either resumes an existing session (valid token, no reset requested) or
// starts a new one (emitting a SessionStart message to TOPIC_RAW_WEB), and
// replies with the session token, session ID and beacon size limit.
func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) {
// Payload sent by the web tracker on startup.
type request struct {
Token string `json:"token"`
UserUUID *string `json:"userUUID"`
RevID string `json:"revID"`
Timestamp uint64 `json:"timestamp"`
TrackerVersion string `json:"trackerVersion"`
IsSnippet bool `json:"isSnippet"`
DeviceMemory uint64 `json:"deviceMemory"`
JsHeapSizeLimit uint64 `json:"jsHeapSizeLimit"`
ProjectKey *string `json:"projectKey"`
Reset bool `json:"reset"`
}
// Body returned to the tracker.
type response struct {
Timestamp int64 `json:"timestamp"`
Delay int64 `json:"delay"`
Token string `json:"token"`
UserUUID string `json:"userUUID"`
SessionID string `json:"sessionID"`
BeaconSizeLimit int64 `json:"beaconSizeLimit"`
}
startTime := time.Now()
req := &request{}
// Cap the request body so oversized payloads cannot exhaust memory.
body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT) // what if Body == nil?? // use r.ContentLength to return specific error?
//defer body.Close()
if err := json.NewDecoder(body).Decode(req); err != nil {
responseWithError(w, http.StatusBadRequest, err)
return
}
if req.ProjectKey == nil {
responseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required"))
return
}
p, err := pgconn.GetProjectByKey(*req.ProjectKey)
if err != nil {
if postgres.IsNoRowsErr(err) {
responseWithError(w, http.StatusNotFound, errors.New("Project doesn't exist or capture limit has been reached"))
} else {
responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
}
return
}
userUUID := getUUID(req.UserUUID)
// An invalid token OR an explicit client-side reset starts a fresh session.
tokenData, err := tokenizer.Parse(req.Token)
if err != nil || req.Reset { // Starting the new one
// Project-level sampling: drop (100 - SampleRate)% of new sessions.
dice := byte(rand.Intn(100)) // [0, 100)
if dice >= p.SampleRate {
responseWithError(w, http.StatusForbidden, errors.New("cancel"))
return
}
ua := uaParser.ParseFromHTTPRequest(r)
if ua == nil {
responseWithError(w, http.StatusForbidden, errors.New("browser not recognized"))
return
}
// Session IDs are flake IDs derived from the server-side start time (ms).
sessionID, err := flaker.Compose(uint64(startTime.UnixNano() / 1e6))
if err != nil {
responseWithError(w, http.StatusInternalServerError, err)
return
}
// TODO: if EXPIRED => send message for two sessions association
expTime := startTime.Add(time.Duration(p.MaxSessionDuration) * time.Millisecond)
tokenData = &token.TokenData{sessionID, expTime.UnixNano() / 1e6}
country := geoIP.ExtractISOCodeFromHTTPRequest(r)
producer.Produce(TOPIC_RAW_WEB, tokenData.ID, Encode(&SessionStart{
Timestamp: req.Timestamp,
ProjectID: uint64(p.ProjectID),
TrackerVersion: req.TrackerVersion,
RevID: req.RevID,
UserUUID: userUUID,
UserAgent: r.Header.Get("User-Agent"),
UserOS: ua.OS,
UserOSVersion: ua.OSVersion,
UserBrowser: ua.Browser,
UserBrowserVersion: ua.BrowserVersion,
UserDevice: ua.Device,
UserDeviceType: ua.DeviceType,
UserCountry: country,
UserDeviceMemorySize: req.DeviceMemory,
UserDeviceHeapSize: req.JsHeapSizeLimit,
}))
}
//delayDuration := time.Now().Sub(startTime)
responseWithJSON(w, &response{
//Timestamp: startTime.UnixNano() / 1e6,
//Delay: delayDuration.Nanoseconds() / 1e6,
Token: tokenizer.Compose(*tokenData),
UserUUID: userUUID,
SessionID: strconv.FormatUint(tokenData.ID, 10),
BeaconSizeLimit: BEACON_SIZE_LIMIT,
})
}
// pushMessagesHandlerWeb handles POST /v1/web/i. It authorizes the request by
// its session token, rewrites URL-based DOM/CSS messages in the batch
// (resolving relative URLs via handleURL/handleCSS) and forwards the rewritten
// batch to the raw web topic.
func pushMessagesHandlerWeb(w http.ResponseWriter, r *http.Request) {
sessionData, err := tokenizer.ParseFromHTTPRequest(r)
if err != nil {
responseWithError(w, http.StatusUnauthorized, err)
return
}
// Cap the batch size before reading it fully into memory.
body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT)
//defer body.Close()
buf, err := ioutil.ReadAll(body)
if err != nil {
responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
return
}
//log.Printf("Sending batch...")
//startTime := time.Now()
// analyticsMessages := make([]Message, 0, 200)
// Replace each *URLBased message with its resolved counterpart; all other
// messages pass through unchanged.
rewritenBuf, err := RewriteBatch(buf, func(msg Message) Message {
switch m := msg.(type) {
case *SetNodeAttributeURLBased:
// Only src/href (URL) and style (CSS) attributes need resolving.
if m.Name == "src" || m.Name == "href" {
msg = &SetNodeAttribute{
ID: m.ID,
Name: m.Name,
Value: handleURL(sessionData.ID, m.BaseURL, m.Value),
}
} else if m.Name == "style" {
msg = &SetNodeAttribute{
ID: m.ID,
Name: m.Name,
Value: handleCSS(sessionData.ID, m.BaseURL, m.Value),
}
}
case *SetCSSDataURLBased:
msg = &SetCSSData{
ID: m.ID,
Data: handleCSS(sessionData.ID, m.BaseURL, m.Data),
}
case *CSSInsertRuleURLBased:
msg = &CSSInsertRule{
ID: m.ID,
Index: m.Index,
Rule: handleCSS(sessionData.ID, m.BaseURL, m.Rule),
}
}
// switch msg.(type) {
// case *BatchMeta, // TODO: watchout! Meta().Index'es are changed here (though it is still unique for the topic-session pair)
// *SetPageLocation,
// *PageLoadTiming,
// *PageRenderTiming,
// *PerformanceTrack,
// *SetInputTarget,
// *SetInputValue,
// *MouseClick,
// *RawErrorEvent,
// *JSException,
// *ResourceTiming,
// *RawCustomEvent,
// *CustomIssue,
// *Fetch,
// *StateAction,
// *GraphQL,
// *CreateElementNode,
// *CreateTextNode,
// *RemoveNode,
// *CreateDocument,
// *RemoveNodeAttribute,
// *MoveNode,
// *SetCSSData,
// *CSSInsertRule,
// *CSSDeleteRule:
// analyticsMessages = append(analyticsMessages, msg)
//}
return msg
})
if err != nil {
responseWithError(w, http.StatusForbidden, err)
return
}
producer.Produce(TOPIC_RAW_WEB, sessionData.ID, rewritenBuf)
//producer.Produce(TOPIC_ANALYTICS, sessionData.ID, WriteBatch(analyticsMessages))
//duration := time.Now().Sub(startTime)
//log.Printf("Sended batch within %v nsec; %v nsek/byte", duration.Nanoseconds(), duration.Nanoseconds()/int64(len(buf)))
w.WriteHeader(http.StatusOK)
}
// notStartedHandlerWeb handles POST /v1/web/not-started. It records a session
// that the tracker could not start (e.g. Do-Not-Track enabled) as an
// "unstarted session" row in Postgres for analytics. Insert failures are only
// logged; the client always receives 200 (best-effort endpoint).
func notStartedHandlerWeb(w http.ResponseWriter, r *http.Request) {
type request struct {
ProjectKey *string `json:"projectKey"`
TrackerVersion string `json:"trackerVersion"`
DoNotTrack bool `json:"DoNotTrack"`
// RevID string `json:"revID"`
}
req := &request{}
body := http.MaxBytesReader(w, r.Body, JSON_SIZE_LIMIT)
defer body.Close()
if err := json.NewDecoder(body).Decode(req); err != nil {
responseWithError(w, http.StatusBadRequest, err)
return
}
if req.ProjectKey == nil {
responseWithError(w, http.StatusForbidden, errors.New("ProjectKey value required"))
return
}
ua := uaParser.ParseFromHTTPRequest(r) // TODO?: insert anyway
if ua == nil {
responseWithError(w, http.StatusForbidden, errors.New("browser not recognized"))
return
}
country := geoIP.ExtractISOCodeFromHTTPRequest(r)
err := pgconn.InsertUnstartedSession(postgres.UnstartedSession{
ProjectKey: *req.ProjectKey,
TrackerVersion: req.TrackerVersion,
DoNotTrack: req.DoNotTrack,
Platform: "web",
UserAgent: r.Header.Get("User-Agent"),
UserOS: ua.OS,
UserOSVersion: ua.OSVersion,
UserBrowser: ua.Browser,
UserBrowserVersion: ua.BrowserVersion,
UserDevice: ua.Device,
UserDeviceType: ua.DeviceType,
UserCountry: country,
})
if err != nil {
// Best-effort: log and still return 200 to the client.
log.Printf("Unable to insert Unstarted Session: %v\n", err)
}
w.WriteHeader(http.StatusOK)
}

View file

@ -34,10 +34,11 @@ var geoIP *geoip.GeoIP
var tokenizer *token.Tokenizer
var s3 *storage.S3
var TOPIC_RAW string
var TOPIC_RAW_WEB string
var TOPIC_RAW_IOS string
var TOPIC_CACHE string
var TOPIC_TRIGGER string
var TOPIC_ANALYTICS string
// var kafkaTopicEvents string
//var TOPIC_ANALYTICS string
var CACHE_ASSESTS bool
var BEACON_SIZE_LIMIT int64
@ -46,13 +47,15 @@ func main() {
producer = queue.NewProducer()
defer producer.Close(15000)
TOPIC_RAW = env.String("TOPIC_RAW")
TOPIC_RAW_WEB = env.String("TOPIC_RAW_WEB")
TOPIC_RAW_IOS = env.String("TOPIC_RAW_IOS")
TOPIC_CACHE = env.String("TOPIC_CACHE")
TOPIC_TRIGGER = env.String("TOPIC_TRIGGER")
TOPIC_ANALYTICS = env.String("TOPIC_ANALYTICS")
//TOPIC_ANALYTICS = env.String("TOPIC_ANALYTICS")
rewriter = assets.NewRewriter(env.String("ASSETS_ORIGIN"))
pgconn = cache.NewPGCache(postgres.NewConn(env.String("POSTGRES_STRING")), 1000 * 60 * 20)
defer pgconn.Close()
//s3 = storage.NewS3(env.String("S3_BUCKET_IMAGES_IOS"), env.String("AWS_REGION"))
s3 = storage.NewS3(env.String("AWS_REGION"), env.String("S3_BUCKET_IOS_IMAGES"))
tokenizer = token.NewTokenizer(env.String("TOKEN_SECRET"))
uaParser = uaparser.NewUAParser(env.String("UAPARSER_FILE"))
geoIP = geoip.NewGeoIP(env.String("MAXMINDDB_FILE"))
@ -76,13 +79,16 @@ func main() {
return
}
log.Printf("Request: %v - %v ", r.Method, r.URL.Path)
switch r.URL.Path {
case "/":
w.WriteHeader(http.StatusOK)
case "/v1/web/not-started":
switch r.Method {
case http.MethodPost:
notStartedHandler(w, r)
notStartedHandlerWeb(w, r)
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
@ -96,38 +102,38 @@ func main() {
case "/v1/web/i":
switch r.Method {
case http.MethodPost:
pushMessagesSeparatelyHandler(w, r)
pushMessagesHandlerWeb(w, r)
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
case "/v1/ios/start":
switch r.Method {
case http.MethodPost:
startSessionHandlerIOS(w, r)
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
case "/v1/ios/i":
switch r.Method {
case http.MethodPost:
pushMessagesHandlerIOS(w, r)
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
case "/v1/ios/late":
switch r.Method {
case http.MethodPost:
pushLateMessagesHandlerIOS(w, r)
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
case "/v1/ios/images":
switch r.Method {
case http.MethodPost:
imagesUploadHandlerIOS(w, r)
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
// case "/v1/ios/start":
// switch r.Method {
// case http.MethodPost:
// startSessionHandlerIOS(w, r)
// default:
// w.WriteHeader(http.StatusMethodNotAllowed)
// }
// case "/v1/ios/append":
// switch r.Method {
// case http.MethodPost:
// pushMessagesHandler(w, r)
// default:
// w.WriteHeader(http.StatusMethodNotAllowed)
// }
// case "/v1/ios/late":
// switch r.Method {
// case http.MethodPost:
// pushLateMessagesHandler(w, r)
// default:
// w.WriteHeader(http.StatusMethodNotAllowed)
// }
// case "/v1/ios/images":
// switch r.Method {
// case http.MethodPost:
// iosImagesUploadHandler(w, r)
// default:
// w.WriteHeader(http.StatusMethodNotAllowed)
// }
default:
w.WriteHeader(http.StatusNotFound)
}

View file

@ -11,6 +11,7 @@ func responseWithJSON(w http.ResponseWriter, res interface{}) {
if err != nil {
log.Println(err)
}
w.Header().Set("Content-Type", "application/json")
w.Write(body)
}

View file

@ -19,7 +19,7 @@ import (
func main() {
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
TOPIC_RAW := env.String("TOPIC_RAW")
TOPIC_RAW_WEB := env.String("TOPIC_RAW_WEB")
POSTGRES_STRING := env.String("POSTGRES_STRING")
pg := postgres.NewConn(POSTGRES_STRING)
@ -80,7 +80,7 @@ func main() {
sessionID = sessData.ID
}
// TODO: send to ready-events topic. Otherwise it have to go through the events worker.
producer.Produce(TOPIC_RAW, sessionID, messages.Encode(event.RawErrorEvent))
producer.Produce(TOPIC_RAW_WEB, sessionID, messages.Encode(event.RawErrorEvent))
case err := <-manager.Errors:
log.Printf("Integration error: %v\n", err)
case i := <-manager.RequestDataUpdates:

View file

@ -10,9 +10,9 @@ import (
"syscall"
"openreplay/backend/pkg/env"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/queue"
"openreplay/backend/pkg/queue/types"
. "openreplay/backend/pkg/messages"
)
@ -27,16 +27,17 @@ func main() {
consumer := queue.NewMessageConsumer(
env.String("GROUP_SINK"),
[]string{
env.String("TOPIC_RAW"),
env.String("TOPIC_RAW_WEB"),
env.String("TOPIC_RAW_IOS"),
},
func(sessionID uint64, message messages.Message, _ *types.Meta) {
//typeID, err := messages.GetMessageTypeID(value)
func(sessionID uint64, message Message, _ *types.Meta) {
//typeID, err := GetMessageTypeID(value)
// if err != nil {
// log.Printf("Message type decoding error: %v", err)
// return
// }
typeID := message.Meta().TypeID
if !messages.IsReplayerType(typeID) {
if !IsReplayerType(typeID) {
return
}
@ -44,7 +45,7 @@ func main() {
value := message.Encode()
var data []byte
if messages.IsIOSType(typeID) {
if IsIOSType(typeID) {
data = value
} else {
data = make([]byte, len(value)+8)

View file

@ -55,15 +55,20 @@
"S3_HOST": "",
"S3_KEY": "",
"S3_SECRET": "",
"version_number": "1.0.0",
"LICENSE_KEY": "",
"SAML2_MD_URL": "",
"idp_entityId": "",
"idp_sso_url": "",
"idp_x509cert": "",
"idp_sls_url": "",
"idp_name": "",
"sso_exp_delta_seconds": "172800",
"sso_landing": "/login?jwt=%s",
"invitation_link": "/api/users/invitation?token=%s",
"change_password_link": "/reset-password?invitation=%s&&pass=%s"
"change_password_link": "/reset-password?invitation=%s&&pass=%s",
"iosBucket": "openreplay-ios-images",
"version_number": "1.3.6",
"assist_secret": ""
},
"lambda_timeout": 150,
"lambda_memory_size": 400,

11
ee/api/.gitignore vendored
View file

@ -204,6 +204,8 @@ Pipfile
/chalicelib/core/log_tool_sentry.py
/chalicelib/core/log_tool_stackdriver.py
/chalicelib/core/log_tool_sumologic.py
/chalicelib/core/mobile.py
/chalicelib/core/sessions.py
/chalicelib/core/sessions_assignments.py
/chalicelib/core/sessions_favorite_viewed.py
/chalicelib/core/sessions_metas.py
@ -233,3 +235,12 @@ Pipfile
/chalicelib/utils/smtp.py
/chalicelib/utils/strings.py
/chalicelib/utils/TimeUTC.py
/chalicelib/core/heatmaps.py
/entrypoint.bundle.sh
/entrypoint.sh
/env_handler.py
/chalicelib/blueprints/app/v1_api.py
/build.sh
/chalicelib/core/assist.py
/chalicelib/blueprints/app/__init__.py
/Dockerfile.bundle

View file

@ -0,0 +1,10 @@
sudo yum update
sudo yum install yum-utils
sudo rpm --import https://repo.clickhouse.com/CLICKHOUSE-KEY.GPG
sudo yum-config-manager --add-repo https://repo.clickhouse.com/rpm/stable/x86_64
sudo yum update
sudo service clickhouse-server restart
#later must run in clickhouse-client:
#SET allow_experimental_window_functions = 1;

View file

@ -6,13 +6,13 @@ from chalicelib import _overrides
from chalicelib.blueprints import bp_authorizers
from chalicelib.blueprints import bp_core, bp_core_crons
from chalicelib.blueprints import bp_core_dynamic, bp_core_dynamic_crons
from chalicelib.blueprints import bp_ee, bp_ee_crons, bp_saml
from chalicelib.blueprints.app import v1_api, v1_api_ee
from chalicelib.blueprints.subs import bp_dashboard
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.helper import environ
from chalicelib.blueprints import bp_ee, bp_ee_crons, bp_saml
app = Chalice(app_name='parrot')
app.debug = not helper.is_production() or helper.is_local()
@ -121,7 +121,8 @@ app.register_blueprint(bp_core_crons.app)
app.register_blueprint(bp_core_dynamic.app)
app.register_blueprint(bp_core_dynamic_crons.app)
app.register_blueprint(bp_dashboard.app)
app.register_blueprint(v1_api.app)
app.register_blueprint(v1_api_ee.app)
# Enterprise
app.register_blueprint(bp_ee.app)
app.register_blueprint(bp_ee_crons.app)

View file

@ -0,0 +1,16 @@
from chalice import Blueprint
from chalicelib import _overrides
from chalicelib.blueprints import bp_authorizers
from chalicelib.utils import assist_helper
app = Blueprint(__name__)
_overrides.chalice_app(app)
@app.route('/v1/assist/credentials', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer)
def get_assist_credentials(context):
credentials = assist_helper.get_temporary_credentials()
if "errors" in credentials:
return credentials
return {"data": credentials}

View file

@ -1,10 +1,11 @@
from chalice import Blueprint, Response
from chalicelib import _overrides
from chalicelib.core import assist
from chalicelib.core import boarding
from chalicelib.core import errors
from chalicelib.core import license
from chalicelib.core import metadata, errors_favorite_viewed, slack, alerts, sessions, integrations_manager, assist
from chalicelib.core import metadata, errors_favorite_viewed, slack, alerts, sessions, integrations_manager
from chalicelib.core import notifications
from chalicelib.core import projects
from chalicelib.core import signup
@ -25,9 +26,7 @@ def login():
data = app.current_request.json_body
if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]):
return {"errors": ["Invalid captcha."]}
r = users.authenticate(data['email'], data['password'],
for_plugin=False
)
r = users.authenticate(data['email'], data['password'], for_plugin=False)
if r is None:
return Response(status_code=401, body={
'errors': ['Youve entered invalid Email or Password.']
@ -46,6 +45,9 @@ def login():
c.pop("createdAt")
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
stack_integrations=True, version=True)
c["smtp"] = helper.has_smtp()
c["iceServers"] = assist.get_ice_servers()
return {
'jwt': r.pop('jwt'),
'data': {
@ -142,7 +144,10 @@ def put_client(context):
@app.route('/signup', methods=['GET'], authorizer=None)
def get_all_signup():
return {"data": tenants.tenants_exists()}
return {"data": {"tenants": tenants.tenants_exists(),
"sso": SAML2_helper.is_saml2_available(),
"ssoProvider": SAML2_helper.get_saml2_provider(),
"edition": helper.get_edition()}}
@app.route('/signup', methods=['POST', 'PUT'], authorizer=None)
@ -347,8 +352,8 @@ def get_members(context):
@app.route('/client/members', methods=['PUT', 'POST'])
def add_member(context):
if SAML2_helper.is_saml2_available():
return {"errors": ["please use your SSO server to add teammates"]}
# if SAML2_helper.is_saml2_available():
# return {"errors": ["please use your SSO server to add teammates"]}
data = app.current_request.json_body
return users.create_member(tenant_id=context['tenantId'], user_id=context['userId'], data=data)

View file

@ -1,9 +1,58 @@
from chalice import Blueprint
from chalicelib import _overrides
from chalicelib.core import roles
from chalicelib.core import unlock
from chalicelib.utils import assist_helper
app = Blueprint(__name__)
_overrides.chalice_app(app)
unlock.check()
@app.route('/client/roles', methods=['GET'])
def get_roles(context):
return {
'data': roles.get_roles(tenant_id=context["tenantId"])
}
@app.route('/client/roles', methods=['POST', 'PUT'])
def add_role(context):
data = app.current_request.json_body
data = roles.create(tenant_id=context['tenantId'], user_id=context['userId'], name=data["name"],
description=data.get("description"), permissions=data["permissions"])
if "errors" in data:
return data
return {
'data': data
}
@app.route('/client/roles/{roleId}', methods=['POST', 'PUT'])
def edit_role(roleId, context):
data = app.current_request.json_body
data = roles.update(tenant_id=context['tenantId'], user_id=context['userId'], role_id=roleId, changes=data)
if "errors" in data:
return data
return {
'data': data
}
@app.route('/client/roles/{roleId}', methods=['DELETE'])
def delete_role(roleId, context):
data = roles.delete(tenant_id=context['tenantId'], user_id=context["userId"], role_id=roleId)
if "errors" in data:
return data
return {
'data': data
}
@app.route('/assist/credentials', methods=['GET'])
def get_assist_credentials(context):
return {"data": assist_helper.get_full_config()}

View file

@ -1,6 +1,7 @@
from chalice import Blueprint
from chalicelib import _overrides
from chalicelib.utils import SAML2_helper
from chalicelib.utils.SAML2_helper import prepare_request, init_saml_auth
app = Blueprint(__name__)
@ -9,30 +10,27 @@ _overrides.chalice_app(app)
from chalicelib.utils.helper import environ
from onelogin.saml2.auth import OneLogin_Saml2_Logout_Request
from onelogin.saml2.utils import OneLogin_Saml2_Utils
from chalice import Response
from chalicelib.core import users, tenants
from chalicelib.core import users, tenants, roles
@app.route("/saml2", methods=['GET'], authorizer=None)
@app.route('/sso/saml2', methods=['GET'], authorizer=None)
def start_sso():
app.current_request.path = ''
req = prepare_request(request=app.current_request)
auth = init_saml_auth(req)
sso_built_url = auth.login()
return Response(
# status_code=301,
status_code=307,
body='',
headers={'Location': sso_built_url, 'Content-Type': 'text/plain'})
@app.route('/saml2/acs', methods=['POST'], content_types=['application/x-www-form-urlencoded'], authorizer=None)
@app.route('/sso/saml2/acs', methods=['POST'], content_types=['application/x-www-form-urlencoded'], authorizer=None)
def process_sso_assertion():
req = prepare_request(request=app.current_request)
session = req["cookie"]["session"]
request = req['request']
auth = init_saml_auth(req)
request_id = None
@ -46,92 +44,64 @@ def process_sso_assertion():
if 'AuthNRequestID' in session:
del session['AuthNRequestID']
user_data = auth.get_attributes()
# session['samlUserdata'] = user_data
# session['samlNameId'] = auth.get_nameid()
# session['samlNameIdFormat'] = auth.get_nameid_format()
# session['samlNameIdNameQualifier'] = auth.get_nameid_nq()
# session['samlNameIdSPNameQualifier'] = auth.get_nameid_spnq()
# session['samlSessionIndex'] = auth.get_session_index()
# session['samlSessionExpiration'] = auth.get_session_expiration()
# print('>>>>')
# print(session)
self_url = OneLogin_Saml2_Utils.get_self_url(req)
if 'RelayState' in request.form and self_url != request.form['RelayState']:
print("====>redirect")
return Response(
status_code=307,
body='',
headers={'Location': auth.redirect_to(request.form['RelayState']), 'Content-Type': 'text/plain'})
elif auth.get_settings().is_debug_active():
error_reason = auth.get_last_error_reason()
return {"errors": [error_reason]}
email = auth.get_nameid()
print("received nameId:")
print(email)
existing = users.get_by_email_only(auth.get_nameid())
internal_id = next(iter(user_data.get("internalId", [])), None)
if len(existing) == 0 or existing[0].get("origin") != 'saml':
tenant_key = user_data.get("tenantKey", [])
if len(tenant_key) == 0:
print("tenantKey not present in assertion")
return Response(
status_code=307,
body={"errors": ["tenantKey not present in assertion"]},
headers={'Location': auth.redirect_to(request.form['RelayState']), 'Content-Type': 'text/plain'})
else:
t = tenants.get_by_tenant_key(tenant_key[0])
if t is None:
return Response(
status_code=307,
body={"errors": ["Unknown tenantKey"]},
headers={'Location': auth.redirect_to(request.form['RelayState']), 'Content-Type': 'text/plain'})
if len(existing) == 0:
print("== new user ==")
users.create_sso_user(tenant_id=t['tenantId'], email=email, admin=True, origin='saml',
name=" ".join(user_data.get("firstName", []) + user_data.get("lastName", [])),
internal_id=internal_id)
else:
existing = existing[0]
if existing.get("origin") != 'saml':
print("== migrating user to SAML ==")
users.update(tenant_id=t['tenantId'], user_id=existing["id"],
changes={"origin": 'saml', "internal_id": internal_id})
return users.authenticate_sso(email=email, internal_id=internal_id, exp=auth.get_session_expiration())
@app.route('/saml2/slo', methods=['GET'])
def process_slo_request(context):
req = prepare_request(request=app.current_request)
session = req["cookie"]["session"]
request = req['request']
auth = init_saml_auth(req)
name_id = session_index = name_id_format = name_id_nq = name_id_spnq = None
if 'samlNameId' in session:
name_id = session['samlNameId']
if 'samlSessionIndex' in session:
session_index = session['samlSessionIndex']
if 'samlNameIdFormat' in session:
name_id_format = session['samlNameIdFormat']
if 'samlNameIdNameQualifier' in session:
name_id_nq = session['samlNameIdNameQualifier']
if 'samlNameIdSPNameQualifier' in session:
name_id_spnq = session['samlNameIdSPNameQualifier']
users.change_jwt_iat(context["userId"])
tenant_key = user_data.get("tenantKey", [])
if len(tenant_key) == 0:
print("tenantKey not present in assertion, please check your SP-assertion-configuration")
return {"errors": ["tenantKey not present in assertion, please check your SP-assertion-configuration"]}
else:
t = tenants.get_by_tenant_key(tenant_key[0])
if t is None:
print("invalid tenantKey, please copy the correct value from Preferences > Account")
return {"errors": ["invalid tenantKey, please copy the correct value from Preferences > Account"]}
print(user_data)
role_name = user_data.get("role", [])
if len(role_name) == 0:
print("No role specified, setting role to member")
role_name = ["member"]
role_name = role_name[0]
role = roles.get_role_by_name(tenant_id=t['tenantId'], name=role_name)
if role is None:
return {"errors": [f"role {role_name} not found, please create it in openreplay first"]}
if existing is None:
print("== new user ==")
users.create_sso_user(tenant_id=t['tenantId'], email=email, admin=True,
origin=SAML2_helper.get_saml2_provider(),
name=" ".join(user_data.get("firstName", []) + user_data.get("lastName", [])),
internal_id=internal_id, role_id=role["roleId"])
else:
if t['tenantId'] != existing["tenantId"]:
print("user exists for a different tenant")
return {"errors": ["user exists for a different tenant"]}
if existing.get("origin") is None:
print(f"== migrating user to {SAML2_helper.get_saml2_provider()} ==")
users.update(tenant_id=t['tenantId'], user_id=existing["id"],
changes={"origin": SAML2_helper.get_saml2_provider(), "internal_id": internal_id})
expiration = auth.get_session_expiration()
expiration = expiration if expiration is not None and expiration > 10 * 60 \
else int(environ.get("sso_exp_delta_seconds", 24 * 60 * 60))
jwt = users.authenticate_sso(email=email, internal_id=internal_id, exp=expiration)
if jwt is None:
return {"errors": ["null JWT"]}
return Response(
status_code=307,
status_code=302,
body='',
headers={'Location': auth.logout(name_id=name_id, session_index=session_index, nq=name_id_nq,
name_id_format=name_id_format,
spnq=name_id_spnq), 'Content-Type': 'text/plain'})
headers={'Location': SAML2_helper.get_landing_URL(jwt), 'Content-Type': 'text/plain'})
@app.route('/saml2/sls', methods=['GET'], authorizer=None)
@app.route('/sso/saml2/sls', methods=['GET'], authorizer=None)
def process_sls_assertion():
req = prepare_request(request=app.current_request)
session = req["cookie"]["session"]
request = req['request']
auth = init_saml_auth(req)
request_id = None
if 'LogoutRequestID' in session:
@ -169,7 +139,7 @@ def process_sls_assertion():
headers={'Location': environ["SITE_URL"], 'Content-Type': 'text/plain'})
@app.route('/saml2/metadata', methods=['GET'], authorizer=None)
@app.route('/sso/saml2/metadata', methods=['GET'], authorizer=None)
def saml2_metadata():
req = prepare_request(request=app.current_request)
auth = init_saml_auth(req)

View file

@ -1,10 +1,10 @@
from chalicelib.utils.helper import environ
import jwt
from chalicelib.utils import helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.core import tenants
from chalicelib.core import users
from chalicelib.utils import helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.helper import environ
def jwt_authorizer(token):
@ -44,7 +44,7 @@ def generate_jwt(id, tenant_id, iat, aud, exp=None):
"userId": id,
"tenantId": tenant_id,
"exp": iat // 1000 + int(environ["jwt_exp_delta_seconds"]) + TimeUTC.get_utc_offset() // 1000 \
if exp is None else exp,
if exp is None else exp + TimeUTC.get_utc_offset() // 1000,
"iss": environ["jwt_issuer"],
"iat": iat // 1000,
"aud": aud

View file

@ -321,7 +321,7 @@ def get_details_chart(project_id, error_id, user_id, **data):
"error_id": error_id}
main_ch_query = f"""\
SELECT error_id,
SELECT browser_details.error_id AS error_id,
browsers_partition,
os_partition,
device_partition,
@ -516,7 +516,7 @@ def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=F
FROM errors
WHERE {" AND ".join(ch_sub_query)}
GROUP BY error_id, timestamp
ORDER BY timestamp)
ORDER BY timestamp) AS sub_table
GROUP BY error_id) AS chart_details ON details.error_id=chart_details.error_id;"""
# print("--------------------")

View file

@ -1,36 +1,26 @@
from chalicelib.utils import email_helper, captcha, helper
from chalicelib.core import users
from chalicelib.utils import email_helper, captcha, helper
def step1(data):
print("====================== reset password 1 ===============")
def reset(data):
print("====================== reset password ===============")
print(data)
if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]):
print("error: Invalid captcha.")
return {"errors": ["Invalid captcha."]}
if "email" not in data:
return {"errors": ["email not found in body"]}
a_users = users.get_by_email_only(data["email"])
if len(a_users) > 1:
print(f"multiple users found for [{data['email']}] please contact our support")
return {"errors": ["multiple users, please contact our support"]}
elif len(a_users) == 1:
a_users = a_users[0]
invitation_link = users.generate_new_invitation(user_id=a_users["id"])
if not helper.has_smtp():
return {"errors": ["no SMTP configuration found, you can ask your admin to reset your password"]}
a_user = users.get_by_email_only(data["email"])
if a_user is not None:
# ---FOR SSO
if a_user.get("origin") is not None and a_user.get("hasPassword", False) is False:
return {"errors": ["Please use your SSO to login"]}
# ----------
invitation_link = users.generate_new_invitation(user_id=a_user["id"])
email_helper.send_forgot_password(recipient=data["email"], invitation_link=invitation_link)
else:
print(f"invalid email address [{data['email']}]")
return {"errors": ["invalid email address"]}
return {"data": {"state": "success"}}
# def step2(data):
# print("====================== change password 2 ===============")
# user = users.get_by_email_reset(data["email"], data["code"])
# if not user:
# print("error: wrong email or reset code")
# return {"errors": ["wrong email or reset code"]}
# users.update(tenant_id=user["tenantId"], user_id=user["id"],
# changes={"token": None, "password": data["password"], "generatedPassword": False,
# "verifiedEmail": True})
# return {"data": {"state": "success"}}

View file

@ -0,0 +1,122 @@
from chalicelib.core import users
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
def update(tenant_id, user_id, role_id, changes):
admin = users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
if len(changes.keys()) == 0:
return None
ALLOW_EDIT = ["name", "description", "permissions"]
sub_query = []
for key in changes.keys():
if key in ALLOW_EDIT:
sub_query.append(f"{helper.key_to_snake_case(key)} = %({key})s")
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""\
UPDATE public.roles
SET {" ,".join(sub_query)}
WHERE role_id = %(role_id)s
AND tenant_id = %(tenant_id)s
AND deleted_at ISNULL
AND protected = FALSE
RETURNING *;""",
{"tenant_id": tenant_id, "role_id": role_id, **changes})
)
row = cur.fetchone()
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
return helper.dict_to_camel_case(row)
def create(tenant_id, user_id, name, description, permissions):
    """Insert a new role for the tenant; requires admin/super-admin rights.

    Returns the freshly created role as a camelCase dict, or an errors
    payload when the requester is not authorized.
    """
    requester = users.get(user_id=user_id, tenant_id=tenant_id)
    if not (requester["admin"] or requester["superAdmin"]):
        return {"errors": ["unauthorized"]}
    params = {"tenant_id": tenant_id, "name": name,
              "description": description, "permissions": permissions}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""INSERT INTO roles(tenant_id, name, description, permissions)
                               VALUES (%(tenant_id)s, %(name)s, %(description)s, %(permissions)s::text[])
                               RETURNING *;""", params)
        cur.execute(query)
        record = cur.fetchone()
    record["created_at"] = TimeUTC.datetime_to_timestamp(record["created_at"])
    return helper.dict_to_camel_case(record)
def get_roles(tenant_id):
    """List every non-deleted role of a tenant, ordered by role_id."""
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify("""SELECT *
                                   FROM public.roles
                                   where tenant_id =%(tenant_id)s
                                       AND deleted_at IS NULL
                                   ORDER BY role_id;""",
                                {"tenant_id": tenant_id}))
        records = cur.fetchall()
    # Normalize DB timestamps to epoch-millis for the API layer.
    for record in records:
        record["created_at"] = TimeUTC.datetime_to_timestamp(record["created_at"])
    return helper.list_to_camel_case(records)
def get_role_by_name(tenant_id, name):
    """Fetch one non-deleted role by case-insensitive name; None when absent."""
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify("""SELECT *
                                   FROM public.roles
                                   where tenant_id =%(tenant_id)s
                                       AND deleted_at IS NULL
                                       AND name ILIKE %(name)s
                                   ;""",
                                {"tenant_id": tenant_id, "name": name}))
        record = cur.fetchone()
    if record is None:
        return None
    record["created_at"] = TimeUTC.datetime_to_timestamp(record["created_at"])
    return helper.dict_to_camel_case(record)
def delete(tenant_id, user_id, role_id):
    """Soft-delete an unprotected, unassigned role.

    Refuses protected roles and roles still attached to users; on success
    marks the role deleted and returns the tenant's refreshed role list.
    """
    requester = users.get(user_id=user_id, tenant_id=tenant_id)
    if not (requester["admin"] or requester["superAdmin"]):
        return {"errors": ["unauthorized"]}
    key = {"tenant_id": tenant_id, "role_id": role_id}
    with pg_client.PostgresClient() as cur:
        # 1) protected roles can never be removed
        cur.execute(cur.mogrify("""SELECT 1
                                   FROM public.roles
                                   WHERE role_id = %(role_id)s
                                       AND tenant_id = %(tenant_id)s
                                       AND protected = TRUE
                                   LIMIT 1;""", key))
        if cur.fetchone() is not None:
            return {"errors": ["this role is protected"]}
        # 2) roles still in use must be detached from users first
        cur.execute(cur.mogrify("""SELECT 1
                                   FROM public.users
                                   WHERE role_id = %(role_id)s
                                       AND tenant_id = %(tenant_id)s
                                   LIMIT 1;""", key))
        if cur.fetchone() is not None:
            return {"errors": ["this role is already attached to other user(s)"]}
        # 3) soft delete (rows are filtered on deleted_at everywhere else)
        cur.execute(cur.mogrify("""UPDATE public.roles
                                   SET deleted_at = timezone('utc'::text, now())
                                   WHERE role_id = %(role_id)s
                                       AND tenant_id = %(tenant_id)s
                                       AND protected = FALSE;""", key))
    return get_roles(tenant_id=tenant_id)

View file

@ -68,10 +68,16 @@ def create_step1(data):
VALUES (%(companyName)s, %(versionNumber)s, 'ee')
RETURNING tenant_id, api_key
),
r AS (
INSERT INTO public.roles(tenant_id, name, description, permissions, protected)
VALUES ((SELECT tenant_id FROM t), 'Owner', 'Owner', '{"SESSION_REPLAY", "DEV_TOOLS", "ERRORS", "METRICS", "ASSIST_LIVE", "ASSIST_CALL"}'::text[], TRUE),
((SELECT tenant_id FROM t), 'Member', 'Member', '{"SESSION_REPLAY", "DEV_TOOLS", "ERRORS", "METRICS", "ASSIST_LIVE", "ASSIST_CALL"}'::text[], FALSE)
RETURNING *
),
u AS (
INSERT INTO public.users (tenant_id, email, role, name, data)
VALUES ((SELECT tenant_id FROM t), %(email)s, 'owner', %(fullname)s,%(data)s)
RETURNING user_id,email,role,name
INSERT INTO public.users (tenant_id, email, role, name, data, role_id)
VALUES ((SELECT tenant_id FROM t), %(email)s, 'owner', %(fullname)s,%(data)s, (SELECT role_id FROM r WHERE name ='Owner'))
RETURNING user_id,email,role,name,role_id
),
au AS (
INSERT INTO public.basic_authentication (user_id, password, generated_password)

View file

@ -3,7 +3,7 @@ import secrets
from chalicelib.core import authorizers, metadata, projects, assist
from chalicelib.core import tenants
from chalicelib.utils import dev
from chalicelib.utils import dev, SAML2_helper
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
@ -14,13 +14,13 @@ def __generate_invitation_token():
return secrets.token_urlsafe(64)
def create_new_member(tenant_id, email, invitation_token, admin, name, owner=False):
def create_new_member(tenant_id, email, invitation_token, admin, name, owner=False, role_id=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
WITH u AS (
INSERT INTO public.users (tenant_id, email, role, name, data)
VALUES (%(tenantId)s, %(email)s, %(role)s, %(name)s, %(data)s)
RETURNING user_id,email,role,name,appearance
INSERT INTO public.users (tenant_id, email, role, name, data, role_id)
VALUES (%(tenantId)s, %(email)s, %(role)s, %(name)s, %(data)s, %(role_id)s)
RETURNING user_id,email,role,name,appearance, role_id
),
au AS (INSERT INTO public.basic_authentication (user_id, generated_password, invitation_token, invited_at)
VALUES ((SELECT user_id FROM u), TRUE, %(invitation_token)s, timezone('utc'::text, now()))
@ -35,19 +35,20 @@ def create_new_member(tenant_id, email, invitation_token, admin, name, owner=Fal
(CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member,
au.invitation_token
au.invitation_token,
u.role_id
FROM u,au;""",
{"tenantId": tenant_id, "email": email,
"role": "owner" if owner else "admin" if admin else "member", "name": name,
"data": json.dumps({"lastAnnouncementView": TimeUTC.now()}),
"invitation_token": invitation_token})
"invitation_token": invitation_token, "role_id": role_id})
cur.execute(
query
)
return helper.dict_to_camel_case(cur.fetchone())
def restore_member(tenant_id, user_id, email, invitation_token, admin, name, owner=False):
def restore_member(tenant_id, user_id, email, invitation_token, admin, name, owner=False, role_id=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
UPDATE public.users
@ -56,7 +57,8 @@ def restore_member(tenant_id, user_id, email, invitation_token, admin, name, own
deleted_at= NULL,
created_at = timezone('utc'::text, now()),
tenant_id= %(tenant_id)s,
api_key= generate_api_key(20)
api_key= generate_api_key(20),
role_id= %(role_id)s
WHERE user_id=%(user_id)s
RETURNING user_id AS id,
email,
@ -65,9 +67,11 @@ def restore_member(tenant_id, user_id, email, invitation_token, admin, name, own
TRUE AS change_password,
(CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member;""",
(CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member,
role_id;""",
{"tenant_id": tenant_id, "user_id": user_id, "email": email,
"role": "owner" if owner else "admin" if admin else "member", "name": name})
"role": "owner" if owner else "admin" if admin else "member", "name": name,
"role_id": role_id})
cur.execute(
query
)
@ -157,7 +161,8 @@ def update(tenant_id, user_id, changes):
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
users.appearance;""",
users.appearance,
users.role_id;""",
{"tenant_id": tenant_id, "user_id": user_id, **changes})
)
if len(sub_query_bauth) > 0:
@ -177,7 +182,8 @@ def update(tenant_id, user_id, changes):
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
users.appearance;""",
users.appearance,
users.role_id;""",
{"tenant_id": tenant_id, "user_id": user_id, **changes})
)
@ -198,14 +204,15 @@ def create_member(tenant_id, user_id, data):
return {"errors": ["invalid user name"]}
if name is None:
name = data["email"]
role_id = data.get("roleId")
invitation_token = __generate_invitation_token()
user = get_deleted_user_by_email(email=data["email"])
if user is not None:
new_member = restore_member(tenant_id=tenant_id, email=data["email"], invitation_token=invitation_token,
admin=data.get("admin", False), name=name, user_id=user["userId"])
admin=data.get("admin", False), name=name, user_id=user["userId"], role_id=role_id)
else:
new_member = create_new_member(tenant_id=tenant_id, email=data["email"], invitation_token=invitation_token,
admin=data.get("admin", False), name=name)
admin=data.get("admin", False), name=name, role_id=role_id)
new_member["invitationLink"] = __get_invitation_link(new_member.pop("invitationToken"))
helper.async_post(environ['email_basic'] % 'member_invitation',
{
@ -243,19 +250,25 @@ def get(user_id, tenant_id):
users.user_id AS id,
email,
role,
name,
users.name,
basic_authentication.generated_password,
(CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member,
appearance,
api_key,
origin
FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
origin,
role_id,
roles.name AS role_name,
roles.permissions,
basic_authentication.password IS NOT NULL AS has_password
FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
LEFT JOIN public.roles USING (role_id)
WHERE
users.user_id = %(userId)s
AND tenant_id = %(tenantId)s
AND deleted_at IS NULL
AND users.tenant_id = %(tenantId)s
AND users.deleted_at IS NULL
AND (roles.role_id IS NULL OR roles.deleted_at IS NULL AND roles.tenant_id = %(tenantId)s)
LIMIT 1;""",
{"userId": user_id, "tenantId": tenant_id})
)
@ -280,7 +293,7 @@ def generate_new_api_key(user_id):
def edit(user_id_to_update, tenant_id, changes, editor_id):
ALLOW_EDIT = ["name", "email", "admin", "appearance"]
ALLOW_EDIT = ["name", "email", "admin", "appearance", "roleId"]
user = get(user_id=user_id_to_update, tenant_id=tenant_id)
if editor_id != user_id_to_update or "admin" in changes and changes["admin"] != user["admin"]:
admin = get(tenant_id=tenant_id, user_id=editor_id)
@ -324,15 +337,16 @@ def get_by_email_only(email):
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
origin
origin,
basic_authentication.password IS NOT NULL AS has_password
FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
WHERE
users.email = %(email)s
AND users.deleted_at IS NULL;""",
WHERE users.email = %(email)s
AND users.deleted_at IS NULL
LIMIT 1;""",
{"email": email})
)
r = cur.fetchall()
return helper.list_to_camel_case(r)
r = cur.fetchone()
return helper.dict_to_camel_case(r)
def get_by_email_reset(email, reset_token):
@ -375,9 +389,13 @@ def get_members(tenant_id):
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
DATE_PART('day',timezone('utc'::text, now()) \
- COALESCE(basic_authentication.invited_at,'2000-01-01'::timestamp ))>=1 AS expired_invitation,
basic_authentication.password IS NOT NULL AS joined,
invitation_token
FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
basic_authentication.password IS NOT NULL OR users.origin IS NOT NULL AS joined,
invitation_token,
role_id,
roles.name AS role_name
FROM public.users
LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
LEFT JOIN public.roles USING (role_id)
WHERE users.tenant_id = %(tenantId)s AND users.deleted_at IS NULL
ORDER BY name, id""",
{"tenantId": tenant_id})
@ -428,8 +446,8 @@ def change_password(tenant_id, user_id, email, old_password, new_password):
item = get(tenant_id=tenant_id, user_id=user_id)
if item is None:
return {"errors": ["access denied"]}
if item["origin"] is not None:
return {"errors": ["cannot change your password because you are logged-in form an SSO service"]}
if item["origin"] is not None and item["hasPassword"] is False:
return {"errors": ["cannot change your password because you are logged-in from an SSO service"]}
if old_password == new_password:
return {"errors": ["old and new password are the same"]}
auth = authenticate(email, old_password, for_change_password=True)
@ -597,19 +615,35 @@ def authenticate(email, password, for_change_password=False, for_plugin=False):
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
users.appearance,
users.origin
users.origin,
users.role_id,
roles.name AS role_name,
roles.permissions
FROM public.users AS users INNER JOIN public.basic_authentication USING(user_id)
LEFT JOIN public.roles ON (roles.role_id = users.role_id AND roles.tenant_id = users.tenant_id)
WHERE users.email = %(email)s
AND basic_authentication.password = crypt(%(password)s, basic_authentication.password)
AND basic_authentication.user_id = (SELECT su.user_id FROM public.users AS su WHERE su.email=%(email)s AND su.deleted_at IS NULL LIMIT 1)
AND (roles.role_id IS NULL OR roles.deleted_at IS NULL)
LIMIT 1;""",
{"email": email, "password": password})
cur.execute(query)
r = cur.fetchone()
if r is None and SAML2_helper.is_saml2_available():
query = cur.mogrify(
f"""SELECT 1
FROM public.users
WHERE users.email = %(email)s
AND users.deleted_at IS NULL
AND users.origin IS NOT NULL
LIMIT 1;""",
{"email": email})
cur.execute(query)
if cur.fetchone() is not None:
return {"errors": ["must sign-in with SSO"]}
if r is not None:
if r["origin"] is not None:
return {"errors": ["must sign-in with SSO"]}
if for_change_password:
return True
r = helper.dict_to_camel_case(r, ignore_keys=["appearance"])
@ -637,7 +671,8 @@ def authenticate_sso(email, internal_id, exp=None):
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
users.appearance,
origin
origin,
role_id
FROM public.users AS users
WHERE users.email = %(email)s AND internal_id = %(internal_id)s;""",
{"email": email, "internal_id": internal_id})
@ -645,33 +680,26 @@ def authenticate_sso(email, internal_id, exp=None):
cur.execute(query)
r = cur.fetchone()
if r is not None:
r = helper.dict_to_camel_case(r, ignore_keys=["appearance"])
query = cur.mogrify(
f"""UPDATE public.users
SET jwt_iat = timezone('utc'::text, now())
WHERE user_id = %(user_id)s
RETURNING jwt_iat;""",
{"user_id": r["id"]})
cur.execute(query)
return {
"jwt": authorizers.generate_jwt(r['id'], r['tenantId'],
TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]),
aud=f"front:{helper.get_stage_name()}",
exp=exp),
"email": email,
**r
}
if r is not None:
r = helper.dict_to_camel_case(r, ignore_keys=["appearance"])
jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['id']))
return authorizers.generate_jwt(r['id'], r['tenantId'],
jwt_iat, aud=f"front:{helper.get_stage_name()}",
exp=(exp + jwt_iat // 1000) if exp is not None else None)
return None
def create_sso_user(tenant_id, email, admin, name, origin, internal_id=None):
def create_sso_user(tenant_id, email, admin, name, origin, role_id, internal_id=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
WITH u AS (
INSERT INTO public.users (tenant_id, email, role, name, data, origin, internal_id)
VALUES (%(tenantId)s, %(email)s, %(role)s, %(name)s, %(data)s, %(origin)s, %(internal_id)s)
INSERT INTO public.users (tenant_id, email, role, name, data, origin, internal_id, role_id)
VALUES (%(tenantId)s, %(email)s, %(role)s, %(name)s, %(data)s, %(origin)s, %(internal_id)s, %(role_id)s)
RETURNING *
),
au AS (
INSERT INTO public.basic_authentication(user_id)
VALUES ((SELECT user_id FROM u))
)
SELECT u.user_id AS id,
u.email,
@ -686,7 +714,7 @@ def create_sso_user(tenant_id, email, admin, name, origin, internal_id=None):
FROM u;""",
{"tenantId": tenant_id, "email": email, "internal_id": internal_id,
"role": "admin" if admin else "member", "name": name, "origin": origin,
"data": json.dumps({"lastAnnouncementView": TimeUTC.now()})})
"role_id": role_id, "data": json.dumps({"lastAnnouncementView": TimeUTC.now()})})
cur.execute(
query
)

View file

@ -9,13 +9,13 @@ SAML2 = {
"strict": True,
"debug": True,
"sp": {
"entityId": environ["SITE_URL"] + "/api/saml2/metadata/",
"entityId": environ["SITE_URL"] + "/api/sso/saml2/metadata/",
"assertionConsumerService": {
"url": environ["SITE_URL"] + "/api/saml2/acs",
"url": environ["SITE_URL"] + "/api/sso/saml2/acs",
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
},
"singleLogoutService": {
"url": environ["SITE_URL"] + "/api/saml2/sls",
"url": environ["SITE_URL"] + "/api/sso/saml2/sls",
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
},
"NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress",
@ -26,7 +26,7 @@ SAML2 = {
}
idp = None
# SAML2 config handler
if len(environ.get("SAML2_MD_URL","")) > 0:
if environ.get("SAML2_MD_URL") is not None and len(environ["SAML2_MD_URL"]) > 0:
print("SAML2_MD_URL provided, getting IdP metadata config")
from onelogin.saml2.idp_metadata_parser import OneLogin_Saml2_IdPMetadataParser
@ -90,7 +90,7 @@ def prepare_request(request):
'https': 'on' if request.headers.get('x-forwarded-proto', 'http') == 'https' else 'off',
'http_host': request.headers['host'],
'server_port': url_data.port,
'script_name': request.path,
'script_name': "/api" + request.path,
'get_data': request.args.copy(),
# Uncomment if using ADFS as IdP, https://github.com/onelogin/python-saml/pull/144
# 'lowercase_urlencoding': True,
@ -102,3 +102,12 @@ def prepare_request(request):
def is_saml2_available():
    # True when an IdP configuration was successfully loaded at import time
    # (the module-level `idp` stays None otherwise).
    return idp is not None
def get_saml2_provider():
    """Display name of the configured IdP ('saml2' by default); None when SSO is off."""
    if not is_saml2_available():
        return None
    provider = environ.get("idp_name", "saml2")
    return provider if len(provider) > 0 else None
def get_landing_URL(jwt):
    """Absolute front-end URL the user lands on after SSO, with the JWT injected.

    `sso_landing` must contain a single %s placeholder for the token.
    """
    path_template = environ.get("sso_landing", "/login?jwt=%s")
    return environ["SITE_URL"] + path_template % jwt

View file

@ -0,0 +1,46 @@
import base64
import hashlib
import hmac
from time import time
from chalicelib.core import assist
from chalicelib.utils import helper
from chalicelib.utils.helper import environ
def __get_secret():
return environ["assist_secret"] if environ["assist_secret"] is not None and len(
environ["assist_secret"]) > 0 else None
def get_temporary_credentials():
    """Build time-limited TURN credentials (coturn REST-API style).

    username   = "<expiry-unix-ts>:<random-user>"
    credential = base64(HMAC-SHA1(secret, username))
    TTL comes from `assist_ttl` (hours, default 48).
    Returns an errors payload when no shared secret is configured.
    """
    secret = __get_secret()
    if secret is None:
        return {"errors": ["secret not defined"]}
    random_user = helper.generate_salt()
    ttl_seconds = int(environ.get("assist_ttl", 48)) * 3600
    expiry = int(time()) + ttl_seconds
    username = f"{expiry}:{random_user}"
    mac = hmac.new(bytes(secret, 'utf-8'), bytes(username, 'utf-8'), hashlib.sha1)
    credential = base64.b64encode(mac.digest()).decode()
    return {'username': username, 'credential': credential}
def get_full_config():
    """Return the ICE-server list as WebRTC-config dicts, or None when undefined.

    Raw format from assist.get_ice_servers(): server entries separated by '|',
    fields within an entry separated by ',' -> url[,username,credential].
    When a shared secret is configured, any static per-server credentials are
    replaced by generated temporary ones.
    """
    servers = assist.get_ice_servers()
    if servers is None:
        return None
    servers = servers.split("|")
    if __get_secret() is not None:
        # only computed when actually needed (was called unconditionally before)
        credentials = get_temporary_credentials()
        servers = [{"url": s.split(",")[0], **credentials} for s in servers]
    else:
        for i in range(len(servers)):
            # BUG FIX: fields inside one entry are ','-separated (see the
            # secret branch above); the original re-split on '|', which can
            # never match after the first split, so static username/credential
            # pairs were always dropped.
            s = servers[i].split(",")
            if len(s) == 3:
                servers[i] = {"url": s[0], "username": s[1], "credential": s[2]}
            else:
                servers[i] = {"url": s[0]}
    return servers

View file

@ -10,4 +10,4 @@ jira==2.0.0
schedule==1.1.0
croniter==1.0.12
clickhouse-driver==0.1.5
python3-saml==1.10.1
python3-saml==1.12.0

View file

@ -0,0 +1,13 @@
-- Denormalize per-session attributes onto sessions_metadata so metadata
-- queries no longer need a join back to the sessions table.
-- NOTE(review): existing rows receive default/zero values for the new
-- non-Nullable columns — confirm that is acceptable for historical data.
ALTER TABLE sessions_metadata
ADD COLUMN project_id UInt32,
ADD COLUMN tracker_version String,
ADD COLUMN rev_id Nullable(String),
ADD COLUMN user_uuid UUID,
ADD COLUMN user_os String,
ADD COLUMN user_os_version Nullable(String),
ADD COLUMN user_browser String,
ADD COLUMN user_browser_version Nullable(String),
ADD COLUMN user_device Nullable(String),
ADD COLUMN user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2),
-- Country codes packed into a compact Enum8 ('UN' = unknown).
ADD COLUMN user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 
'AW'=75, 'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122);

View file

@ -0,0 +1,22 @@
-- Custom (user-defined) tracker events, denormalized with the session's
-- device/browser/geo attributes so queries need no join to sessions.
-- Partitioned by day; rows expire after one month via TTL.
CREATE TABLE customs
(
session_id UInt64,
project_id UInt32,
tracker_version String,
rev_id Nullable(String),
user_uuid UUID,
user_os String,
user_os_version Nullable(String),
user_browser String,
user_browser_version Nullable(String),
user_device Nullable(String),
user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2),
-- Country codes packed into a compact Enum8 ('UN' = unknown).
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 
'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122),
datetime DateTime,
name Nullable(String),
payload Nullable(String),
level Enum8('info'=0, 'error'=1) DEFAULT 'info'
) ENGINE = MergeTree
PARTITION BY toDate(datetime)
ORDER BY (project_id, datetime)
TTL datetime + INTERVAL 1 MONTH;

View file

@ -1,19 +1,31 @@
CREATE TABLE sessions_metadata (
session_id UInt64,
user_id Nullable(String),
user_anonymous_id Nullable(String),
metadata_1 Nullable(String),
metadata_2 Nullable(String),
metadata_3 Nullable(String),
metadata_4 Nullable(String),
metadata_5 Nullable(String),
metadata_6 Nullable(String),
metadata_7 Nullable(String),
metadata_8 Nullable(String),
metadata_9 Nullable(String),
metadata_10 Nullable(String),
datetime DateTime
CREATE TABLE sessions_metadata
(
session_id UInt64,
project_id UInt32,
tracker_version String,
rev_id Nullable(String),
user_uuid UUID,
user_os String,
user_os_version Nullable(String),
user_browser String,
user_browser_version Nullable(String),
user_device Nullable(String),
user_device_type Enum8('other'=0, 'desktop'=1, 'mobile'=2),
user_country Enum8('UN'=-128, 'RW'=-127, 'SO'=-126, 'YE'=-125, 'IQ'=-124, 'SA'=-123, 'IR'=-122, 'CY'=-121, 'TZ'=-120, 'SY'=-119, 'AM'=-118, 'KE'=-117, 'CD'=-116, 'DJ'=-115, 'UG'=-114, 'CF'=-113, 'SC'=-112, 'JO'=-111, 'LB'=-110, 'KW'=-109, 'OM'=-108, 'QA'=-107, 'BH'=-106, 'AE'=-105, 'IL'=-104, 'TR'=-103, 'ET'=-102, 'ER'=-101, 'EG'=-100, 'SD'=-99, 'GR'=-98, 'BI'=-97, 'EE'=-96, 'LV'=-95, 'AZ'=-94, 'LT'=-93, 'SJ'=-92, 'GE'=-91, 'MD'=-90, 'BY'=-89, 'FI'=-88, 'AX'=-87, 'UA'=-86, 'MK'=-85, 'HU'=-84, 'BG'=-83, 'AL'=-82, 'PL'=-81, 'RO'=-80, 'XK'=-79, 'ZW'=-78, 'ZM'=-77, 'KM'=-76, 'MW'=-75, 'LS'=-74, 'BW'=-73, 'MU'=-72, 'SZ'=-71, 'RE'=-70, 'ZA'=-69, 'YT'=-68, 'MZ'=-67, 'MG'=-66, 'AF'=-65, 'PK'=-64, 'BD'=-63, 'TM'=-62, 'TJ'=-61, 'LK'=-60, 'BT'=-59, 'IN'=-58, 'MV'=-57, 'IO'=-56, 'NP'=-55, 'MM'=-54, 'UZ'=-53, 'KZ'=-52, 'KG'=-51, 'TF'=-50, 'HM'=-49, 'CC'=-48, 'PW'=-47, 'VN'=-46, 'TH'=-45, 'ID'=-44, 'LA'=-43, 'TW'=-42, 'PH'=-41, 'MY'=-40, 'CN'=-39, 'HK'=-38, 'BN'=-37, 'MO'=-36, 'KH'=-35, 'KR'=-34, 'JP'=-33, 'KP'=-32, 'SG'=-31, 'CK'=-30, 'TL'=-29, 'RU'=-28, 'MN'=-27, 'AU'=-26, 'CX'=-25, 'MH'=-24, 'FM'=-23, 'PG'=-22, 'SB'=-21, 'TV'=-20, 'NR'=-19, 'VU'=-18, 'NC'=-17, 'NF'=-16, 'NZ'=-15, 'FJ'=-14, 'LY'=-13, 'CM'=-12, 'SN'=-11, 'CG'=-10, 'PT'=-9, 'LR'=-8, 'CI'=-7, 'GH'=-6, 'GQ'=-5, 'NG'=-4, 'BF'=-3, 'TG'=-2, 'GW'=-1, 'MR'=0, 'BJ'=1, 'GA'=2, 'SL'=3, 'ST'=4, 'GI'=5, 'GM'=6, 'GN'=7, 'TD'=8, 'NE'=9, 'ML'=10, 'EH'=11, 'TN'=12, 'ES'=13, 'MA'=14, 'MT'=15, 'DZ'=16, 'FO'=17, 'DK'=18, 'IS'=19, 'GB'=20, 'CH'=21, 'SE'=22, 'NL'=23, 'AT'=24, 'BE'=25, 'DE'=26, 'LU'=27, 'IE'=28, 'MC'=29, 'FR'=30, 'AD'=31, 'LI'=32, 'JE'=33, 'IM'=34, 'GG'=35, 'SK'=36, 'CZ'=37, 'NO'=38, 'VA'=39, 'SM'=40, 'IT'=41, 'SI'=42, 'ME'=43, 'HR'=44, 'BA'=45, 'AO'=46, 'NA'=47, 'SH'=48, 'BV'=49, 'BB'=50, 'CV'=51, 'GY'=52, 'GF'=53, 'SR'=54, 'PM'=55, 'GL'=56, 'PY'=57, 'UY'=58, 'BR'=59, 'FK'=60, 'GS'=61, 'JM'=62, 'DO'=63, 'CU'=64, 'MQ'=65, 'BS'=66, 'BM'=67, 'AI'=68, 'TT'=69, 'KN'=70, 'DM'=71, 'AG'=72, 'LC'=73, 'TC'=74, 'AW'=75, 
'VG'=76, 'VC'=77, 'MS'=78, 'MF'=79, 'BL'=80, 'GP'=81, 'GD'=82, 'KY'=83, 'BZ'=84, 'SV'=85, 'GT'=86, 'HN'=87, 'NI'=88, 'CR'=89, 'VE'=90, 'EC'=91, 'CO'=92, 'PA'=93, 'HT'=94, 'AR'=95, 'CL'=96, 'BO'=97, 'PE'=98, 'MX'=99, 'PF'=100, 'PN'=101, 'KI'=102, 'TK'=103, 'TO'=104, 'WF'=105, 'WS'=106, 'NU'=107, 'MP'=108, 'GU'=109, 'PR'=110, 'VI'=111, 'UM'=112, 'AS'=113, 'CA'=114, 'US'=115, 'PS'=116, 'RS'=117, 'AQ'=118, 'SX'=119, 'CW'=120, 'BQ'=121, 'SS'=122),
datetime DateTime,
user_id Nullable(String),
user_anonymous_id Nullable(String),
metadata_1 Nullable(String),
metadata_2 Nullable(String),
metadata_3 Nullable(String),
metadata_4 Nullable(String),
metadata_5 Nullable(String),
metadata_6 Nullable(String),
metadata_7 Nullable(String),
metadata_8 Nullable(String),
metadata_9 Nullable(String),
metadata_10 Nullable(String)
) ENGINE = MergeTree
PARTITION BY toDate(datetime)
ORDER BY (session_id)
TTL datetime + INTERVAL 1 MONTH;
PARTITION BY toDate(datetime)
ORDER BY (session_id)
TTL datetime + INTERVAL 1 MONTH;

View file

@ -0,0 +1,72 @@
BEGIN;
-- Partial indexes to speed up the most common session/user lookups.
CREATE INDEX sessions_user_id_useridNN_idx ON sessions (user_id) WHERE user_id IS NOT NULL;
CREATE INDEX sessions_uid_projectid_startts_sessionid_uidNN_durGTZ_idx ON sessions (user_id, project_id, start_ts, session_id) WHERE user_id IS NOT NULL AND duration > 0;
CREATE INDEX pages_base_path_base_pathLNGT2_idx ON events.pages (base_path) WHERE length(base_path) > 2;
CREATE INDEX users_tenant_id_deleted_at_N_idx ON users (tenant_id) WHERE deleted_at ISNULL;
CREATE INDEX issues_issue_id_timestamp_idx ON events_common.issues (issue_id, timestamp);
CREATE INDEX issues_timestamp_idx ON events_common.issues (timestamp);
CREATE INDEX issues_project_id_issue_id_idx ON public.issues (project_id, issue_id);
-- Role-based access control: one roles table per tenant, soft-deletable.
-- "protected" marks built-in roles that the UI must not allow deleting/editing.
CREATE TABLE roles
(
    role_id     integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
    tenant_id   integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE,
    name        text    NOT NULL,
    description text             DEFAULT NULL,
    permissions text[]  NOT NULL DEFAULT '{}',
    protected   bool    NOT NULL DEFAULT FALSE,
    created_at  timestamp NOT NULL DEFAULT timezone('utc'::text, now()),
    deleted_at  timestamp NULL DEFAULT NULL
);
-- Seed every existing tenant with the two default roles via a cross join
-- (tenants x default role rows). "Owner" is protected, "Member" is not.
INSERT INTO roles(tenant_id, name, description, permissions, protected)
SELECT *
FROM (SELECT tenant_id FROM tenants) AS tenants,
     (VALUES ('Owner', 'Owner',
              '{"SESSION_REPLAY", "DEV_TOOLS", "ERRORS", "METRICS", "ASSIST_LIVE", "ASSIST_CALL"}'::text[], TRUE),
             ('Member', 'Member',
              '{"SESSION_REPLAY", "DEV_TOOLS", "ERRORS", "METRICS", "ASSIST_LIVE", "ASSIST_CALL"}'::text[], FALSE)
     ) AS default_roles(name, description, permissions, protected);
-- Link users to their role; NULL on role deletion rather than cascading.
ALTER TABLE users
    ADD COLUMN role_id integer REFERENCES roles (role_id) ON DELETE SET NULL;
-- Backfill: map the legacy text column users.role onto the new role rows,
-- matching each user to the role created for their own tenant.
UPDATE users
SET role_id = r.role_id
FROM (SELECT tenant_id, role_id
      FROM tenants
               INNER JOIN roles USING (tenant_id)
      WHERE roles.name = 'Owner') AS r(tenant_id, role_id)
WHERE users.tenant_id = r.tenant_id
  AND users.role = 'owner';
UPDATE users
SET role_id = r.role_id
FROM (SELECT tenant_id, role_id
      FROM tenants
               INNER JOIN roles USING (tenant_id)
      WHERE roles.name = 'Member') AS r(tenant_id, role_id)
WHERE users.tenant_id = r.tenant_id
  AND users.role != 'owner';
-- Create the user_origin enum only if a previous run has not already done so
-- (CREATE TYPE has no IF NOT EXISTS, hence the DO block).
DO
$$
    BEGIN
        IF NOT EXISTS(SELECT 1 FROM pg_type WHERE typname = 'user_origin') THEN
            CREATE TYPE user_origin AS ENUM ('saml');
        END IF;
    END
$$;
ALTER TABLE public.users
    ADD COLUMN IF NOT EXISTS origin      user_origin NULL DEFAULT NULL,
    ADD COLUMN IF NOT EXISTS internal_id text        NULL DEFAULT NULL;
-- Relax origin to free text and drop the enum: new origins can then be added
-- without further DDL.
ALTER TABLE public.users
    ALTER COLUMN origin TYPE text;
DROP TYPE IF EXISTS user_origin;
COMMIT;

File diff suppressed because it is too large Load diff

View file

@ -4,7 +4,6 @@ app_name: ""
db_name: ""
db_list:
- "minio"
- "nfs-server-provisioner"
- "postgresql"
- "redis"
- "clickhouse"

View file

@ -69,7 +69,7 @@ const ONBOARDING_REDIRECT_PATH = routes.onboarding(OB_DEFAULT_TAB);
organisation: state.getIn([ 'user', 'client', 'name' ]),
tenantId: state.getIn([ 'user', 'client', 'tenantId' ]),
tenants: state.getIn(['user', 'tenants']),
existingTenant: state.getIn(['user', 'existingTenant']),
existingTenant: state.getIn(['user', 'authDetails', 'tenants']),
onboarding: state.getIn([ 'user', 'onboarding' ])
};
}, {

View file

@ -80,6 +80,7 @@ export default class APIClient {
path !== '/targets_temp' &&
!path.includes('/metadata/session_search') &&
!path.includes('/watchdogs/rules') &&
!path.includes('/assist/credentials') &&
!!this.siteId &&
siteIdRequiredPaths.some(sidPath => path.startsWith(sidPath))
) {

View file

@ -10,21 +10,21 @@ import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream
export interface Props {
remoteStream: MediaStream | null,
incomeStream: MediaStream | null,
localStream: LocalStream | null,
userId: String,
endCall: () => void
}
const ChatWindow: FC<Props> = function ChatWindow({ userId, remoteStream, localStream, endCall }) {
const ChatWindow: FC<Props> = function ChatWindow({ userId, incomeStream, localStream, endCall }) {
const [localVideoEnabled, setLocalVideoEnabled] = useState(false)
const [remoteVideoEnabled, setRemoteVideoEnabled] = useState(false)
useEffect(() => {
if (!remoteStream) { return }
if (!incomeStream) { return }
const iid = setInterval(() => {
const settings = remoteStream.getVideoTracks()[0]?.getSettings()
const settings = incomeStream.getVideoTracks()[0]?.getSettings()
const isDummyVideoTrack = !!settings ? (settings.width === 2 || settings.frameRate === 0) : true
console.log(isDummyVideoTrack, settings)
const shouldBeEnabled = !isDummyVideoTrack
@ -33,7 +33,7 @@ const ChatWindow: FC<Props> = function ChatWindow({ userId, remoteStream, localS
}
}, 1000)
return () => clearInterval(iid)
}, [ remoteStream, localVideoEnabled ])
}, [ incomeStream, localVideoEnabled ])
const minimize = !localVideoEnabled && !remoteVideoEnabled
@ -48,7 +48,7 @@ const ChatWindow: FC<Props> = function ChatWindow({ userId, remoteStream, localS
<Counter startTime={new Date().getTime() } className="text-sm ml-auto" />
</div>
<div className={cn(stl.videoWrapper, {'hidden' : minimize}, 'relative')}>
<VideoContainer stream={ remoteStream } />
<VideoContainer stream={ incomeStream } />
<div className="absolute bottom-0 right-0 z-50">
<VideoContainer stream={ localStream ? localStream.stream : null } muted width={50} />
</div>

View file

@ -1,11 +1,4 @@
.inCall {
& svg {
fill: $red
}
color: $red;
}
.disabled {
opacity: 0.5;
pointer-events: none;
}
}

View file

@ -31,16 +31,19 @@ interface Props {
userId: String,
toggleChatWindow: (state) => void,
calling: CallingState,
peerConnectionStatus: ConnectionStatus
peerConnectionStatus: ConnectionStatus,
remoteControlEnabled: boolean,
hasPermission: boolean,
isEnterprise: boolean,
}
function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus }: Props) {
const [ remoteStream, setRemoteStream ] = useState<MediaStream | null>(null);
function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus, remoteControlEnabled, hasPermission, isEnterprise }: Props) {
const [ incomeStream, setIncomeStream ] = useState<MediaStream | null>(null);
const [ localStream, setLocalStream ] = useState<LocalStream | null>(null);
const [ endCall, setEndCall ] = useState<()=>void>(()=>{});
const [ callObject, setCallObject ] = useState<{ end: ()=>void, toggleRemoteControl: ()=>void } | null >(null);
useEffect(() => {
return endCall
return callObject?.end()
}, [])
useEffect(() => {
@ -49,13 +52,12 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus
}
}, [peerConnectionStatus])
function call() {
RequestLocalStream().then(lStream => {
setLocalStream(lStream);
setEndCall(() => callPeer(
setCallObject(callPeer(
lStream,
setRemoteStream,
setIncomeStream,
lStream.stop.bind(lStream),
onReject,
onError
@ -74,6 +76,7 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus
}
const inCall = calling !== CallingState.False;
const cannotCall = (peerConnectionStatus !== ConnectionStatus.Connected) || (isEnterprise && !hasPermission)
return (
<div className="flex items-center">
@ -83,11 +86,11 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus
className={
cn(
'cursor-pointer p-2 mr-2 flex items-center',
{[stl.inCall] : inCall },
{[stl.disabled]: peerConnectionStatus !== ConnectionStatus.Connected}
// {[stl.inCall] : inCall },
{[stl.disabled]: cannotCall}
)
}
onClick={ inCall ? endCall : confirmCall}
onClick={ inCall ? callObject?.end : confirmCall}
role="button"
>
<Icon
@ -95,7 +98,7 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus
size="20"
color={ inCall ? "red" : "gray-darkest" }
/>
<span className={cn("ml-2", { 'text-red' : inCall })}>{ inCall ? 'End Call' : 'Call' }</span>
<span className={cn("ml-2", { 'color-red' : inCall })}>{ inCall ? 'End Call' : 'Call' }</span>
</div>
}
content={ `Call ${userId ? userId : 'User'}` }
@ -103,16 +106,41 @@ function AssistActions({ toggleChatWindow, userId, calling, peerConnectionStatus
inverted
position="top right"
/>
{ calling === CallingState.True &&
<div
className={
cn(
'cursor-pointer p-2 mr-2 flex items-center',
)
}
onClick={ callObject?.toggleRemoteControl }
role="button"
>
<Icon
name="remote-control"
size="20"
color={ remoteControlEnabled ? "green" : "gray-darkest"}
/>
<span className={cn("ml-2", { 'color-green' : remoteControlEnabled })}>{ 'Remote Control' }</span>
</div>
}
<div className="fixed ml-3 left-0 top-0" style={{ zIndex: 999 }}>
{ inCall && <ChatWindow endCall={endCall} userId={userId} remoteStream={remoteStream} localStream={localStream} /> }
{ inCall && callObject && <ChatWindow endCall={callObject.end} userId={userId} incomeStream={incomeStream} localStream={localStream} /> }
</div>
</div>
)
}
const con = connect(null, { toggleChatWindow })
const con = connect(state => {
const permissions = state.getIn([ 'user', 'account', 'permissions' ]) || []
return {
hasPermission: permissions.includes('ASSIST_CALL'),
isEnterprise: state.getIn([ 'user', 'client', 'edition' ]) === 'ee',
}
}, { toggleChatWindow })
export default con(connectPlayer(state => ({
calling: state.calling,
remoteControlEnabled: state.remoteControl,
peerConnectionStatus: state.peerConnectionStatus,
}))(AssistActions))

View file

@ -18,7 +18,7 @@ import withLocationHandlers from "HOCs/withLocationHandlers";
import { fetch as fetchFilterVariables } from 'Duck/sources';
import { fetchList as fetchIntegrationVariables, fetchSources } from 'Duck/customField';
import { RehydrateSlidePanel } from './WatchDogs/components';
import { setActiveTab } from 'Duck/sessions';
import { setActiveTab, setFunnelPage } from 'Duck/sessions';
import SessionsMenu from './SessionsMenu/SessionsMenu';
import SessionFlowList from './SessionFlowList/SessionFlowList';
import { LAST_7_DAYS } from 'Types/app/period';
@ -74,7 +74,8 @@ const allowedQueryKeys = [
fetchSiteList,
fetchFunnelsList,
resetFunnel,
resetFunnelFilters
resetFunnelFilters,
setFunnelPage
})
@withPageTitle("Sessions - OpenReplay")
export default class BugFinder extends React.PureComponent {
@ -114,6 +115,10 @@ export default class BugFinder extends React.PureComponent {
}
}
componentDidMount() {
this.props.setFunnelPage(false);
}
toggleRehydratePanel = () => {
this.setState({ showRehydratePanel: !this.state.showRehydratePanel })
}

View file

@ -4,6 +4,7 @@ import { connect } from 'react-redux';
import { NoContent, Loader } from 'UI';
import { List, Map } from 'immutable';
import SessionItem from 'Shared/SessionItem';
import withPermissions from 'HOCs/withPermissions'
import { KEYS } from 'Types/filter/customFilter';
import { applyFilter, addAttribute } from 'Duck/filters';
import Filter from 'Types/filter';
@ -38,7 +39,7 @@ function LiveSessionList(props: Props) {
} else {
props.addAttribute({ label: 'Anonymous ID', key: 'USERANONYMOUSID', type: "USERANONYMOUSID", operator: 'is', value: userAnonymousId })
}
props.applyFilter()
}
@ -77,8 +78,12 @@ function LiveSessionList(props: Props) {
)
}
export default connect(state => ({
list: state.getIn(['sessions', 'liveSessions']),
loading: state.getIn([ 'sessions', 'loading' ]),
filters: state.getIn([ 'filters', 'appliedFilter' ]),
}), { fetchList, applyFilter, addAttribute })(LiveSessionList)
export default withPermissions(['ASSIST_LIVE'])(connect(
(state) => ({
list: state.getIn(['sessions', 'liveSessions']),
loading: state.getIn([ 'sessions', 'loading' ]),
filters: state.getIn([ 'filters', 'appliedFilter' ]),
}),
{
fetchList, applyFilter, addAttribute }
)(LiveSessionList));

View file

@ -15,6 +15,7 @@ import styles from './client.css';
import cn from 'classnames';
import PreferencesMenu from './PreferencesMenu';
import Notifications from './Notifications';
import Roles from './Roles';
@connect((state) => ({
appearance: state.getIn([ 'user', 'account', 'appearance' ]),
@ -42,6 +43,7 @@ export default class Client extends React.PureComponent {
<Route exact strict path={ clientRoute(CLIENT_TABS.CUSTOM_FIELDS) } component={ CustomFields } />
<Route exact strict path={ clientRoute(CLIENT_TABS.WEBHOOKS) } component={ Webhooks } />
<Route exact strict path={ clientRoute(CLIENT_TABS.NOTIFICATIONS) } component={ Notifications } />
<Route exact strict path={ clientRoute(CLIENT_TABS.MANAGE_ROLES) } component={ Roles } />
<Redirect to={ clientRoute(CLIENT_TABS.PROFILE) } />
</Switch>
)

View file

@ -0,0 +1,60 @@
import Highlight from 'react-highlight'
import ToggleContent from 'Shared/ToggleContent'
import DocLink from 'Shared/DocLink/DocLink';
const AxiosDoc = (props) => {
const { projectKey } = props;
return (
<div className="p-4">
<div>This plugin allows you to capture axios requests and inspect them later on while replaying session recordings. This is very useful for understanding and fixing issues.</div>
<div className="font-bold my-2">Installation</div>
<Highlight className="js">
{`npm i @openreplay/tracker-axios`}
</Highlight>
<div className="font-bold my-2">Usage</div>
<p>Initialize the @openreplay/tracker package as usual then load the axios plugin. Note that OpenReplay axios plugin requires axios@^0.21.2 as a peer dependency.</p>
<div className="py-3" />
<div className="font-bold my-2">Usage</div>
<ToggleContent
label="Server-Side-Rendered (SSR)?"
first={
<Highlight className="js">
{`import tracker from '@openreplay/tracker';
import trackerAxios from '@openreplay/tracker-axios';
const tracker = new OpenReplay({
projectKey: '${projectKey}'
});
tracker.use(trackerAxios(options)); // check list of available options below
tracker.start();`}
</Highlight>
}
second={
<Highlight className="js">
{`import OpenReplay from '@openreplay/tracker/cjs';
import trackerAxios from '@openreplay/tracker-axios/cjs';
const tracker = new OpenReplay({
projectKey: '${projectKey}'
});
tracker.use(trackerAxios(options)); // check list of available options below
//...
function MyApp() {
useEffect(() => { // use componentDidMount in case of React Class Component
tracker.start();
}, [])
//...
}`}
</Highlight>
}
/>
<DocLink className="mt-4" label="Integrate Fetch" url="https://docs.openreplay.com/plugins/axios" />
</div>
)
};
AxiosDoc.displayName = "AxiosDoc";
export default AxiosDoc;

View file

@ -0,0 +1 @@
export { default } from './AxiosDoc'

View file

@ -29,6 +29,7 @@ import FetchDoc from './FetchDoc';
import MobxDoc from './MobxDoc';
import ProfilerDoc from './ProfilerDoc';
import AssistDoc from './AssistDoc';
import AxiosDoc from './AxiosDoc/AxiosDoc';
const NONE = -1;
const SENTRY = 0;
@ -51,6 +52,7 @@ const FETCH = 16;
const MOBX = 17;
const PROFILER = 18;
const ASSIST = 19;
const AXIOS = 20;
const TITLE = {
[ SENTRY ]: 'Sentry',
@ -73,6 +75,7 @@ const TITLE = {
[ MOBX ] : 'MobX',
[ PROFILER ] : 'Profiler',
[ ASSIST ] : 'Assist',
[ AXIOS ] : 'Axios',
}
const DOCS = [REDUX, VUE, GRAPHQL, NGRX, FETCH, MOBX, PROFILER, ASSIST]
@ -191,6 +194,8 @@ export default class Integrations extends React.PureComponent {
return <ProfilerDoc onClose={ this.closeModal } projectKey={projectKey} />
case ASSIST:
return <AssistDoc onClose={ this.closeModal } projectKey={projectKey} />
case AXIOS:
return <AxiosDoc onClose={ this.closeModal } projectKey={projectKey} />
default:
return null;
}
@ -295,7 +300,14 @@ export default class Integrations extends React.PureComponent {
onClick={ () => this.showIntegrationConfig(NGRX) }
// integrated={ sentryIntegrated }
/>
<IntegrationItem
title="MobX"
icon="integrations/mobx"
url={ null }
dockLink="https://docs.openreplay.com/integrations/sentry"
onClick={ () => this.showIntegrationConfig(MOBX) }
// integrated={ sentryIntegrated }
/>
<IntegrationItem
title="Fetch"
icon="integrations/openreplay"
@ -304,16 +316,6 @@ export default class Integrations extends React.PureComponent {
onClick={ () => this.showIntegrationConfig(FETCH) }
// integrated={ sentryIntegrated }
/>
<IntegrationItem
title="MobX"
icon="integrations/mobx"
url={ null }
dockLink="https://docs.openreplay.com/integrations/sentry"
onClick={ () => this.showIntegrationConfig(MOBX) }
// integrated={ sentryIntegrated }
/>
<IntegrationItem
title="Profiler"
icon="integrations/openreplay"
@ -322,6 +324,14 @@ export default class Integrations extends React.PureComponent {
onClick={ () => this.showIntegrationConfig(PROFILER) }
// integrated={ sentryIntegrated }
/>
<IntegrationItem
title="Axios"
icon="integrations/openreplay"
url={ null }
dockLink="https://docs.openreplay.com/plugins/axios"
onClick={ () => this.showIntegrationConfig(AXIOS) }
// integrated={ sentryIntegrated }
/>
<IntegrationItem
title="Assist"
icon="integrations/assist"

View file

@ -1,8 +1,11 @@
import { connect } from 'react-redux';
import cn from 'classnames';
import withPageTitle from 'HOCs/withPageTitle';
import { IconButton, SlideModal, Input, Button, Loader, NoContent, Popup, CopyButton } from 'UI';
import {
IconButton, SlideModal, Input, Button, Loader,
NoContent, Popup, CopyButton, Dropdown } from 'UI';
import { init, save, edit, remove as deleteMember, fetchList, generateInviteLink } from 'Duck/member';
import { fetchList as fetchRoles } from 'Duck/roles';
import styles from './manageUsers.css';
import UserItem from './UserItem';
import { confirm } from 'UI/Confirmation';
@ -19,24 +22,29 @@ const LIMIT_WARNING = 'You have reached users limit.';
errors: state.getIn([ 'members', 'saveRequest', 'errors' ]),
loading: state.getIn([ 'members', 'loading' ]),
saving: state.getIn([ 'members', 'saveRequest', 'loading' ]),
roles: state.getIn(['roles', 'list']).filter(r => !r.protected).map(r => ({ text: r.name, value: r.roleId })).toJS(),
isEnterprise: state.getIn([ 'user', 'client', 'edition' ]) === 'ee',
}), {
init,
save,
edit,
deleteMember,
fetchList,
generateInviteLink
generateInviteLink,
fetchRoles
})
@withPageTitle('Users - OpenReplay Preferences')
class ManageUsers extends React.PureComponent {
state = { showModal: false, remaining: this.props.account.limits.teamMember.remaining, invited: false }
// writeOption = (e, { name, value }) => this.props.edit({ [ name ]: value });
onChange = (e, { name, value }) => this.props.edit({ [ name ]: value });
onChangeCheckbox = ({ target: { checked, name } }) => this.props.edit({ [ name ]: checked });
setFocus = () => this.focusElement.focus();
closeModal = () => this.setState({ showModal: false });
componentWillMount = () => {
this.props.fetchList();
this.props.fetchRoles();
}
adminLabel = (user) => {
@ -76,81 +84,99 @@ class ManageUsers extends React.PureComponent {
});
}
formContent = (member, account) => (
<div className={ styles.form }>
<form onSubmit={ this.save } >
<div className={ styles.formGroup }>
<label>{ 'Full Name' }</label>
<Input
ref={ (ref) => { this.focusElement = ref; } }
name="name"
value={ member.name }
onChange={ this.onChange }
className={ styles.input }
id="name-field"
/>
</div>
formContent = () => {
const { member, account, isEnterprise, roles } = this.props;
<div className={ styles.formGroup }>
<label>{ 'Email Address' }</label>
<Input
disabled={member.exists()}
name="email"
value={ member.email }
onChange={ this.onChange }
className={ styles.input }
/>
</div>
{ !account.smtp &&
<div className={cn("mb-4 p-2", styles.smtpMessage)}>
SMTP is not configured. Please follow (see <a className="link" href="https://docs.openreplay.com/configuration/configure-smtp" target="_blank">here</a> how to set it up). You can still add new users, but youd have to manually copy then send them the invitation link.
</div>
}
<div className={ styles.formGroup }>
<label className={ styles.checkbox }>
<input
name="admin"
type="checkbox"
value={ member.admin }
checked={ !!member.admin }
onChange={ this.onChangeCheckbox }
disabled={member.superAdmin}
return (
<div className={ styles.form }>
<form onSubmit={ this.save } >
<div className={ styles.formGroup }>
<label>{ 'Full Name' }</label>
<Input
ref={ (ref) => { this.focusElement = ref; } }
name="name"
value={ member.name }
onChange={ this.onChange }
className={ styles.input }
id="name-field"
/>
<span>{ 'Admin' }</span>
</label>
<div className={ styles.adminInfo }>{ 'Can manage Projects and team members.' }</div>
</div>
</form>
</div>
<div className="flex items-center">
<div className="flex items-center mr-auto">
<Button
onClick={ this.save }
disabled={ !member.validate() }
loading={ this.props.saving }
primary
marginRight
>
{ member.exists() ? 'Update' : 'Invite' }
</Button>
<Button
data-hidden={ !member.exists() }
onClick={ this.closeModal }
outline
>
{ 'Cancel' }
</Button>
<div className={ styles.formGroup }>
<label>{ 'Email Address' }</label>
<Input
disabled={member.exists()}
name="email"
value={ member.email }
onChange={ this.onChange }
className={ styles.input }
/>
</div>
{ !account.smtp &&
<div className={cn("mb-4 p-2", styles.smtpMessage)}>
SMTP is not configured. Please follow (see <a className="link" href="https://docs.openreplay.com/configuration/configure-smtp" target="_blank">here</a> how to set it up). You can still add new users, but youd have to manually copy then send them the invitation link.
</div>
}
<div className={ styles.formGroup }>
<label className={ styles.checkbox }>
<input
name="admin"
type="checkbox"
value={ member.admin }
checked={ !!member.admin }
onChange={ this.onChangeCheckbox }
disabled={member.superAdmin}
/>
<span>{ 'Admin Privileges' }</span>
</label>
<div className={ styles.adminInfo }>{ 'Can manage Projects and team members.' }</div>
</div>
{ isEnterprise && (
<div className={ styles.formGroup }>
<label htmlFor="role">{ 'Role' }</label>
<Dropdown
placeholder="Role"
selection
options={ roles }
name="roleId"
value={ member.roleId }
onChange={ this.onChange }
/>
</div>
)}
</form>
<div className="flex items-center">
<div className="flex items-center mr-auto">
<Button
onClick={ this.save }
disabled={ !member.validate() }
loading={ this.props.saving }
primary
marginRight
>
{ member.exists() ? 'Update' : 'Invite' }
</Button>
<Button
data-hidden={ !member.exists() }
onClick={ this.closeModal }
outline
>
{ 'Cancel' }
</Button>
</div>
{ !member.joined && member.invitationLink &&
<CopyButton
content={member.invitationLink}
className="link"
btnText="Copy invite link"
/>
}
</div>
{ !member.joined && member.invitationLink &&
<CopyButton
content={member.invitationLink}
className="link"
btnText="Copy invite link"
/>
}
</div>
</div>
)
)
}
init = (v) => {
this.props.init(v);
@ -160,7 +186,7 @@ class ManageUsers extends React.PureComponent {
render() {
const {
members, member, loading, account, hideHeader = false,
members, loading, account, hideHeader = false
} = this.props;
const { showModal, remaining, invited } = this.state;
const isAdmin = account.admin || account.superAdmin;
@ -173,7 +199,7 @@ class ManageUsers extends React.PureComponent {
title="Invite People"
size="small"
isDisplayed={ showModal }
content={ this.formContent(member, account) }
content={ this.formContent() }
onClose={ this.closeModal }
/>
<div className={ styles.wrapper }>
@ -194,7 +220,7 @@ class ManageUsers extends React.PureComponent {
/>
</div>
}
// disabled={ canAddUsers }
disabled={ canAddUsers }
content={ `${ !canAddUsers ? (!isAdmin ? PERMISSION_WARNING : LIMIT_WARNING) : 'Add team member' }` }
size="tiny"
inverted

View file

@ -7,6 +7,7 @@ const UserItem = ({ user, adminLabel, deleteHandler, editHandler, generateInvite
<Icon name="user-alt" size="16" marginRight="10" />
<div id="user-name">{ user.name || user.email }</div>
{ adminLabel && <div className={ styles.adminLabel }>{ adminLabel }</div>}
{ user.roleName && <div className={ styles.adminLabel }>{ user.roleName }</div>}
<div className={ styles.actions }>
{ user.expiredInvitation && !user.joined &&
<Popup

View file

@ -6,7 +6,7 @@ import stl from './preferencesMenu.css';
import { CLIENT_TABS, client as clientRoute } from 'App/routes';
import { withRouter } from 'react-router-dom';
function PreferencesMenu({ activeTab, appearance, history }) {
function PreferencesMenu({ activeTab, appearance, history, isEnterprise }) {
const setTab = (tab) => {
history.push(clientRoute(tab));
@ -50,7 +50,6 @@ function PreferencesMenu({ activeTab, appearance, history }) {
{
<div className="mb-4">
<SideMenuitem
active={ activeTab === CLIENT_TABS.WEBHOOKS }
title="Webhooks"
@ -76,7 +75,18 @@ function PreferencesMenu({ activeTab, appearance, history }) {
iconName="users"
onClick={() => setTab(CLIENT_TABS.MANAGE_USERS) }
/>
</div>
</div>
{ isEnterprise && (
<div className="mb-4">
<SideMenuitem
active={ activeTab === CLIENT_TABS.MANAGE_ROLES }
title="Roles"
iconName="shield-lock"
onClick={() => setTab(CLIENT_TABS.MANAGE_ROLES) }
/>
</div>
)}
<div className="mb-4">
<SideMenuitem
@ -92,4 +102,5 @@ function PreferencesMenu({ activeTab, appearance, history }) {
export default connect(state => ({
appearance: state.getIn([ 'user', 'account', 'appearance' ]),
isEnterprise: state.getIn([ 'user', 'client', 'edition' ]) === 'ee',
}))(withRouter(PreferencesMenu));

View file

@ -11,10 +11,11 @@ import { connect } from 'react-redux';
@withPageTitle('Account - OpenReplay Preferences')
@connect(state => ({
account: state.getIn([ 'user', 'account' ]),
isEnterprise: state.getIn([ 'user', 'client', 'edition' ]) === 'ee',
}))
export default class ProfileSettings extends React.PureComponent {
render() {
const { account } = this.props;
const { account, isEnterprise } = this.props;
return (
<React.Fragment>
<div className="flex items-center">
@ -55,15 +56,19 @@ export default class ProfileSettings extends React.PureComponent {
<div><TenantKey /></div>
</div>
<div className="divider" />
{ !isEnterprise && (
<>
<div className="divider" />
<div className="flex items-center">
<div className={ styles.left }>
<h4 className="text-lg mb-4">{ 'Data Collection' }</h4>
<div className={ styles.info }>{ 'Enables you to control how OpenReplay captures data on your organizations usage to improve our product.' }</div>
</div>
<div><OptOut /></div>
</div>
</>
)}
<div className="flex items-center">
<div className={ styles.left }>
<h4 className="text-lg mb-4">{ 'Data Collection' }</h4>
<div className={ styles.info }>{ 'Enables you to control how OpenReplay captures data on your organizations usage to improve our product.' }</div>
</div>
<div><OptOut /></div>
</div>
{ account.license && (
<>
<div className="divider" />

View file

@ -4,37 +4,35 @@ import { connect } from 'react-redux';
import styles from './profileSettings.css';
@connect(state => ({
key: state.getIn([ 'user', 'client', 'tenantKey' ]),
loading: state.getIn([ 'user', 'updateAccountRequest', 'loading' ]) ||
state.getIn([ 'user', 'putClientRequest', 'loading' ]),
tenantKey: state.getIn([ 'user', 'client', 'tenantKey' ]),
}))
export default class TenantKey extends React.PureComponent {
state = { copied: false }
copyHandler = () => {
const { key } = this.props;
const { tenantKey } = this.props;
this.setState({ copied: true });
copy(key);
copy(tenantKey);
setTimeout(() => {
this.setState({ copied: false });
}, 1000);
};
render() {
const { key } = this.props;
const { tenantKey } = this.props;
const { copied } = this.state;
return (
<form onSubmit={ this.handleSubmit } className={ styles.form }>
<div className={ styles.formGroup }>
<label htmlFor="key">{ 'Tenant Key' }</label>
<label htmlFor="tenantKey">{ 'Tenant Key' }</label>
<div className="ui action input">
<input
name="key"
id="key"
name="tenantKey"
id="tenantKey"
type="text"
readOnly={ true }
value={ key }
value={ tenantKey }
/>
<div
className="ui button copy-button"

View file

@ -0,0 +1,127 @@
import React, { useState, useEffect } from 'react'
import cn from 'classnames'
import { Loader, IconButton, Popup, NoContent, SlideModal } from 'UI'
import { connect } from 'react-redux'
import stl from './roles.css'
import RoleForm from './components/RoleForm'
import { init, edit, fetchList, remove as deleteRole } from 'Duck/roles';
import RoleItem from './components/RoleItem'
import { confirm } from 'UI/Confirmation';
interface Props {
  loading: boolean
  init: (role?: any) => void,
  edit: (role: any) => void,
  instance: any,
  roles: any[],
  deleteRole: (id: any) => void,
  fetchList: () => Promise<void>,
  account: any,
  permissionsMap: any
}

/**
 * Settings page listing the tenant's roles and their permissions.
 * Opens a slide-over modal (RoleForm) for creating/editing a role and
 * asks for confirmation before deleting one.
 */
function Roles(props: Props) {
  const { loading, instance, roles, init, edit, deleteRole, account, permissionsMap } = props
  const [showModal, setShowModal] = useState(false)
  const isAdmin = account.admin || account.superAdmin;

  useEffect(() => {
    props.fetchList()
  }, [])

  const closeModal = () => {
    setShowModal(false)
    // Delay resetting the form instance until the modal close animation is done.
    setTimeout(() => {
      init()
    }, 100)
  }

  const editHandler = role => {
    init(role)
    setShowModal(true)
  }

  const deleteHandler = async (role) => {
    if (await confirm({
      header: 'Roles',
      confirmation: `Are you sure you want to remove this role?`
    })) {
      deleteRole(role.roleId)
    }
  }

  return (
    <React.Fragment>
      <Loader loading={ loading }>
        <SlideModal
          title={ instance.exists() ? "Edit Role" : "Add Role" }
          size="small"
          isDisplayed={showModal }
          content={ showModal && <RoleForm closeModal={closeModal}/> }
          onClose={ closeModal }
        />
        <div className={ stl.wrapper }>
          <div className={ cn(stl.tabHeader, 'flex items-center') }>
            <div className="flex items-center mr-auto">
              <h3 className={ cn(stl.tabTitle, "text-2xl") }>Manage Roles and Permissions</h3>
              <Popup
                trigger={
                  <div>
                    <IconButton
                      id="add-button"
                      circle
                      icon="plus"
                      outline
                      disabled={ !isAdmin }
                      onClick={ () => setShowModal(true) }
                    />
                  </div>
                }
                disabled={ isAdmin }
                size="tiny"
                inverted
                position="top left"
              />
            </div>
          </div>
          <NoContent
            title="No roles are available."
            size="small"
            show={ false }
            icon
          >
            <div className={''}>
              {roles.map(role => (
                // FIX: stable key added; list previously rendered without one.
                <RoleItem
                  key={role.roleId}
                  role={role}
                  permissions={permissionsMap}
                  editHandler={editHandler}
                  deleteHandler={deleteHandler}
                />
              ))}
            </div>
          </NoContent>
        </div>
      </Loader>
    </React.Fragment>
  )
}

export default connect(state => {
  // Index permission display names by their value for O(1) label lookup.
  const permissions = state.getIn(['roles', 'permissions'])
  const permissionsMap = {}
  permissions.forEach(p => {
    permissionsMap[p.value] = p.name
  });
  return {
    instance: state.getIn(['roles', 'instance']) || null,
    permissionsMap: permissionsMap,
    roles: state.getIn(['roles', 'list']),
    loading: state.getIn(['roles', 'fetchRequest', 'loading']),
    account: state.getIn([ 'user', 'account' ])
  }
}, { init, edit, fetchList, deleteRole })(Roles)

View file

@ -0,0 +1,15 @@
import React from 'react';
import Role from 'Types/role'
interface Props {
  role: Role
}

// Placeholder for the per-role permissions view; renders nothing yet.
const Permissions = (_props: Props) => <div></div>;

export default Permissions;

View file

@ -0,0 +1 @@
export { default } from './Permissions';

View file

@ -0,0 +1,101 @@
import React, { useRef, useEffect } from 'react'
import { connect } from 'react-redux'
import stl from './roleForm.css'
import { save, edit } from 'Duck/roles'
import { Input, Button, Checkbox } from 'UI'
interface Permission {
  name: string,
  value: string
}

interface Props {
  role: any,
  edit: (role: any) => void,
  save: (role: any) => Promise<void>,
  closeModal: () => void,
  saving: boolean,
  // FIX: was Array<Permission>[] (an array of arrays) — it is a flat list.
  permissions: Array<Permission>
}

/**
 * Create/edit form for a role: a name field plus one checkbox per
 * available permission. Saving closes the parent modal on success.
 */
const RoleForm = ({ role, closeModal, edit, save, saving, permissions }: Props) => {
  let focusElement = useRef<any>(null)

  // FIX: accept the submit event and prevent the default form submission,
  // which would otherwise reload the page when the user presses Enter.
  const _save = (e?) => {
    e?.preventDefault?.()
    save(role).then(() => {
      closeModal()
    })
  }

  const write = ({ target: { value, name } }) => edit({ [ name ]: value })

  // Toggle a permission value in the role's Immutable.List of permissions.
  const onChangeOption = (e) => {
    const { permissions } = role
    const index = permissions.indexOf(e)
    const _perms = permissions.contains(e) ? permissions.remove(index) : permissions.push(e)
    edit({ permissions: _perms })
  }

  // Focus the name field when the form opens.
  useEffect(() => {
    focusElement?.current?.focus()
  }, [])

  return (
    <div className={ stl.form }>
      <form onSubmit={ _save } >
        <div className={ stl.formGroup }>
          <label>{ 'Name' }</label>
          <Input
            ref={ focusElement }
            name="name"
            value={ role.name }
            onChange={ write }
            className={ stl.input }
            id="name-field"
          />
        </div>

        <div>
          { permissions.map((permission: any, index) => (
            <div key={ index } className={ stl.formGroup }>
              <Checkbox
                name="permissions"
                className="font-medium"
                type="checkbox"
                checked={ role.permissions.contains(permission.value) }
                onClick={ () => onChangeOption(permission.value) }
                label={permission.name}
              />
            </div>
          ))}
        </div>
      </form>

      <div className="flex items-center">
        <div className="flex items-center mr-auto">
          <Button
            onClick={ _save }
            disabled={ !role.validate() }
            loading={ saving }
            primary
            marginRight
          >
            { role.exists() ? 'Update' : 'Add' }
          </Button>
          <Button
            data-hidden={ !role.exists() }
            onClick={ closeModal }
            outline
          >
            { 'Cancel' }
          </Button>
        </div>
      </div>
    </div>
  );
}

export default connect(state => ({
  role: state.getIn(['roles', 'instance']),
  permissions: state.getIn(['roles', 'permissions']),
  saving: state.getIn([ 'roles', 'saveRequest', 'loading' ]),
}), { edit, save })(RoleForm);

View file

@ -0,0 +1 @@
export { default } from './RoleForm';

View file

@ -0,0 +1,21 @@
/* Layout for the role create/edit form rendered inside a modal. */
.form {
padding: 0 20px;
/* Vertical rhythm between the name field and each permission row. */
& .formGroup {
margin-bottom: 15px;
}
& label {
display: block;
margin-bottom: 5px;
font-weight: 500;
}
/* Name input stretches across the modal body. */
& .input {
width: 100%;
}
/* Native checkbox sizing/spacing for the permission toggles. */
& input[type=checkbox] {
margin-right: 10px;
height: 13px;
}
}

View file

@ -0,0 +1,48 @@
import React from 'react'
import { Icon } from 'UI'
import stl from './roleItem.css'
import cn from 'classnames'

// Small pill that shows a single permission's display value.
// (Renamed from the misspelled `PermisionLabel`; local helper only,
// so no external callers are affected.)
function PermissionLabel({ permission }: any) {
  return (
    <div className={cn(stl.label)}>{ permission }</div>
  );
}

interface Props {
  role: any,
  deleteHandler?: (role: any) => void,
  editHandler?: (role: any) => void,
  permissions: any
}

/**
 * One row in the roles list: role name, its permission pills, and
 * optional edit/delete action buttons (disabled for protected roles
 * via the `disabled` style).
 */
function RoleItem({ role, deleteHandler, editHandler, permissions }: Props) {
  return (
    <div className={cn(stl.wrapper)}>
      <Icon name="user-alt" size="16" marginRight="10" />
      <div className="flex items-center">
        <span>{ role.name }</span>
        <div className="grid grid-flow-col auto-cols-max gap-2">
          {role.permissions.map((permission: any) => (
            // FIX: `permission` is a plain value used as a key into the
            // `permissions` lookup, so `permission.id` was undefined and
            // every item received the same React key. Use the value itself.
            <PermissionLabel permission={permissions[permission]} key={permission} />
          ))}
        </div>
      </div>
      <div className={ stl.actions }>
        { !!deleteHandler &&
          <div className={ cn(stl.button, {[stl.disabled] : role.protected }) } onClick={ () => deleteHandler(role) } id="trash">
            <Icon name="trash" size="16" color="teal"/>
          </div>
        }
        { !!editHandler &&
          <div className={ cn(stl.button, {[stl.disabled] : role.protected }) } onClick={ () => editHandler(role) }>
            <Icon name="edit" size="16" color="teal"/>
          </div>
        }
      </div>
    </div>
  );
}

export default RoleItem;

View file

@ -0,0 +1 @@
export { default } from './RoleItem'

View file

@ -0,0 +1,47 @@
/* A single role row: icon, name, permission pills, trailing actions. */
.wrapper {
display: flex;
align-items: center;
width: 100%;
border-bottom: solid thin #e6e6e6;
padding: 10px 0px;
}
/* Edit/delete buttons pushed to the right edge of the row. */
.actions {
margin-left: auto;
/* opacity: 0; */
transition: all 0.4s;
display: flex;
align-items: center;
& .button {
padding: 5px;
cursor: pointer;
margin-left: 10px;
display: flex;
align-items: center;
justify-content: center;
&:hover {
& svg {
fill: $teal-dark;
}
}
/* Compound selector: a button that itself carries .disabled
   (protected roles) — non-interactive and dimmed. */
&.disabled {
pointer-events: none;
opacity: 0.5;
}
}
/* Descendant selector variant: any .disabled element inside actions.
   NOTE(review): overlaps with `&.disabled` above — presumably kept for
   markup where the class sits on a child node; confirm before removing. */
& .disabled {
pointer-events: none;
opacity: 0.5;
}
}
/* Permission pill. */
.label {
margin-left: 10px;
padding: 0 10px;
border-radius: 3px;
background-color: $gray-lightest;
font-size: 12px;
border: solid thin $gray-light;
width: fit-content;
}

View file

@ -0,0 +1 @@
export { default } from './Roles';

View file

@ -0,0 +1,13 @@
/* Roles tab container — padding handled by inner elements. */
.wrapper {
padding: 0;
}
/* Header row of the Roles tab: title plus inline controls. */
.tabHeader {
display: flex;
align-items: center;
margin-bottom: 25px;
& .tabTitle {
margin: 0 15px 0 0;
/* Override heavier default heading weight from global styles. */
font-weight: 400 !important;
}
}

View file

@ -1,6 +1,7 @@
import { connect } from 'react-redux';
import cn from 'classnames';
import withPageTitle from 'HOCs/withPageTitle';
import withPermissions from 'HOCs/withPermissions'
import { setPeriod, setPlatform, fetchMetadataOptions } from 'Duck/dashboard';
import { NoContent } from 'UI';
import { WIDGET_KEYS } from 'Types/dashboard';
@ -103,6 +104,7 @@ function isInViewport(el) {
);
}
@withPermissions(['METRICS'], 'page-margin container-90')
@connect(state => ({
period: state.getIn([ 'dashboard', 'period' ]),
comparing: state.getIn([ 'dashboard', 'comparing' ]),

View file

@ -1,5 +1,6 @@
import { connect } from 'react-redux';
import withSiteIdRouter from 'HOCs/withSiteIdRouter';
import withPermissions from 'HOCs/withPermissions'
import { UNRESOLVED, RESOLVED, IGNORED } from "Types/errorInfo";
import { getRE } from 'App/utils';
import { fetchBookmarks } from "Duck/errors";
@ -33,6 +34,7 @@ function getStatusLabel(status) {
}
}
@withPermissions(['ERRORS'], 'page-margin container-90')
@withSiteIdRouter
@connect(state => ({
list: state.getIn([ "errors", "list" ]),

View file

@ -9,7 +9,6 @@ function FunnelDropdown(props) {
const writeOption = (e, { name, value }) => {
const { siteId, history } = props;
console.log(value)
history.push(withSiteId(funnelRoute(parseInt(value)), siteId));
}

View file

@ -28,7 +28,7 @@ function FunnelIssueDetails(props) {
<Loader loading={loading}>
<IssueItem issue={issue} inDetails onBack={onBack} />
<div className="my-6" />
<FunnelSessionList issuId={issueId} inDetails />
<FunnelSessionList funnelId={funnelId} issueId={issueId} inDetails />
</Loader>
</div>
)

View file

@ -2,19 +2,27 @@ import React, { useState, useEffect } from 'react'
import { connect } from 'react-redux'
import SessionItem from 'Shared/SessionItem'
import { fetchSessions, fetchSessionsFiltered } from 'Duck/funnels'
import { setFunnelPage } from 'Duck/sessions'
import { LoadMoreButton, NoContent, Loader } from 'UI'
import FunnelSessionsHeader from '../FunnelSessionsHeader'
const PER_PAGE = 10;
function FunnelSessionList(props) {
const { list, sessionsTotal, sessionsSort, inDetails = false } = props;
const { funnelId, issueId, list, sessionsTotal, sessionsSort, inDetails = false } = props;
const [showPages, setShowPages] = useState(1)
const displayedCount = Math.min(showPages * PER_PAGE, list.size);
const addPage = () => setShowPages(showPages + 1);
useEffect(() => {
props.setFunnelPage({
funnelId,
issueId
})
}, [])
return (
<div>
<FunnelSessionsHeader sessionsCount={inDetails ? sessionsTotal : list.size} inDetails={inDetails} />
@ -24,7 +32,7 @@ function FunnelSessionList(props) {
subtext="Please try changing your search parameters."
icon="exclamation-circle"
show={ list.size === 0}
>
>
{ list.take(displayedCount).map(session => (
<SessionItem
key={ session.sessionId }
@ -37,7 +45,7 @@ function FunnelSessionList(props) {
displayedCount={displayedCount}
totalCount={list.size}
onClick={addPage}
/>
/>
</NoContent>
</div>
)
@ -51,4 +59,4 @@ export default connect(state => ({
liveFilters: state.getIn(['funnelFilters', 'appliedFilter']),
funnelFilters: state.getIn(['funnels', 'funnelFilters']),
sessionsSort: state.getIn(['funnels', 'sessionsSort']),
}), { fetchSessions, fetchSessionsFiltered })(FunnelSessionList)
}), { fetchSessions, fetchSessionsFiltered, setFunnelPage })(FunnelSessionList)

View file

@ -11,7 +11,6 @@ const sortOptions = Object.entries(sortOptionsMap)
.map(([ value, text ]) => ({ value, text }));
function FunnelSessionsHeader({ sessionsCount, inDetails = false }) {
const onSort = () => {}
return (
<div className="flex items-center">
<div className="flex items-center mr-auto text-xl">

Some files were not shown because too many files have changed in this diff Show more