Merge remote-tracking branch 'origin/main' into api-v1.9.5

# Conflicts:
#	scripts/helmcharts/openreplay/files/minio.sh
This commit is contained in:
Taha Yassine Kraiem 2022-11-29 16:07:56 +01:00
commit c43e783339
225 changed files with 3790 additions and 5919 deletions

View file

@ -86,7 +86,11 @@ jobs:
;;
esac
[[ $(cat /tmp/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 1)
if [[ $(cat /tmp/images_to_build.txt) == "" ]]; then
echo "Nothing to build here"
touch /tmp/nothing-to-build-here
exit 0
fi
#
# Pushing image to registry
#
@ -94,7 +98,7 @@ jobs:
for image in $(cat /tmp/images_to_build.txt);
do
echo "Bulding $image"
PUSH_IMAGE=0 bash -x ./build.sh skip $image
PUSH_IMAGE=0 bash -x ./build.sh ee $image
[[ "x$skip_security_checks" == "xtrue" ]] || {
curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
./trivy image --exit-code 1 --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
@ -105,7 +109,7 @@ jobs:
} && {
echo "Skipping Security Checks"
}
PUSH_IMAGE=1 bash -x ./build.sh skip $image
PUSH_IMAGE=1 bash -x ./build.sh ee $image
echo "::set-output name=image::$DOCKER_REPO/$image:$IMAGE_TAG"
done
@ -118,6 +122,7 @@ jobs:
# Deploying image to environment.
#
set -x
[[ -f /tmp/nothing-to-build-here ]] && exit 0
cd scripts/helmcharts/
## Update secerts
@ -156,16 +161,16 @@ jobs:
# Deploy command
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true | kubectl apply -f -
- name: Alert slack
if: ${{ failure() }}
uses: rtCamp/action-slack-notify@v2
env:
SLACK_CHANNEL: ee
SLACK_TITLE: "Failed ${{ github.workflow }}"
SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff'
SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'
#- name: Alert slack
# if: ${{ failure() }}
# uses: rtCamp/action-slack-notify@v2
# env:
# SLACK_CHANNEL: ee
# SLACK_TITLE: "Failed ${{ github.workflow }}"
# SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff'
# SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
# SLACK_USERNAME: "OR Bot"
# SLACK_MESSAGE: 'Build failed :bomb:'
# - name: Debug Job

View file

@ -86,7 +86,11 @@ jobs:
;;
esac
[[ $(cat /tmp/images_to_build.txt) != "" ]] || (echo "Nothing to build here"; exit 1)
if [[ $(cat /tmp/images_to_build.txt) == "" ]]; then
echo "Nothing to build here"
touch /tmp/nothing-to-build-here
exit 0
fi
#
# Pushing image to registry
#
@ -116,6 +120,8 @@ jobs:
#
# Deploying image to environment.
#
set -x
[[ -f /tmp/nothing-to-build-here ]] && exit 0
cd scripts/helmcharts/
## Update secerts

View file

@ -91,9 +91,3 @@ Check out our [roadmap](https://www.notion.so/openreplay/Roadmap-889d2c3d968b478
## License
This monorepo uses several licenses. See [LICENSE](/LICENSE) for more details.
## Contributors
<a href="https://github.com/openreplay/openreplay/graphs/contributors">
<img src="https://contrib.rocks/image?repo=openreplay/openreplay" />
</a>

View file

@ -199,7 +199,8 @@ def process():
logging.info(f"Valid alert, notifying users, alertId:{alert['alertId']} name: {alert['name']}")
notifications.append(generate_notification(alert, result))
except Exception as e:
logging.error(f"!!!Error while running alert query for alertId:{alert['alertId']} name: {alert['name']}")
logging.error(
f"!!!Error while running alert query for alertId:{alert['alertId']} name: {alert['name']}")
logging.error(query)
logging.error(e)
cur = cur.recreate(rollback=True)
@ -212,12 +213,22 @@ def process():
alerts.process_notifications(notifications)
def __format_value(x):
if x % 1 == 0:
x = int(x)
else:
x = round(x, 2)
return f"{x:,}"
def generate_notification(alert, result):
left = __format_value(result['value'])
right = __format_value(alert['query']['right'])
return {
"alertId": alert["alertId"],
"tenantId": alert["tenantId"],
"title": alert["name"],
"description": f"has been triggered, {alert['query']['left']} = {round(result['value'], 2)} ({alert['query']['operator']} {alert['query']['right']}).",
"description": f"has been triggered, {alert['query']['left']} = {left} ({alert['query']['operator']} {right}).",
"buttonText": "Check metrics for more details",
"buttonUrl": f"/{alert['projectId']}/metrics",
"imageUrl": None,

View file

@ -266,7 +266,8 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
params = {"metric_id": metric_id, "is_public": data.is_public, "name": data.name,
"user_id": user_id, "project_id": project_id, "view_type": data.view_type,
"metric_type": data.metric_type, "metric_of": data.metric_of,
"metric_value": data.metric_value, "metric_format": data.metric_format}
"metric_value": data.metric_value, "metric_format": data.metric_format,
"config": json.dumps(data.config.dict())}
for i, s in enumerate(data.series):
prefix = "u_"
if s.index is None:
@ -316,7 +317,8 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
view_type= %(view_type)s, metric_type= %(metric_type)s,
metric_of= %(metric_of)s, metric_value= %(metric_value)s,
metric_format= %(metric_format)s,
edited_at = timezone('utc'::text, now())
edited_at = timezone('utc'::text, now()),
default_config = %(config)s
WHERE metric_id = %(metric_id)s
AND project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
@ -392,7 +394,7 @@ def get(metric_id, project_id, user_id, flatten=True):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT *
"""SELECT *, default_config AS config
FROM metrics
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
FROM metric_series
@ -443,7 +445,7 @@ def get_with_template(metric_id, project_id, user_id, include_dashboard=True):
) AS connected_dashboards ON (TRUE)"""
cur.execute(
cur.mogrify(
f"""SELECT *
f"""SELECT *, default_config AS config
FROM metrics
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
FROM metric_series

View file

@ -111,6 +111,8 @@ def get_dashboard(project_id, user_id, dashboard_id):
for w in row["widgets"]:
w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
w["config"]["col"] = w["default_config"]["col"]
w["config"]["row"] = w["default_config"]["row"]
for s in w["series"]:
s["created_at"] = TimeUTC.datetime_to_timestamp(s["created_at"])
return helper.dict_to_camel_case(row)

View file

@ -419,7 +419,7 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
chart=True, data=args)
pg_sub_query_chart.append("resources.type = 'img'")
pg_sub_query_chart.append("resources.url = top_img.url")
pg_sub_query_chart.append("resources.url_hostpath = top_img.url_hostpath")
pg_sub_query_subset = __get_constraints(project_id=project_id, time_constraint=True,
chart=False, data=args)
@ -431,13 +431,13 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT *
FROM (SELECT resources.url,
FROM (SELECT resources.url_hostpath,
COALESCE(AVG(resources.duration), 0) AS avg_duration,
COUNT(resources.session_id) AS sessions_count
FROM events.resources
INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_subset)}
GROUP BY resources.url
GROUP BY resources.url_hostpath
ORDER BY avg_duration DESC
LIMIT 10) AS top_img
LEFT JOIN LATERAL (
@ -485,13 +485,13 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi
if resources and len(resources) > 0:
for r in resources:
if r["type"] == "IMG":
img_constraints.append(f"resources.url = %(val_{len(img_constraints)})s")
img_constraints.append(f"resources.url_hostpath = %(val_{len(img_constraints)})s")
img_constraints_vals["val_" + str(len(img_constraints) - 1)] = r['value']
elif r["type"] == "LOCATION":
location_constraints.append(f"pages.path = %(val_{len(location_constraints)})s")
location_constraints_vals["val_" + str(len(location_constraints) - 1)] = r['value']
else:
request_constraints.append(f"resources.url = %(val_{len(request_constraints)})s")
request_constraints.append(f"resources.url_hostpath = %(val_{len(request_constraints)})s")
request_constraints_vals["val_" + str(len(request_constraints) - 1)] = r['value']
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
@ -627,12 +627,12 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
pg_sub_query.append("url_hostpath ILIKE %(value)s")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT key, value
FROM ( SELECT DISTINCT ON (url) ROW_NUMBER() OVER (PARTITION BY type ORDER BY url) AS r,
url AS value,
FROM ( SELECT DISTINCT ON (url_hostpath) ROW_NUMBER() OVER (PARTITION BY type ORDER BY url_hostpath) AS r,
url_hostpath AS value,
type AS key
FROM events.resources INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
ORDER BY url, type ASC) AS ranked_values
ORDER BY url_hostpath, type ASC) AS ranked_values
WHERE ranked_values.r<=5;"""
cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "value": helper.string_to_sql_like(text)}))
rows = cur.fetchall()
@ -893,7 +893,7 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days
if type is not None:
pg_sub_query_subset.append(f"resources.type = '{__get_resource_db_type_from_type(type)}'")
if url is not None:
pg_sub_query_subset.append(f"resources.url = %(value)s")
pg_sub_query_subset.append(f"resources.url_hostpath = %(value)s")
with pg_client.PostgresClient() as cur:
pg_query = f"""WITH resources AS (SELECT resources.duration, timestamp
@ -1009,7 +1009,7 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
ORDER BY avg DESC
LIMIT 10) AS main_list
INNER JOIN LATERAL (
SELECT url, type
SELECT url_hostpath AS url, type
FROM events.resources
INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}

View file

@ -76,19 +76,21 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
rows = cur.fetchall()
# if recorded is requested, check if it was saved or computed
if recorded:
for r in rows:
u_values = []
params = {}
for i, r in enumerate(rows):
if r["first_recorded_session_at"] is None:
extra_update = ""
if r["recorded"]:
extra_update = ", first_recorded_session_at=to_timestamp(%(first_recorded)s/1000)"
query = cur.mogrify(f"""UPDATE public.projects
SET sessions_last_check_at=(now() at time zone 'utc')
{extra_update}
WHERE project_id=%(project_id)s""",
{"project_id": r["project_id"], "first_recorded": r["first_recorded"]})
cur.execute(query)
u_values.append(f"(%(project_id_{i})s,to_timestamp(%(first_recorded_{i})s/1000))")
params[f"project_id_{i}"] = r["project_id"]
params[f"first_recorded_{i}"] = r["first_recorded"] if r["recorded"] else None
r.pop("first_recorded_session_at")
r.pop("first_recorded")
if len(u_values) > 0:
query = cur.mogrify(f"""UPDATE public.projects
SET sessions_last_check_at=(now() at time zone 'utc'), first_recorded_session_at=u.first_recorded
FROM (VALUES {",".join(u_values)}) AS u(project_id,first_recorded)
WHERE projects.project_id=u.project_id;""", params)
cur.execute(query)
if recording_state and len(rows) > 0:
project_ids = [f'({r["project_id"]})' for r in rows]

View file

@ -177,7 +177,7 @@ def _isUndefined_operator(op: schemas.SearchEventOperator):
# This function executes the query and return result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status=schemas.ErrorStatus.all, count_only=False, issue=None):
error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False):
if data.bookmarked:
data.startDate, data.endDate = sessions_favorite.get_start_end_timestamp(project_id, user_id)
@ -185,9 +185,11 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
favorite_only=data.bookmarked, issue=issue, project_id=project_id,
user_id=user_id)
if data.limit is not None and data.page is not None:
full_args["sessions_limit"] = data.limit
full_args["sessions_limit_s"] = (data.page - 1) * data.limit
full_args["sessions_limit_e"] = data.page * data.limit
else:
full_args["sessions_limit"] = 200
full_args["sessions_limit_s"] = 1
full_args["sessions_limit_e"] = 200
@ -235,6 +237,12 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
GROUP BY user_id
) AS users_sessions;""",
full_args)
elif ids_only:
main_query = cur.mogrify(f"""SELECT DISTINCT ON(s.session_id) s.session_id
{query_part}
ORDER BY s.session_id desc
LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""",
full_args)
else:
if data.order is None:
data.order = schemas.SortOrderType.desc
@ -242,7 +250,6 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
if data.sort is not None and data.sort != "session_id":
# sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
sort = helper.key_to_snake_case(data.sort)
meta_keys = metadata.get(project_id=project_id)
main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count,
COALESCE(JSONB_AGG(full_sessions)
@ -266,7 +273,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
print(data.json())
print("--------------------")
raise err
if errors_only:
if errors_only or ids_only:
return helper.list_to_camel_case(cur.fetchall())
sessions = cur.fetchone()

View file

@ -140,7 +140,9 @@ def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
if note is None:
return {"errors": ["Note not found"]}
session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}")
session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}")
if note["timestamp"] > 0:
session_url += f"&jumpto={note['timestamp']}"
title = f"<{session_url}|Note for session {note['sessionId']}>"
blocks = [{"type": "section",

View file

@ -181,9 +181,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
values=s["value"], value_key=f"value{i + 1}")
n_stages_query.append(f"""
(SELECT main.session_id,
{"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp,
'{event_type}' AS type,
'{s["operator"]}' AS operator
{"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp
FROM {next_table} AS main {" ".join(extra_from)}
WHERE main.timestamp >= {f"T{i}.stage{i}_timestamp" if i > 0 else "%(startTimestamp)s"}
{f"AND main.session_id=T1.session_id" if i > 0 else ""}
@ -191,30 +189,34 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
{(" AND " + " AND ".join(stage_constraints)) if len(stage_constraints) > 0 else ""}
{(" AND " + " AND ".join(first_stage_extra_constraints)) if len(first_stage_extra_constraints) > 0 and i == 0 else ""}
GROUP BY main.session_id)
AS T{i + 1} {"USING (session_id)" if i > 0 else ""}
AS T{i + 1} {"ON (TRUE)" if i > 0 else ""}
""")
if len(n_stages_query) == 0:
n_stages = len(n_stages_query)
if n_stages == 0:
return []
n_stages_query = " LEFT JOIN LATERAL ".join(n_stages_query)
n_stages_query += ") AS stages_t"
n_stages_query = f"""
SELECT stages_and_issues_t.*, sessions.user_uuid FROM (
SELECT stages_and_issues_t.*, sessions.user_uuid
FROM (
SELECT * FROM (
SELECT * FROM
{n_stages_query}
SELECT T1.session_id, {",".join([f"stage{i + 1}_timestamp" for i in range(n_stages)])}
FROM {n_stages_query}
LEFT JOIN LATERAL
( SELECT ISE.session_id,
ISS.type as issue_type,
( SELECT ISS.type as issue_type,
ISE.timestamp AS issue_timestamp,
ISS.context_string as issue_context,
COALESCE(ISS.context_string,'') as issue_context,
ISS.issue_id as issue_id
FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id)
WHERE ISE.timestamp >= stages_t.stage1_timestamp
AND ISE.timestamp <= stages_t.stage{i + 1}_timestamp
AND ISS.project_id=%(project_id)s
AND ISE.session_id = stages_t.session_id
AND ISS.type!='custom' -- ignore custom issues because they are massive
{"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}
) AS issues_t USING (session_id)
LIMIT 10 -- remove the limit to get exact stats
) AS issues_t ON (TRUE)
) AS stages_and_issues_t INNER JOIN sessions USING(session_id);
"""
@ -297,7 +299,21 @@ def pearson_corr(x: list, y: list):
return r, confidence, False
def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_with_context, first_stage, last_stage):
# def tuple_or(t: tuple):
# x = 0
# for el in t:
# x |= el # | is for bitwise OR
# return x
#
# The following function is correct optimization of the previous function because t is a list of 0,1
def tuple_or(t: tuple):
for el in t:
if el > 0:
return 1
return 0
def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues, first_stage, last_stage):
"""
Returns two lists with binary values 0/1:
@ -316,12 +332,6 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_
transitions = []
n_sess_affected = 0
errors = {}
for issue in all_issues_with_context:
split = issue.split('__^__')
errors[issue] = {
"errors": [],
"issue_type": split[0],
"context": split[1]}
for row in rows:
t = 0
@ -329,38 +339,26 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_
last_ts = row[f'stage{last_stage}_timestamp']
if first_ts is None:
continue
elif first_ts is not None and last_ts is not None:
elif last_ts is not None:
t = 1
transitions.append(t)
ic_present = False
for issue_type_with_context in errors:
for error_id in all_issues:
if error_id not in errors:
errors[error_id] = []
ic = 0
issue_type = errors[issue_type_with_context]["issue_type"]
context = errors[issue_type_with_context]["context"]
if row['issue_type'] is not None:
row_issue_id = row['issue_id']
if row_issue_id is not None:
if last_ts is None or (first_ts < row['issue_timestamp'] < last_ts):
context_in_row = row['issue_context'] if row['issue_context'] is not None else ''
if issue_type == row['issue_type'] and context == context_in_row:
if error_id == row_issue_id:
ic = 1
ic_present = True
errors[issue_type_with_context]["errors"].append(ic)
errors[error_id].append(ic)
if ic_present and t:
n_sess_affected += 1
# def tuple_or(t: tuple):
# x = 0
# for el in t:
# x |= el
# return x
def tuple_or(t: tuple):
for el in t:
if el > 0:
return 1
return 0
errors = {key: errors[key]["errors"] for key in errors}
all_errors = [tuple_or(t) for t in zip(*errors.values())]
return transitions, errors, all_errors, n_sess_affected
@ -376,10 +374,9 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage):
"""
affected_users = defaultdict(lambda: set())
affected_sessions = defaultdict(lambda: set())
contexts = defaultdict(lambda: None)
all_issues = {}
n_affected_users_dict = defaultdict(lambda: None)
n_affected_sessions_dict = defaultdict(lambda: None)
all_issues_with_context = set()
n_issues_dict = defaultdict(lambda: 0)
issues_by_session = defaultdict(lambda: 0)
@ -395,15 +392,13 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage):
# check that the issue exists and belongs to subfunnel:
if iss is not None and (row[f'stage{last_stage}_timestamp'] is None or
(row[f'stage{first_stage}_timestamp'] < iss_ts < row[f'stage{last_stage}_timestamp'])):
context_string = row['issue_context'] if row['issue_context'] is not None else ''
issue_with_context = iss + '__^__' + context_string
contexts[issue_with_context] = {"context": context_string, "id": row["issue_id"]}
all_issues_with_context.add(issue_with_context)
n_issues_dict[issue_with_context] += 1
if row["issue_id"] not in all_issues:
all_issues[row["issue_id"]] = {"context": row['issue_context'], "issue_type": row["issue_type"]}
n_issues_dict[row["issue_id"]] += 1
if row['user_uuid'] is not None:
affected_users[issue_with_context].add(row['user_uuid'])
affected_users[row["issue_id"]].add(row['user_uuid'])
affected_sessions[issue_with_context].add(row['session_id'])
affected_sessions[row["issue_id"]].add(row['session_id'])
issues_by_session[row[f'session_id']] += 1
if len(affected_users) > 0:
@ -414,29 +409,28 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage):
n_affected_sessions_dict.update({
iss: len(affected_sessions[iss]) for iss in affected_sessions
})
return all_issues_with_context, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict, contexts
return all_issues, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict
def count_sessions(rows, n_stages):
session_counts = {i: set() for i in range(1, n_stages + 1)}
for ind, row in enumerate(rows):
for row in rows:
for i in range(1, n_stages + 1):
if row[f"stage{i}_timestamp"] is not None:
session_counts[i].add(row[f"session_id"])
session_counts = {i: len(session_counts[i]) for i in session_counts}
return session_counts
def count_users(rows, n_stages):
users_in_stages = defaultdict(lambda: set())
for ind, row in enumerate(rows):
users_in_stages = {i: set() for i in range(1, n_stages + 1)}
for row in rows:
for i in range(1, n_stages + 1):
if row[f"stage{i}_timestamp"] is not None:
users_in_stages[i].add(row["user_uuid"])
users_count = {i: len(users_in_stages[i]) for i in range(1, n_stages + 1)}
return users_count
@ -489,18 +483,18 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
last_stage = n_stages
n_critical_issues = 0
issues_dict = dict({"significant": [],
"insignificant": []})
issues_dict = {"significant": [],
"insignificant": []}
session_counts = count_sessions(rows, n_stages)
drop = session_counts[first_stage] - session_counts[last_stage]
all_issues_with_context, n_issues_dict, affected_users_dict, affected_sessions, contexts = get_affected_users_for_all_issues(
all_issues, n_issues_dict, affected_users_dict, affected_sessions = get_affected_users_for_all_issues(
rows, first_stage, last_stage)
transitions, errors, all_errors, n_sess_affected = get_transitions_and_issues_of_each_type(rows,
all_issues_with_context,
all_issues,
first_stage, last_stage)
# print("len(transitions) =", len(transitions))
del rows
if any(all_errors):
total_drop_corr, conf, is_sign = pearson_corr(transitions, all_errors)
@ -513,33 +507,35 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
if drop_only:
return total_drop_due_to_issues
for issue in all_issues_with_context:
for issue_id in all_issues:
if not any(errors[issue]):
if not any(errors[issue_id]):
continue
r, confidence, is_sign = pearson_corr(transitions, errors[issue])
r, confidence, is_sign = pearson_corr(transitions, errors[issue_id])
if r is not None and drop is not None and is_sign:
lost_conversions = int(r * affected_sessions[issue])
lost_conversions = int(r * affected_sessions[issue_id])
else:
lost_conversions = None
if r is None:
r = 0
split = issue.split('__^__')
issues_dict['significant' if is_sign else 'insignificant'].append({
"type": split[0],
"title": helper.get_issue_title(split[0]),
"affected_sessions": affected_sessions[issue],
"unaffected_sessions": session_counts[1] - affected_sessions[issue],
"type": all_issues[issue_id]["issue_type"],
"title": helper.get_issue_title(all_issues[issue_id]["issue_type"]),
"affected_sessions": affected_sessions[issue_id],
"unaffected_sessions": session_counts[1] - affected_sessions[issue_id],
"lost_conversions": lost_conversions,
"affected_users": affected_users_dict[issue],
"affected_users": affected_users_dict[issue_id],
"conversion_impact": round(r * 100),
"context_string": contexts[issue]["context"],
"issue_id": contexts[issue]["id"]
"context_string": all_issues[issue_id]["context"],
"issue_id": issue_id
})
if is_sign:
n_critical_issues += n_issues_dict[issue]
n_critical_issues += n_issues_dict[issue_id]
# To limit the number of returned issues to the frontend
issues_dict["significant"] = issues_dict["significant"][:20]
issues_dict["insignificant"] = issues_dict["insignificant"][:20]
return n_critical_issues, issues_dict, total_drop_due_to_issues

View file

@ -70,7 +70,7 @@ def format_payload(p, truncate_to_first=False):
def url_exists(url):
try:
r = requests.head(url, allow_redirects=False)
return r.status_code == 200 and r.headers.get("Content-Type") != "text/html"
return r.status_code == 200 and "text/html" not in r.headers.get("Content-Type", "")
except Exception as e:
print(f"!! Issue checking if URL exists: {url}")
print(e)

View file

@ -242,7 +242,7 @@ class JiraManager:
def get_issue_types(self):
try:
types = self._jira.issue_types()
types = self._jira.project(self._config['JIRA_PROJECT_ID']).issueTypes
except JIRAError as e:
self.retries -= 1
if (e.status_code // 100) == 4 and self.retries > 0:

View file

@ -1,15 +1,15 @@
requests==2.28.1
urllib3==1.26.12
boto3==1.26.4
boto3==1.26.14
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.5.0
elasticsearch==8.5.1
jira==3.4.1
fastapi==0.86.0
uvicorn[standard]==0.19.0
fastapi==0.87.0
uvicorn[standard]==0.20.0
python-decouple==3.6
pydantic[email]==1.10.2
apscheduler==3.9.1
apscheduler==3.9.1.post1

View file

@ -1,15 +1,15 @@
requests==2.28.1
urllib3==1.26.12
boto3==1.26.4
boto3==1.26.14
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.5.0
elasticsearch==8.5.1
jira==3.4.1
fastapi==0.86.0
uvicorn[standard]==0.19.0
fastapi==0.87.0
uvicorn[standard]==0.20.0
python-decouple==3.6
pydantic[email]==1.10.2
apscheduler==3.9.1
apscheduler==3.9.1.post1

View file

@ -56,6 +56,14 @@ def sessions_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchem
return {'data': data}
@app.post('/{projectId}/sessions/search/ids', tags=["sessions"])
@app.post('/{projectId}/sessions/search2/ids', tags=["sessions"])
def session_ids_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True)
return {'data': data}
@app.get('/{projectId}/events/search', tags=["events"])
def events_search(projectId: int, q: str,
type: Union[schemas.FilterType, schemas.EventType,

View file

@ -874,14 +874,14 @@ class TryCustomMetricsPayloadSchema(CustomMetricChartPayloadSchema):
class CustomMetricsConfigSchema(BaseModel):
col: Optional[int] = Field(default=2)
col: Optional[int] = Field(...)
row: Optional[int] = Field(default=2)
position: Optional[int] = Field(default=0)
class CreateCustomMetricsSchema(TryCustomMetricsPayloadSchema):
series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1)
config: CustomMetricsConfigSchema = Field(default=CustomMetricsConfigSchema())
config: CustomMetricsConfigSchema = Field(...)
@root_validator(pre=True)
def transform_series(cls, values):

View file

@ -1,6 +1,6 @@
FROM golang:1.18-alpine3.15 AS prepare
RUN apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash
RUN apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash librdkafka-dev cyrus-sasl cyrus-sasl-gssapiv2 krb5
WORKDIR /root
@ -15,11 +15,11 @@ COPY pkg pkg
COPY internal internal
ARG SERVICE_NAME
RUN CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o service -tags musl openreplay/backend/cmd/$SERVICE_NAME
RUN CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o service -tags dynamic openreplay/backend/cmd/$SERVICE_NAME
FROM alpine AS entrypoint
RUN apk add --no-cache ca-certificates
RUN apk add --no-cache ca-certificates librdkafka-dev cyrus-sasl cyrus-sasl-gssapiv2 krb5
RUN adduser -u 1001 openreplay -D
ENV TZ=UTC \
@ -29,6 +29,18 @@ ENV TZ=UTC \
UAPARSER_FILE=/home/openreplay/regexes.yaml \
HTTP_PORT=8080 \
KAFKA_USE_SSL=true \
# KAFKA_USE_KERBEROS should be set true if you wish to use Kerberos auth for Kafka
KAFKA_USE_KERBEROS=false \
# KERBEROS_SERVICE_NAME is the primary name of the Brokers configured in the Broker JAAS file
KERBEROS_SERVICE_NAME="" \
# KERBEROS_PRINCIPAL is this client's principal name
KERBEROS_PRINCIPAL="" \
# KERBEROS_PRINCIPAL is the absolute path to the keytab to be used for authentication
KERBEROS_KEYTAB_LOCATION="" \
# KAFKA_SSL_KEY is the absolute path to the CA cert for verifying the broker's key
KAFKA_SSL_KEY="" \
# KAFKA_SSL_CERT is a CA cert string (PEM format) for verifying the broker's key
KAFKA_SSL_CERT="" \
KAFKA_MAX_POLL_INTERVAL_MS=400000 \
REDIS_STREAMS_MAX_LEN=10000 \
TOPIC_RAW_WEB=raw \

View file

@ -1,6 +1,6 @@
FROM golang:1.18-alpine3.15 AS prepare
RUN apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash
RUN apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash librdkafka-dev cyrus-sasl-gssapi cyrus-sasl-devel
WORKDIR /root
@ -14,11 +14,11 @@ COPY cmd cmd
COPY pkg pkg
COPY internal internal
RUN for name in assets db ender http integrations sink storage;do CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o bin/$name -tags musl openreplay/backend/cmd/$name; done
RUN for name in assets db ender http integrations sink storage;do CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o bin/$name -tags dynamic openreplay/backend/cmd/$name; done
FROM alpine AS entrypoint
#FROM pygmy/alpine-tini:latest
RUN apk add --no-cache ca-certificates
RUN apk add --no-cache ca-certificates librdkafka-dev cyrus-sasl-gssapi cyrus-sasl-devel pkgconf
ENV TZ=UTC \
FS_ULIMIT=1000 \
@ -28,6 +28,18 @@ ENV TZ=UTC \
HTTP_PORT=80 \
BEACON_SIZE_LIMIT=7000000 \
KAFKA_USE_SSL=true \
# KAFKA_USE_KERBEROS should be set true if you wish to use Kerberos auth for Kafka
KAFKA_USE_KERBEROS=false \
# KERBEROS_SERVICE_NAME is the primary name of the Brokers configured in the Broker JAAS file
KERBEROS_SERVICE_NAME="" \
# KERBEROS_PRINCIPAL is this client's principal name
KERBEROS_PRINCIPAL="" \
# KERBEROS_PRINCIPAL is the absolute path to the keytab to be used for authentication
KERBEROS_KEYTAB_LOCATION="" \
# KAFKA_SSL_KEY is the absolute path to the CA cert for verifying the broker's key
KAFKA_SSL_KEY="" \
# KAFKA_SSL_CERT is a CA cert string (PEM format) for verifying the broker's key
KAFKA_SSL_CERT="" \
KAFKA_MAX_POLL_INTERVAL_MS=400000 \
REDIS_STREAMS_MAX_LEN=3000 \
TOPIC_RAW_WEB=raw \

View file

@ -73,6 +73,8 @@ func main() {
log.Printf("Error while caching: %v", err)
case <-tick:
cacher.UpdateTimeouts()
case msg := <-msgConsumer.Rebalanced():
log.Println(msg)
default:
if !cacher.CanCache() {
continue

View file

@ -163,6 +163,8 @@ func main() {
os.Exit(0)
case <-commitTick:
commitDBUpdates()
case msg := <-consumer.Rebalanced():
log.Println(msg)
default:
// Handle new message from queue
if err := consumer.ConsumeNext(); err != nil {

View file

@ -1,92 +0,0 @@
chalice:
env:
jwt_secret: SetARandomStringHere
clickhouse:
enabled: false
fromVersion: v1.6.0
global:
domainName: openreplay.local
email:
emailFrom: OpenReplay<do-not-reply@openreplay.com>
emailHost: ""
emailPassword: ""
emailPort: "587"
emailSslCert: ""
emailSslKey: ""
emailUseSsl: "false"
emailUseTls: "true"
emailUser: ""
enterpriseEditionLicense: ""
ingress:
controller:
config:
enable-real-ip: true
force-ssl-redirect: false
max-worker-connections: 0
proxy-body-size: 10m
ssl-redirect: false
extraArgs:
default-ssl-certificate: app/openreplay-ssl
ingressClass: openreplay
ingressClassResource:
name: openreplay
service:
externalTrafficPolicy: Local
kafka:
kafkaHost: kafka.db.svc.cluster.local
kafkaPort: "9092"
kafkaUseSsl: "false"
zookeeperHost: databases-zookeeper.svc.cluster.local
zookeeperNonTLSPort: 2181
postgresql:
postgresqlDatabase: postgres
postgresqlHost: postgresql.db.svc.cluster.local
postgresqlPassword: changeMePassword
postgresqlPort: "5432"
postgresqlUser: postgres
redis:
redisHost: redis-master.db.svc.cluster.local
redisPort: "6379"
s3:
accessKey: changeMeMinioAccessKey
assetsBucket: sessions-assets
endpoint: http://minio.db.svc.cluster.local:9000
recordingsBucket: mobs
region: us-east-1
secretKey: changeMeMinioPassword
sourcemapsBucket: sourcemaps
ingress-nginx:
controller:
config:
enable-real-ip: true
force-ssl-redirect: false
max-worker-connections: 0
proxy-body-size: 10m
ssl-redirect: false
extraArgs:
default-ssl-certificate: app/openreplay-ssl
ingressClass: openreplay
ingressClassResource:
name: openreplay
service:
externalTrafficPolicy: Local
kafka:
kafkaHost: kafka.db.svc.cluster.local
kafkaPort: "9092"
kafkaUseSsl: "false"
zookeeperHost: databases-zookeeper.svc.cluster.local
zookeeperNonTLSPort: 2181
minio:
global:
minio:
accessKey: changeMeMinioAccessKey
secretKey: changeMeMinioPassword
postgresql:
postgresqlDatabase: postgres
postgresqlHost: postgresql.db.svc.cluster.local
postgresqlPassword: changeMePassword
postgresqlPort: "5432"
postgresqlUser: postgres
redis:
redisHost: redis-master.db.svc.cluster.local
redisPort: "6379"

View file

@ -98,6 +98,8 @@ func main() {
if err := consumer.CommitBack(intervals.EVENTS_BACK_COMMIT_GAP); err != nil {
log.Printf("can't commit messages with offset: %s", err)
}
case msg := <-consumer.Rebalanced():
log.Println(msg)
default:
if err := consumer.ConsumeNext(); err != nil {
log.Fatalf("Error on consuming: %v", err)

View file

@ -82,6 +82,8 @@ func main() {
})
producer.Flush(cfg.ProducerTimeout)
consumer.Commit()
case msg := <-consumer.Rebalanced():
log.Println(msg)
default:
if err := consumer.ConsumeNext(); err != nil {
log.Fatalf("Error on consuming: %v", err)

View file

@ -3,16 +3,14 @@ package main
import (
"context"
"log"
"openreplay/backend/pkg/pprof"
"os"
"os/signal"
"strings"
"syscall"
"time"
"openreplay/backend/internal/config/sink"
"openreplay/backend/internal/sink/assetscache"
"openreplay/backend/internal/sink/oswriter"
"openreplay/backend/internal/sink/sessionwriter"
"openreplay/backend/internal/storage"
"openreplay/backend/pkg/messages"
"openreplay/backend/pkg/monitoring"
@ -21,8 +19,6 @@ import (
)
func main() {
pprof.StartProfilingServer()
metrics := monitoring.New("sink")
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
@ -33,7 +29,7 @@ func main() {
log.Fatalf("%v doesn't exist. %v", cfg.FsDir, err)
}
writer := oswriter.NewWriter(cfg.FsUlimit, cfg.FsDir)
writer := sessionwriter.NewWriter(cfg.FsUlimit, cfg.FsDir, cfg.FileBuffer, cfg.SyncTimeout)
producer := queue.NewProducer(cfg.MessageSizeLimit, true)
defer producer.Close(cfg.ProducerCloseTimeout)
@ -64,6 +60,7 @@ func main() {
if err := producer.Produce(cfg.TopicTrigger, msg.SessionID(), msg.Encode()); err != nil {
log.Printf("can't send SessionEnd to trigger topic: %s; sessID: %d", err, msg.SessionID())
}
writer.Close(msg.SessionID())
return
}
@ -95,47 +92,20 @@ func main() {
counter.Update(msg.SessionID(), time.UnixMilli(ts))
}
// Write encoded message with index to session file
data := msg.EncodeWithIndex()
// Try to encode message to avoid null data inserts
data := msg.Encode()
if data == nil {
log.Printf("can't encode with index, err: %s", err)
return
}
wasWritten := false // To avoid timestamp duplicates in original mob file
if messages.IsDOMType(msg.TypeID()) {
if err := writer.WriteDOM(msg.SessionID(), data); err != nil {
if strings.Contains(err.Error(), "not a directory") {
// Trying to write data to mob file by original path
oldErr := writer.WriteMOB(msg.SessionID(), data)
if oldErr != nil {
log.Printf("MOB Writeer error: %s, prev DOM error: %s, info: %s", oldErr, err, msg.Meta().Batch().Info())
} else {
wasWritten = true
}
} else {
log.Printf("DOM Writer error: %s, info: %s", err, msg.Meta().Batch().Info())
}
}
}
if !messages.IsDOMType(msg.TypeID()) || msg.TypeID() == messages.MsgTimestamp {
// TODO: write only necessary timestamps
if err := writer.WriteDEV(msg.SessionID(), data); err != nil {
if strings.Contains(err.Error(), "not a directory") {
if !wasWritten {
// Trying to write data to mob file by original path
oldErr := writer.WriteMOB(msg.SessionID(), data)
if oldErr != nil {
log.Printf("MOB Writeer error: %s, prev DEV error: %s, info: %s", oldErr, err, msg.Meta().Batch().Info())
}
}
} else {
log.Printf("Devtools Writer error: %s, info: %s", err, msg.Meta().Batch().Info())
}
}
// Write message to file
if err := writer.Write(msg); err != nil {
log.Printf("writer error: %s", err)
return
}
// [METRICS] Increase the number of written to the files messages and the message size
messageSize.Record(context.Background(), float64(len(data)))
messageSize.Record(context.Background(), float64(len(msg.Encode())))
savedMessages.Add(context.Background(), 1)
}
@ -153,27 +123,36 @@ func main() {
sigchan := make(chan os.Signal, 1)
signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
tick := time.Tick(30 * time.Second)
tick := time.Tick(10 * time.Second)
tickInfo := time.Tick(30 * time.Second)
for {
select {
case sig := <-sigchan:
log.Printf("Caught signal %v: terminating\n", sig)
if err := writer.CloseAll(); err != nil {
log.Printf("closeAll error: %v\n", err)
}
// Sync and stop writer
writer.Stop()
// Commit and stop consumer
if err := consumer.Commit(); err != nil {
log.Printf("can't commit messages: %s", err)
}
consumer.Close()
os.Exit(0)
case <-tick:
if err := writer.SyncAll(); err != nil {
log.Fatalf("sync error: %v\n", err)
}
counter.Print()
if err := consumer.Commit(); err != nil {
log.Printf("can't commit messages: %s", err)
}
case <-tickInfo:
counter.Print()
log.Printf("writer: %s", writer.Info())
case <-consumer.Rebalanced():
s := time.Now()
// Commit now to avoid duplicate reads
if err := consumer.Commit(); err != nil {
log.Printf("can't commit messages: %s", err)
}
// Sync all files
writer.Sync()
log.Printf("manual sync finished, dur: %d", time.Now().Sub(s).Milliseconds())
default:
err := consumer.ConsumeNext()
if err != nil {

View file

@ -73,6 +73,8 @@ func main() {
os.Exit(0)
case <-counterTick:
go counter.Print()
case msg := <-consumer.Rebalanced():
log.Println(msg)
default:
err := consumer.ConsumeNext()
if err != nil {

View file

@ -8,6 +8,7 @@ require (
github.com/Masterminds/semver v1.5.0
github.com/aws/aws-sdk-go v1.44.98
github.com/btcsuite/btcutil v1.0.2
github.com/confluentinc/confluent-kafka-go v1.8.2
github.com/elastic/go-elasticsearch/v7 v7.13.1
github.com/go-redis/redis v6.15.9+incompatible
github.com/google/uuid v1.3.0
@ -26,9 +27,8 @@ require (
go.opentelemetry.io/otel/exporters/prometheus v0.30.0
go.opentelemetry.io/otel/metric v0.30.0
go.opentelemetry.io/otel/sdk/metric v0.30.0
golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2
golang.org/x/net v0.0.0-20220906165146-f3363e06e74c
google.golang.org/api v0.81.0
gopkg.in/confluentinc/confluent-kafka-go.v1 v1.8.2
)
require (
@ -38,7 +38,6 @@ require (
cloud.google.com/go/storage v1.14.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/cespare/xxhash/v2 v2.1.2 // indirect
github.com/confluentinc/confluent-kafka-go v1.9.0 // indirect
github.com/go-logr/logr v1.2.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
@ -53,7 +52,6 @@ require (
github.com/jackc/puddle v1.2.2-0.20220404125616-4e959849469a // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/klauspost/compress v1.15.7 // indirect
github.com/kr/pretty v0.3.0 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
github.com/paulmach/orb v0.7.1 // indirect
github.com/pierrec/lz4/v4 v4.1.15 // indirect
@ -69,8 +67,8 @@ require (
golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4 // indirect
golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect
golang.org/x/sync v0.0.0-20220513210516-0976fa681c29 // indirect
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a // indirect
golang.org/x/text v0.3.7 // indirect
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 // indirect
golang.org/x/text v0.4.0 // indirect
golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df // indirect
google.golang.org/appengine v1.6.7 // indirect
google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd // indirect

View file

@ -115,12 +115,11 @@ github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWH
github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
github.com/confluentinc/confluent-kafka-go v1.9.0 h1:d1k62oAuQVxgdMdiDQnpkABbtIWTBwXHpDcyGQUw5QQ=
github.com/confluentinc/confluent-kafka-go v1.9.0/go.mod h1:WDFs+KlhHITEoCzEfHSNgj5aP7vjajyYbZpvTEGs1sE=
github.com/confluentinc/confluent-kafka-go v1.8.2 h1:PBdbvYpyOdFLehj8j+9ba7FL4c4Moxn79gy9cYKxG5E=
github.com/confluentinc/confluent-kafka-go v1.8.2/go.mod h1:u2zNLny2xq+5rWeTQjFHbDzzNuba4P1vo31r9r4uAdg=
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
@ -328,14 +327,12 @@ github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxv
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
@ -400,8 +397,6 @@ github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0
github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBOAvL+k=
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc=
@ -566,8 +561,9 @@ golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su
golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2 h1:NWy5+hlRbC7HK+PmcXVUmW1IMyFce7to56IUvhUFm7Y=
golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220906165146-f3363e06e74c h1:yKufUcDwucU5urd+50/Opbt4AYpqthk7wHpHok8f1lo=
golang.org/x/net v0.0.0-20220906165146-f3363e06e74c/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@ -678,8 +674,9 @@ golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220429233432-b5fbb4746d32/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a h1:dGzPydgVsqGcTRVwiLJ1jVbufYwmzD3LfVPLKsKg+0k=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 h1:WIoqL4EROvwiPdUtaip4VcDdpZ4kha7wBWZrbVKCIZg=
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -690,8 +687,9 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0 h1:BrVqGRd7+k1DiOgtnFvAkoQEWQvBc25ouMJM6429SFg=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@ -939,8 +937,6 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/confluentinc/confluent-kafka-go.v1 v1.8.2 h1:QAgN6OC0o7dwvyz+HML6GYm+0Pk54O91+oxGqJ/5z8I=
gopkg.in/confluentinc/confluent-kafka-go.v1 v1.8.2/go.mod h1:ZdI3yfYmdNSLQPNCpO1y00EHyWaHG5EnQEyL/ntAegY=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=

View file

@ -9,6 +9,8 @@ type Config struct {
common.Config
FsDir string `env:"FS_DIR,required"`
FsUlimit uint16 `env:"FS_ULIMIT,required"`
FileBuffer int `env:"FILE_BUFFER,default=16384"`
SyncTimeout int `env:"SYNC_TIMEOUT,default=5"`
GroupSink string `env:"GROUP_SINK,required"`
TopicRawWeb string `env:"TOPIC_RAW_WEB,required"`
TopicRawIOS string `env:"TOPIC_RAW_IOS,required"`
@ -17,7 +19,7 @@ type Config struct {
CacheAssets bool `env:"CACHE_ASSETS,required"`
AssetsOrigin string `env:"ASSETS_ORIGIN,required"`
ProducerCloseTimeout int `env:"PRODUCER_CLOSE_TIMEOUT,default=15000"`
CacheThreshold int64 `env:"CACHE_THRESHOLD,default=75"`
CacheThreshold int64 `env:"CACHE_THRESHOLD,default=5"`
CacheExpiration int64 `env:"CACHE_EXPIRATION,default=120"`
}

View file

@ -11,7 +11,6 @@ type Config struct {
S3Region string `env:"AWS_REGION_WEB,required"`
S3Bucket string `env:"S3_BUCKET_WEB,required"`
FSDir string `env:"FS_DIR,required"`
FSCleanHRS int `env:"FS_CLEAN_HRS,required"`
FileSplitSize int `env:"FILE_SPLIT_SIZE,required"`
RetryTimeout time.Duration `env:"RETRY_TIMEOUT,default=2m"`
GroupStorage string `env:"GROUP_STORAGE,required"`
@ -21,6 +20,7 @@ type Config struct {
DeleteTimeout time.Duration `env:"DELETE_TIMEOUT,default=48h"`
ProducerCloseTimeout int `env:"PRODUCER_CLOSE_TIMEOUT,default=15000"`
UseFailover bool `env:"USE_FAILOVER,default=false"`
MaxFileSize int64 `env:"MAX_FILE_SIZE,default=524288000"`
}
func New() *Config {

View file

@ -1,166 +0,0 @@
package oswriter
import (
"errors"
"log"
"math"
"os"
"path/filepath"
"strconv"
"time"
)
// Writer maintains a bounded pool of open file handles, keyed by file name
// relative to dir, and evicts the least-recently-used handle once the pool
// reaches ulimit.
type Writer struct {
	ulimit int                 // max number of simultaneously open files
	dir    string              // base directory; always ends with "/" (see NewWriter)
	files  map[string]*os.File // open handles keyed by relative file name
	atimes map[string]int64    // last-access unix time per open file; drives LRU eviction
}
// NewWriter builds a Writer rooted at dir that keeps at most ulimit files
// open at any moment.
func NewWriter(ulimit uint16, dir string) *Writer {
	w := &Writer{
		ulimit: int(ulimit),
		dir:    dir + "/",
	}
	w.files = make(map[string]*os.File)
	w.atimes = make(map[string]int64)
	return w
}
// open returns an open append-mode handle for fname (relative to w.dir),
// reusing a cached handle when one exists. When the pool is at its ulimit the
// least recently used handle is closed first. Missing parent directories are
// created on demand.
func (w *Writer) open(fname string) (*os.File, error) {
	file, ok := w.files[fname]
	if ok {
		return file, nil
	}
	// Pool is full: evict the entry with the smallest (oldest) access time.
	if len(w.atimes) == w.ulimit {
		var m_k string
		var m_t int64 = math.MaxInt64
		for k, t := range w.atimes {
			if t < m_t {
				m_k = k
				m_t = t
			}
		}
		if err := w.close(m_k); err != nil {
			return nil, err
		}
	}
	// mkdir if not exist
	pathTo := w.dir + filepath.Dir(fname)
	if info, err := os.Stat(pathTo); os.IsNotExist(err) {
		// Best effort: a failure here is only logged; it surfaces again via
		// the OpenFile call below.
		if err := os.MkdirAll(pathTo, 0755); err != nil {
			log.Printf("os.MkdirAll error: %s", err)
		}
	} else {
		if err != nil {
			return nil, err
		}
		if !info.IsDir() {
			// Callers match this exact message ("not a directory") to fall
			// back to the legacy single-file layout — do not reword it.
			return nil, errors.New("not a directory")
		}
	}
	file, err := os.OpenFile(w.dir+fname, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644)
	if err != nil {
		log.Printf("os.OpenFile error: %s", err)
		return nil, err
	}
	w.files[fname] = file
	w.atimes[fname] = time.Now().Unix()
	return file, nil
}
// close flushes and closes the cached handle for fname, then removes it from
// the pool. Closing a name with no cached handle is a no-op.
func (w *Writer) close(fname string) error {
	f := w.files[fname]
	if f == nil {
		return nil
	}
	if err := f.Sync(); err != nil {
		return err
	}
	closeErr := f.Close()
	if closeErr != nil {
		return closeErr
	}
	delete(w.files, fname)
	delete(w.atimes, fname)
	return nil
}
// WriteDOM appends data to the session's dom.mob file.
func (w *Writer) WriteDOM(sid uint64, data []byte) error {
	fname := filepath.Join(strconv.FormatUint(sid, 10), "dom.mob")
	return w.write(fname, data)
}
// WriteDEV appends data to the session's devtools.mob file.
func (w *Writer) WriteDEV(sid uint64, data []byte) error {
	fname := filepath.Join(strconv.FormatUint(sid, 10), "devtools.mob")
	return w.write(fname, data)
}
// WriteMOB appends data to the legacy single mob file, named after the bare
// session id with no per-session directory.
func (w *Writer) WriteMOB(sid uint64, data []byte) error {
	// Use session id as a file name without directory
	f, err := w.openWithoutDir(strconv.FormatUint(sid, 10))
	if err != nil {
		return err
	}
	_, err = f.Write(data)
	return err
}
// write appends data to fname (relative to w.dir), opening the file on
// demand via the pooled open().
func (w *Writer) write(fname string, data []byte) error {
	f, openErr := w.open(fname)
	if openErr != nil {
		return openErr
	}
	_, writeErr := f.Write(data)
	return writeErr
}
// openWithoutDir returns an open append-mode handle for the flat (legacy)
// file fname directly under w.dir, reusing a cached handle when one exists.
// Unlike open(), it never creates subdirectories.
//
// Fixes vs the previous version: the eviction guard uses ">=" instead of
// "==" so the bound holds even if the pool ever exceeds the limit, and an
// OpenFile failure is logged for parity with open().
func (w *Writer) openWithoutDir(fname string) (*os.File, error) {
	file, ok := w.files[fname]
	if ok {
		return file, nil
	}
	// Pool is full: evict the least recently used handle.
	if len(w.atimes) >= w.ulimit {
		var m_k string
		var m_t int64 = math.MaxInt64
		for k, t := range w.atimes {
			if t < m_t {
				m_k = k
				m_t = t
			}
		}
		if err := w.close(m_k); err != nil {
			return nil, err
		}
	}
	file, err := os.OpenFile(w.dir+fname, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644)
	if err != nil {
		log.Printf("os.OpenFile error: %s", err)
		return nil, err
	}
	w.files[fname] = file
	w.atimes[fname] = time.Now().Unix()
	return file, nil
}
// SyncAll fsyncs every open file, stopping at the first failure.
func (w *Writer) SyncAll() error {
	for _, f := range w.files {
		err := f.Sync()
		if err != nil {
			return err
		}
	}
	return nil
}
// CloseAll flushes and closes every open file. Every handle is attempted even
// when some fail (the previous version returned on the first error, leaking
// the remaining descriptors); the first error encountered is returned. The
// pool maps are reset to empty (not nil) so a later open cannot panic on
// assignment to a nil map.
func (w *Writer) CloseAll() error {
	var firstErr error
	for _, file := range w.files {
		if err := file.Sync(); err != nil && firstErr == nil {
			firstErr = err
		}
		if err := file.Close(); err != nil && firstErr == nil {
			firstErr = err
		}
	}
	w.files = make(map[string]*os.File)
	w.atimes = make(map[string]int64)
	return firstErr
}

View file

@ -0,0 +1,76 @@
package sessionwriter
import (
"bufio"
"io"
"log"
"os"
)
// File couples an on-disk file (opened in append mode) with a write buffer
// and a dirty flag, so Sync can be skipped when nothing was written since the
// last flush.
type File struct {
	file    *os.File      // underlying file; opened O_WRONLY|O_CREATE|O_APPEND by NewFile
	buffer  *bufio.Writer // write buffer in front of file
	updated bool          // true when data was written since the last successful Sync
}
// NewFile opens (creating if necessary) path in append mode and fronts it
// with a write buffer of bufSize bytes.
func NewFile(path string, bufSize int) (*File, error) {
	f, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644)
	if err != nil {
		return nil, err
	}
	res := &File{
		file:    f,
		updated: false,
	}
	res.buffer = bufio.NewWriterSize(f, bufSize)
	return res, nil
}
// Write appends data through the buffer and marks the file dirty.
//
// The previous implementation special-cased payloads larger than the buffer:
// it retried Flush up to three times and then wrote the new data directly to
// the file even when every flush failed, which could put bytes on disk out of
// order (new data before still-buffered older data). bufio.Writer already
// handles oversized writes correctly — it flushes what is buffered and writes
// large payloads straight through, propagating any error — so we simply
// delegate to it.
func (f *File) Write(data []byte) error {
	f.updated = true
	return f.write(f.buffer, data)
}
// write pushes data into w, looping until every byte is accepted or an error
// is reported (guards against short writes).
func (f *File) write(w io.Writer, data []byte) error {
	written := 0
	for written < len(data) {
		n, err := w.Write(data[written:])
		if err != nil {
			return err
		}
		written += n
	}
	return nil
}
// Sync flushes the write buffer and fsyncs the underlying file, then clears
// the dirty flag. It is a cheap no-op when nothing was written since the
// previous successful Sync.
func (f *File) Sync() error {
	if !f.updated {
		return nil
	}
	if err := f.buffer.Flush(); err != nil {
		return err
	}
	if err := f.file.Sync(); err != nil {
		return err
	}
	// Only clear the flag after both flush and fsync succeeded, so a failed
	// sync is retried next time.
	f.updated = false
	return nil
}
// Close flushes the buffer, fsyncs and closes the underlying file. The file
// is always closed; the first error encountered (flush, sync, then close) is
// returned instead of being silently discarded as before.
func (f *File) Close() error {
	flushErr := f.buffer.Flush()
	syncErr := f.file.Sync()
	closeErr := f.file.Close()
	if flushErr != nil {
		return flushErr
	}
	if syncErr != nil {
		return syncErr
	}
	return closeErr
}

View file

@ -0,0 +1,56 @@
package sessionwriter
import (
"math"
"sync"
"time"
)
// Meta tracks, under a mutex, the last-touch unix time of each open session
// so the writer can nominate the least recently used session for eviction
// once the open-session limit is reached.
type Meta struct {
	limit int              // tracked-session count at which GetExtra starts nominating victims
	lock  *sync.Mutex      // guards meta
	meta  map[uint64]int64 // session id -> unix time of the last Add()
}
// NewMeta creates a Meta that starts nominating evictions once limit
// sessions are tracked.
func NewMeta(limit int) *Meta {
	m := &Meta{limit: limit}
	m.lock = &sync.Mutex{}
	m.meta = make(map[uint64]int64, limit)
	return m
}
// Add records (or refreshes) the last-touch time of sid.
func (m *Meta) Add(sid uint64) {
	m.lock.Lock()
	defer m.lock.Unlock()
	m.meta[sid] = time.Now().Unix()
}
// Count reports how many sessions are currently tracked.
func (m *Meta) Count() int {
	m.lock.Lock()
	n := len(m.meta)
	m.lock.Unlock()
	return n
}
// Delete stops tracking sid; unknown ids are ignored.
func (m *Meta) Delete(sid uint64) {
	m.lock.Lock()
	defer m.lock.Unlock()
	delete(m.meta, sid)
}
// GetExtra returns the least recently touched session id once the tracked
// count has reached the limit, and 0 while there is still room.
func (m *Meta) GetExtra() uint64 {
	m.lock.Lock()
	defer m.lock.Unlock()
	if len(m.meta) < m.limit {
		return 0
	}
	oldestSID, oldestTS := uint64(0), int64(math.MaxInt64)
	for sid, ts := range m.meta {
		if ts < oldestTS {
			oldestSID, oldestTS = sid, ts
		}
	}
	return oldestSID
}

View file

@ -0,0 +1,96 @@
package sessionwriter
import (
"encoding/binary"
"fmt"
"strconv"
"sync"
"openreplay/backend/pkg/messages"
)
// Session owns the pair of files backing one recorded session: the main DOM
// file and the devtools file. Writes share one reusable 8-byte index buffer,
// protected by lock.
type Session struct {
	lock    *sync.Mutex // serializes Write/Sync/Close and protects index
	dom     *File       // main session file (DOM-type messages)
	dev     *File       // devtools file (non-DOM messages plus timestamps)
	index   []byte      // scratch buffer for the little-endian message index
	updated bool        // set once any message was written — NOTE(review): never read in this file; confirm use elsewhere
}
// NewSession opens (or creates) the two files backing session sessID inside
// workDir, each fronted by a bufSize-byte write buffer. The devtools file is
// the session-id path with a bare "devtools" suffix — no separator — which is
// the naming the storage uploader reconstructs; do not change it.
// A zero session id is rejected; if the devtools file cannot be opened the
// already-open dom file is closed again so no descriptor leaks.
func NewSession(sessID uint64, workDir string, bufSize int) (*Session, error) {
	if sessID == 0 {
		return nil, fmt.Errorf("wrong session id")
	}
	filePath := workDir + strconv.FormatUint(sessID, 10)
	dom, err := NewFile(filePath, bufSize)
	if err != nil {
		return nil, err
	}
	dev, err := NewFile(filePath+"devtools", bufSize)
	if err != nil {
		// Don't leak the already-open dom handle.
		dom.Close()
		return nil, err
	}
	return &Session{
		lock:    &sync.Mutex{},
		dom:     dom,
		dev:     dev,
		index:   make([]byte, 8),
		updated: false,
	}, nil
}
// Write appends msg to the session's files. Each record is an 8-byte
// little-endian message index followed by the encoded message body. DOM-type
// messages go to the dom file; non-DOM messages go to the devtools file; a
// timestamp message satisfies both branches and is written to both files.
func (s *Session) Write(msg messages.Message) error {
	s.lock.Lock()
	defer s.lock.Unlock()
	// Encode message index (into the shared scratch buffer; safe under lock)
	binary.LittleEndian.PutUint64(s.index, msg.Meta().Index)
	// Write message to dom.mob file
	if messages.IsDOMType(msg.TypeID()) {
		// Write message index
		if err := s.dom.Write(s.index); err != nil {
			return err
		}
		// Write message body
		if err := s.dom.Write(msg.Encode()); err != nil {
			return err
		}
	}
	s.updated = true
	// Write message to dev.mob file
	if !messages.IsDOMType(msg.TypeID()) || msg.TypeID() == messages.MsgTimestamp {
		// Write message index
		if err := s.dev.Write(s.index); err != nil {
			return err
		}
		// Write message body
		if err := s.dev.Write(msg.Encode()); err != nil {
			return err
		}
	}
	return nil
}
// Sync flushes both backing files to disk; a dom-file error wins.
func (s *Session) Sync() error {
	s.lock.Lock()
	defer s.lock.Unlock()
	err := s.dom.Sync()
	if err != nil {
		return err
	}
	return s.dev.Sync()
}
// Close closes both backing files. Both are always closed — the previous
// version returned on a dom error and leaked the devtools descriptor — and
// the first error is reported.
func (s *Session) Close() error {
	s.lock.Lock()
	defer s.lock.Unlock()
	domErr := s.dom.Close()
	devErr := s.dev.Close()
	if domErr != nil {
		return domErr
	}
	return devErr
}

View file

@ -0,0 +1,126 @@
package sessionwriter
import (
"fmt"
"log"
"sync"
"time"
"openreplay/backend/pkg/messages"
)
// SessionWriter fans incoming messages out to per-session file pairs, keeps
// the number of simultaneously open sessions bounded, and periodically syncs
// everything from a background goroutine (see synchronizer).
type SessionWriter struct {
	filesLimit  int           // max open sessions (fd budget / 2: two files per session)
	workingDir  string        // base directory; always ends with "/"
	fileBuffer  int           // per-file write-buffer size in bytes
	syncTimeout time.Duration // period of the background sync loop
	meta        *Meta         // LRU bookkeeping used to pick eviction victims
	sessions    *sync.Map     // session id (uint64) -> *Session
	done        chan struct{} // signals the synchronizer to shut down
	stopped     chan struct{} // synchronizer acknowledges shutdown
}
// NewWriter builds a SessionWriter rooted at workingDir and starts its
// background synchronizer goroutine. filesLimit is the file-descriptor
// budget (e.g. the process ulimit); since each session keeps two files open,
// at most filesLimit/2 sessions may be open at once.
func NewWriter(filesLimit uint16, workingDir string, fileBuffer int, syncTimeout int) *SessionWriter {
	// Each session owns 2 files, so the session limit is half the fd budget.
	sessionLimit := int(filesLimit) / 2
	w := &SessionWriter{
		filesLimit:  sessionLimit,
		workingDir:  workingDir + "/",
		fileBuffer:  fileBuffer,
		syncTimeout: time.Duration(syncTimeout) * time.Second,
		// Bug fix: Meta previously received the raw fd budget, allowing twice
		// as many open files as intended; it must use the per-session limit
		// that actually drives eviction (GetExtra).
		meta:     NewMeta(sessionLimit),
		sessions: &sync.Map{},
		done:     make(chan struct{}),
		stopped:  make(chan struct{}),
	}
	go w.synchronizer()
	return w
}
// Write routes msg to its session's files, lazily creating the session on
// first sight. Before a new session is admitted, the least recently used one
// is closed if the open-session limit was reached (eviction failure is only
// logged).
//
// NOTE(review): the Load / Store pair below is not atomic — two goroutines
// hitting the same new session id concurrently could each create a Session
// and one would leak. Presumably a single consumer goroutine calls Write;
// confirm before using this concurrently.
func (w *SessionWriter) Write(msg messages.Message) (err error) {
	var (
		sess *Session
		sid  = msg.SessionID()
	)
	// Load session
	sessObj, ok := w.sessions.Load(sid)
	if !ok {
		// Create new session
		sess, err = NewSession(sid, w.workingDir, w.fileBuffer)
		if err != nil {
			return fmt.Errorf("can't create session: %d, err: %s", sid, err)
		}
		// Check opened sessions limit and close extra session if you need to
		if extraSessID := w.meta.GetExtra(); extraSessID != 0 {
			if err := w.Close(extraSessID); err != nil {
				log.Printf("can't close session: %s", err)
			}
		}
		// Add created session
		w.sessions.Store(sid, sess)
		w.meta.Add(sid)
	} else {
		sess = sessObj.(*Session)
	}
	// Write data to session
	return sess.Write(msg)
}
// sync flushes the files of one session; an unknown id yields an error.
func (w *SessionWriter) sync(sid uint64) error {
	sessObj, ok := w.sessions.Load(sid)
	if !ok {
		return fmt.Errorf("session: %d not found", sid)
	}
	return sessObj.(*Session).Sync()
}
// Close removes the session from the writer, closes its files and drops its
// LRU record; an unknown id yields an error.
func (w *SessionWriter) Close(sid uint64) error {
	sessObj, ok := w.sessions.LoadAndDelete(sid)
	if !ok {
		return fmt.Errorf("session: %d not found", sid)
	}
	closeErr := sessObj.(*Session).Close()
	w.meta.Delete(sid)
	return closeErr
}
// Stop asks the background synchronizer to close every open session and
// blocks until it acknowledges shutdown. Both channels are unbuffered, so
// this is a synchronous handshake; the synchronizer exits afterwards, so
// Stop must not be called twice.
func (w *SessionWriter) Stop() {
	w.done <- struct{}{}
	<-w.stopped
}
// Info renders a short human-readable state summary for logging.
func (w *SessionWriter) Info() string {
	count := w.meta.Count()
	return fmt.Sprintf("%d sessions", count)
}
// Sync flushes every open session to disk; per-session errors are logged,
// not propagated.
func (w *SessionWriter) Sync() {
	flushOne := func(sid, _ any) bool {
		if err := w.sync(sid.(uint64)); err != nil {
			log.Printf("can't sync file descriptor: %s", err)
		}
		return true
	}
	w.sessions.Range(flushOne)
}
// synchronizer is the background loop started by NewWriter: every
// syncTimeout it syncs all open sessions, and on a done signal it closes
// every session, acknowledges via the stopped channel and exits.
func (w *SessionWriter) synchronizer() {
	tick := time.Tick(w.syncTimeout)
	for {
		select {
		case <-tick:
			w.Sync()
		case <-w.done:
			// Shutdown: close (flush + release) every open session.
			w.sessions.Range(func(sid, lockObj any) bool {
				if err := w.Close(sid.(uint64)); err != nil {
					log.Printf("can't close file descriptor: %s", err)
				}
				return true
			})
			// Let Stop() return.
			w.stopped <- struct{}{}
			return
		}
	}
}

View file

@ -13,7 +13,6 @@ import (
"openreplay/backend/pkg/storage"
"os"
"strconv"
"strings"
"time"
)
@ -71,43 +70,46 @@ func New(cfg *config.Config, s3 *storage.S3, metrics *monitoring.Metrics) (*Stor
}
func (s *Storage) UploadSessionFiles(msg *messages.SessionEnd) error {
sessionDir := strconv.FormatUint(msg.SessionID(), 10)
if err := s.uploadKey(msg.SessionID(), sessionDir+"/dom.mob", true, 5, msg.EncryptionKey); err != nil {
oldErr := s.uploadKey(msg.SessionID(), sessionDir, true, 5, msg.EncryptionKey)
if oldErr != nil {
return fmt.Errorf("upload file error: %s. failed checking mob file using old path: %s", err, oldErr)
}
// Exit method anyway because we don't have dev tools separation in prev version
return nil
}
if err := s.uploadKey(msg.SessionID(), sessionDir+"/devtools.mob", false, 4, msg.EncryptionKey); err != nil {
if err := s.uploadKey(msg.SessionID(), "/dom.mob", true, 5, msg.EncryptionKey); err != nil {
return err
}
if err := s.uploadKey(msg.SessionID(), "/devtools.mob", false, 4, msg.EncryptionKey); err != nil {
log.Printf("can't find devtools for session: %d, err: %s", msg.SessionID(), err)
}
return nil
}
// TODO: make a bit cleaner
func (s *Storage) uploadKey(sessID uint64, key string, shouldSplit bool, retryCount int, encryptionKey string) error {
// TODO: make a bit cleaner.
// TODO: Of course, I'll do!
func (s *Storage) uploadKey(sessID uint64, suffix string, shouldSplit bool, retryCount int, encryptionKey string) error {
if retryCount <= 0 {
return nil
}
start := time.Now()
file, err := os.Open(s.cfg.FSDir + "/" + key)
fileName := strconv.FormatUint(sessID, 10)
mobFileName := fileName
if suffix == "/devtools.mob" {
mobFileName += "devtools"
}
filePath := s.cfg.FSDir + "/" + mobFileName
// Check file size before download into memory
info, err := os.Stat(filePath)
if err == nil {
if info.Size() > s.cfg.MaxFileSize {
log.Printf("big file, size: %d, session: %d", info.Size(), sessID)
return nil
}
}
file, err := os.Open(filePath)
if err != nil {
return fmt.Errorf("File open error: %v; sessID: %s, part: %d, sessStart: %s\n",
err, key, sessID%16,
err, fileName, sessID%16,
time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))),
)
}
defer file.Close()
// Ignore "s" at the end of mob file name for "old" sessions
newVers := false
if strings.Contains(key, "/") {
newVers = true
}
var fileSize int64 = 0
fileInfo, err := file.Stat()
if err != nil {
@ -117,17 +119,18 @@ func (s *Storage) uploadKey(sessID uint64, key string, shouldSplit bool, retryCo
}
var encryptedData []byte
fileName += suffix
if shouldSplit {
nRead, err := file.Read(s.startBytes)
if err != nil {
log.Printf("File read error: %s; sessID: %s, part: %d, sessStart: %s",
err,
key,
fileName,
sessID%16,
time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))),
)
time.AfterFunc(s.cfg.RetryTimeout, func() {
s.uploadKey(sessID, key, shouldSplit, retryCount-1, encryptionKey)
s.uploadKey(sessID, suffix, shouldSplit, retryCount-1, encryptionKey)
})
return nil
}
@ -146,11 +149,7 @@ func (s *Storage) uploadKey(sessID uint64, key string, shouldSplit bool, retryCo
}
// Compress and save to s3
startReader := bytes.NewBuffer(encryptedData)
startKey := key
if newVers {
startKey += "s"
}
if err := s.s3.Upload(s.gzipFile(startReader), startKey, "application/octet-stream", true); err != nil {
if err := s.s3.Upload(s.gzipFile(startReader), fileName+"s", "application/octet-stream", true); err != nil {
log.Fatalf("Storage: start upload failed. %v\n", err)
}
// TODO: fix possible error (if we read less then FileSplitSize)
@ -161,7 +160,7 @@ func (s *Storage) uploadKey(sessID uint64, key string, shouldSplit bool, retryCo
if err != nil {
log.Printf("File read error: %s; sessID: %s, part: %d, sessStart: %s",
err,
key,
fileName,
sessID%16,
time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))),
)
@ -183,7 +182,7 @@ func (s *Storage) uploadKey(sessID uint64, key string, shouldSplit bool, retryCo
}
// Compress and save to s3
endReader := bytes.NewBuffer(encryptedData)
if err := s.s3.Upload(s.gzipFile(endReader), key+"e", "application/octet-stream", true); err != nil {
if err := s.s3.Upload(s.gzipFile(endReader), fileName+"e", "application/octet-stream", true); err != nil {
log.Fatalf("Storage: end upload failed. %v\n", err)
}
}
@ -195,7 +194,7 @@ func (s *Storage) uploadKey(sessID uint64, key string, shouldSplit bool, retryCo
if err != nil {
log.Printf("File read error: %s; sessID: %s, part: %d, sessStart: %s",
err,
key,
fileName,
sessID%16,
time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))),
)
@ -216,7 +215,7 @@ func (s *Storage) uploadKey(sessID uint64, key string, shouldSplit bool, retryCo
encryptedData = fileData
}
endReader := bytes.NewBuffer(encryptedData)
if err := s.s3.Upload(s.gzipFile(endReader), key+"s", "application/octet-stream", true); err != nil {
if err := s.s3.Upload(s.gzipFile(endReader), fileName, "application/octet-stream", true); err != nil {
log.Fatalf("Storage: end upload failed. %v\n", err)
}
s.archivingTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds()))

View file

@ -11,6 +11,8 @@ import (
. "openreplay/backend/pkg/messages"
)
const SOURCE_JS = "js_exception"
type ErrorEvent struct {
MessageID uint64
Timestamp uint64
@ -64,7 +66,7 @@ func WrapJSException(m *JSException) *ErrorEvent {
return &ErrorEvent{
MessageID: m.Meta().Index,
Timestamp: uint64(m.Meta().Timestamp),
Source: "js_exception",
Source: SOURCE_JS,
Name: m.Name,
Message: m.Message,
Payload: m.Payload,
@ -105,14 +107,16 @@ func (e *ErrorEvent) ID(projectID uint32) string {
hash.Write([]byte(e.Source))
hash.Write([]byte(e.Name))
hash.Write([]byte(e.Message))
frame, err := parseFirstFrame(e.Payload)
if err != nil {
log.Printf("Can't parse stackframe ((( %v ))): %v", e.Payload, err)
}
if frame != nil {
hash.Write([]byte(frame.FileName))
hash.Write([]byte(strconv.Itoa(frame.LineNo)))
hash.Write([]byte(strconv.Itoa(frame.ColNo)))
if e.Source == SOURCE_JS {
frame, err := parseFirstFrame(e.Payload)
if err != nil {
log.Printf("Can't parse stackframe ((( %v ))): %v", e.Payload, err)
}
if frame != nil {
hash.Write([]byte(frame.FileName))
hash.Write([]byte(strconv.Itoa(frame.LineNo)))
hash.Write([]byte(strconv.Itoa(frame.ColNo)))
}
}
return strconv.FormatUint(uint64(projectID), 16) + hex.EncodeToString(hash.Sum(nil))
}

View file

@ -100,6 +100,7 @@ func (i *messageIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) {
tp: msgType,
size: i.size,
reader: reader,
raw: batchData,
skipped: &i.canSkip,
broken: &i.broken,
meta: i.messageInfo,

View file

@ -8,13 +8,17 @@ import (
"log"
)
var (
one = []byte{0}
three = []byte{0, 0, 0}
)
func ReadByte(reader io.Reader) (byte, error) {
p := make([]byte, 1)
_, err := io.ReadFull(reader, p)
_, err := io.ReadFull(reader, one)
if err != nil {
return 0, err
}
return p[0], nil
return one[0], nil
}
func ReadData(reader io.Reader) ([]byte, error) {
@ -156,8 +160,7 @@ func WriteSize(size uint64, buf []byte, p int) {
}
func ReadSize(reader io.Reader) (uint64, error) {
buf := make([]byte, 3)
n, err := io.ReadFull(reader, buf)
n, err := io.ReadFull(reader, three)
if err != nil {
return 0, err
}
@ -165,7 +168,7 @@ func ReadSize(reader io.Reader) (uint64, error) {
return 0, fmt.Errorf("read only %d of 3 size bytes", n)
}
var size uint64
for i, b := range buf {
for i, b := range three {
size += uint64(b) << (8 * i)
}
return size, nil

View file

@ -13,6 +13,7 @@ type RawMessage struct {
size uint64
data []byte
reader *bytes.Reader
raw []byte
meta *message
encoded bool
skipped *bool
@ -23,15 +24,25 @@ func (m *RawMessage) Encode() []byte {
if m.encoded {
return m.data
}
m.data = make([]byte, m.size+1)
m.data[0] = uint8(m.tp)
m.encoded = true
*m.skipped = false
_, err := io.ReadFull(m.reader, m.data[1:])
if err != nil {
log.Printf("message encode err: %s, type: %d, sess: %d", err, m.tp, m.SessionID())
// Try to avoid EOF error
if m.reader.Len() < int(m.size) {
return nil
}
// Get current batch position
currPos, err := m.reader.Seek(0, io.SeekCurrent)
if err != nil {
log.Printf("can't get current batch position: %s", err)
return nil
}
// "Move" message type
if currPos == 0 {
log.Printf("can't move message type, curr position = %d", currPos)
return nil
}
// Dirty hack to avoid extra memory allocation
m.raw[currPos-1] = uint8(m.tp)
m.data = m.raw[currPos-1 : currPos+int64(m.size)]
m.encoded = true
return m.data
}

View file

@ -6,6 +6,7 @@ type Consumer interface {
CommitBack(gap int64) error
Commit() error
Close()
Rebalanced() <-chan interface{}
}
// Producer sends batches of session data to queue (redis or kafka)

View file

@ -27,6 +27,7 @@ type Consumer struct {
idsPending streamPendingIDsMap
lastTs int64
autoCommit bool
event chan interface{}
}
func NewConsumer(group string, streams []string, messageIterator messages.MessageIterator) *Consumer {
@ -57,11 +58,16 @@ func NewConsumer(group string, streams []string, messageIterator messages.Messag
group: group,
autoCommit: true,
idsPending: idsPending,
event: make(chan interface{}, 4),
}
}
const READ_COUNT = 10
func (c *Consumer) Rebalanced() <-chan interface{} {
return c.event
}
func (c *Consumer) ConsumeNext() error {
// MBTODO: read in go routine, send messages to channel
res, err := c.redis.XReadGroup(&_redis.XReadGroupArgs{

View file

@ -2,6 +2,7 @@ package redisstream
import (
"log"
"regexp"
"github.com/go-redis/redis"
@ -14,9 +15,20 @@ func getRedisClient() *redis.Client {
if redisClient != nil {
return redisClient
}
redisClient = redis.NewClient(&redis.Options{
Addr: env.String("REDIS_STRING"),
})
connectionString := env.String("REDIS_STRING")
match, _ := regexp.MatchString("^[^:]+://", connectionString)
if !match {
connectionString = "redis://" + connectionString
}
options, err := redis.ParseURL(connectionString)
if err != nil {
log.Fatalln(err)
}
redisClient = redis.NewClient(options)
if _, err := redisClient.Ping().Result(); err != nil {
log.Fatalln(err)
}

View file

@ -204,7 +204,8 @@ def process():
logging.info(f"Valid alert, notifying users, alertId:{alert['alertId']} name: {alert['name']}")
notifications.append(generate_notification(alert, result))
except Exception as e:
logging.error(f"!!!Error while running alert query for alertId:{alert['alertId']} name: {alert['name']}")
logging.error(
f"!!!Error while running alert query for alertId:{alert['alertId']} name: {alert['name']}")
logging.error(query)
logging.error(e)
cur = cur.recreate(rollback=True)
@ -217,12 +218,22 @@ def process():
alerts.process_notifications(notifications)
def __format_value(x):
if x % 1 == 0:
x = int(x)
else:
x = round(x, 2)
return f"{x:,}"
def generate_notification(alert, result):
left = __format_value(result['value'])
right = __format_value(alert['query']['right'])
return {
"alertId": alert["alertId"],
"tenantId": alert["tenantId"],
"title": alert["name"],
"description": f"has been triggered, {alert['query']['left']} = {round(result['value'], 2)} ({alert['query']['operator']} {alert['query']['right']}).",
"description": f"has been triggered, {alert['query']['left']} = {left} ({alert['query']['operator']} {right}).",
"buttonText": "Check metrics for more details",
"buttonUrl": f"/{alert['projectId']}/metrics",
"imageUrl": None,

View file

@ -279,7 +279,8 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
params = {"metric_id": metric_id, "is_public": data.is_public, "name": data.name,
"user_id": user_id, "project_id": project_id, "view_type": data.view_type,
"metric_type": data.metric_type, "metric_of": data.metric_of,
"metric_value": data.metric_value, "metric_format": data.metric_format}
"metric_value": data.metric_value, "metric_format": data.metric_format,
"config": json.dumps(data.config.dict())}
for i, s in enumerate(data.series):
prefix = "u_"
if s.index is None:
@ -329,7 +330,8 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSche
view_type= %(view_type)s, metric_type= %(metric_type)s,
metric_of= %(metric_of)s, metric_value= %(metric_value)s,
metric_format= %(metric_format)s,
edited_at = timezone('utc'::text, now())
edited_at = timezone('utc'::text, now()),
default_config = %(config)s
WHERE metric_id = %(metric_id)s
AND project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
@ -405,7 +407,7 @@ def get(metric_id, project_id, user_id, flatten=True):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT *
"""SELECT *, default_config AS config
FROM metrics
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
FROM metric_series
@ -456,7 +458,7 @@ def get_with_template(metric_id, project_id, user_id, include_dashboard=True):
) AS connected_dashboards ON (TRUE)"""
cur.execute(
cur.mogrify(
f"""SELECT *
f"""SELECT *, default_config AS config
FROM metrics
LEFT JOIN LATERAL (SELECT COALESCE(jsonb_agg(metric_series.* ORDER BY index),'[]'::jsonb) AS series
FROM metric_series

View file

@ -118,6 +118,8 @@ def get_dashboard(project_id, user_id, dashboard_id):
for w in row["widgets"]:
w["created_at"] = TimeUTC.datetime_to_timestamp(w["created_at"])
w["edited_at"] = TimeUTC.datetime_to_timestamp(w["edited_at"])
w["config"]["col"] = w["default_config"]["col"]
w["config"]["row"] = w["default_config"]["row"]
for s in w["series"]:
s["created_at"] = TimeUTC.datetime_to_timestamp(s["created_at"])
return helper.dict_to_camel_case(row)

View file

@ -452,18 +452,18 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
ch_sub_query.append("resources.type = 'img'")
ch_sub_query_chart = __get_basic_constraints(table_name="resources", round_start=True, data=args)
ch_sub_query_chart.append("resources.type = 'img'")
ch_sub_query_chart.append("resources.url IN %(url)s")
ch_sub_query_chart.append("resources.url_hostpath IN %(url)s")
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query_chart += meta_condition
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT resources.url,
ch_query = f"""SELECT resources.url_hostpath AS url,
COALESCE(avgOrNull(resources.duration),0) AS avg,
COUNT(1) AS count
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query)} AND resources.duration>0
GROUP BY resources.url ORDER BY avg DESC LIMIT 10;"""
GROUP BY resources.url_hostpath ORDER BY avg DESC LIMIT 10;"""
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
@ -474,13 +474,13 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
urls = [row["url"] for row in rows]
charts = {}
ch_query = f"""SELECT url,
ch_query = f"""SELECT url_hostpath AS url,
toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COALESCE(avgOrNull(resources.duration),0) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
WHERE {" AND ".join(ch_sub_query_chart)} AND resources.duration>0
GROUP BY url, timestamp
ORDER BY url, timestamp;"""
GROUP BY url_hostpath, timestamp
ORDER BY url_hostpath, timestamp;"""
params["url"] = urls
u_rows = ch.execute(query=ch_query, params=params)
for url in urls:
@ -526,13 +526,13 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi
if resources and len(resources) > 0:
for r in resources:
if r["type"] == "IMG":
img_constraints.append(f"resources.url = %(val_{len(img_constraints)})s")
img_constraints.append(f"resources.url_hostpath = %(val_{len(img_constraints)})s")
img_constraints_vals["val_" + str(len(img_constraints) - 1)] = r['value']
elif r["type"] == "LOCATION":
location_constraints.append(f"pages.url_path = %(val_{len(location_constraints)})s")
location_constraints_vals["val_" + str(len(location_constraints) - 1)] = r['value']
else:
request_constraints.append(f"resources.url = %(val_{len(request_constraints)})s")
request_constraints.append(f"resources.url_hostpath = %(val_{len(request_constraints)})s")
request_constraints_vals["val_" + str(len(request_constraints) - 1)] = r['value']
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
@ -638,7 +638,7 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
if resource_type == "ALL" and not pages_only and not events_only:
ch_sub_query.append("positionUTF8(url_hostpath,%(value)s)!=0")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT arrayJoin(arraySlice(arrayReverseSort(arrayDistinct(groupArray(url))), 1, 5)) AS value,
ch_query = f"""SELECT arrayJoin(arraySlice(arrayReverseSort(arrayDistinct(groupArray(url_hostpath))), 1, 5)) AS value,
type AS key
FROM resources
WHERE {" AND ".join(ch_sub_query)}
@ -884,7 +884,7 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days
if type is not None:
ch_sub_query_chart.append(f"resources.type = '{__get_resource_db_type_from_type(type)}'")
if url is not None:
ch_sub_query_chart.append(f"resources.url = %(value)s")
ch_sub_query_chart.append(f"resources.url_hostpath = %(value)s")
meta_condition = __get_meta_constraint(args)
ch_sub_query_chart += meta_condition
ch_sub_query_chart.append("resources.duration>0")
@ -966,7 +966,7 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
ch_sub_query_chart.append("isNotNull(resources.duration)")
ch_sub_query_chart.append("resources.duration>0")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT any(url) AS url, any(type) AS type,
ch_query = f"""SELECT any(url_hostpath) AS url, any(type) AS type,
splitByChar('/', resources.url_hostpath)[-1] AS name,
COALESCE(avgOrNull(NULLIF(resources.duration,0)),0) AS avg
FROM resources {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""}
@ -2179,7 +2179,7 @@ def get_performance_avg_image_load_time(ch, project_id, startTimestamp=TimeUTC.n
if resources and len(resources) > 0:
for r in resources:
if r["type"] == "IMG":
img_constraints.append(f"resources.url = %(val_{len(img_constraints)})s")
img_constraints.append(f"resources.url_hostpath = %(val_{len(img_constraints)})s")
img_constraints_vals["val_" + str(len(img_constraints) - 1)] = r['value']
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
@ -2254,7 +2254,7 @@ def get_performance_avg_request_load_time(ch, project_id, startTimestamp=TimeUTC
if resources and len(resources) > 0:
for r in resources:
if r["type"] != "IMG" and r["type"] == "LOCATION":
request_constraints.append(f"resources.url = %(val_{len(request_constraints)})s")
request_constraints.append(f"resources.url_hostpath = %(val_{len(request_constraints)})s")
request_constraints_vals["val_" + str(len(request_constraints) - 1)] = r['value']
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}

View file

@ -462,18 +462,18 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
ch_sub_query_chart = __get_basic_constraints(table_name="resources", round_start=True, data=args)
# ch_sub_query_chart.append("events.event_type='RESOURCE'")
ch_sub_query_chart.append("resources.type = 'img'")
ch_sub_query_chart.append("resources.url IN %(url)s")
ch_sub_query_chart.append("resources.url_hostpath IN %(url)s")
meta_condition = __get_meta_constraint(args)
ch_sub_query += meta_condition
ch_sub_query_chart += meta_condition
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT resources.url,
ch_query = f"""SELECT resources.url_hostpath AS url,
COALESCE(avgOrNull(resources.duration),0) AS avg,
COUNT(1) AS count
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
WHERE {" AND ".join(ch_sub_query)} AND resources.duration>0
GROUP BY resources.url ORDER BY avg DESC LIMIT 10;"""
GROUP BY resources.url_hostpath ORDER BY avg DESC LIMIT 10;"""
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
rows = ch.execute(query=ch_query, params=params)
@ -484,13 +484,13 @@ def get_slowest_images(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
urls = [row["url"] for row in rows]
charts = {}
ch_query = f"""SELECT url,
ch_query = f"""SELECT url_hostpath AS url,
toUnixTimestamp(toStartOfInterval(resources.datetime, INTERVAL %(step_size)s second ))*1000 AS timestamp,
COALESCE(avgOrNull(resources.duration),0) AS avg
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
WHERE {" AND ".join(ch_sub_query_chart)} AND resources.duration>0
GROUP BY url, timestamp
ORDER BY url, timestamp;"""
GROUP BY url_hostpath, timestamp
ORDER BY url_hostpath, timestamp;"""
params["url"] = urls
# print(ch.format(query=ch_query, params=params))
u_rows = ch.execute(query=ch_query, params=params)
@ -538,13 +538,13 @@ def get_performance(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTi
if resources and len(resources) > 0:
for r in resources:
if r["type"] == "IMG":
img_constraints.append(f"resources.url = %(val_{len(img_constraints)})s")
img_constraints.append(f"resources.url_hostpath = %(val_{len(img_constraints)})s")
img_constraints_vals["val_" + str(len(img_constraints) - 1)] = r['value']
elif r["type"] == "LOCATION":
location_constraints.append(f"pages.url_path = %(val_{len(location_constraints)})s")
location_constraints_vals["val_" + str(len(location_constraints) - 1)] = r['value']
else:
request_constraints.append(f"resources.url = %(val_{len(request_constraints)})s")
request_constraints.append(f"resources.url_hostpath = %(val_{len(request_constraints)})s")
request_constraints_vals["val_" + str(len(request_constraints) - 1)] = r['value']
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}
@ -891,7 +891,7 @@ def get_resources_loading_time(project_id, startTimestamp=TimeUTC.now(delta_days
if type is not None:
ch_sub_query_chart.append(f"resources.type = '{__get_resource_db_type_from_type(type)}'")
if url is not None:
ch_sub_query_chart.append(f"resources.url = %(value)s")
ch_sub_query_chart.append(f"resources.url_hostpath = %(value)s")
meta_condition = __get_meta_constraint(args)
ch_sub_query_chart += meta_condition
ch_sub_query_chart.append("resources.duration>0")
@ -974,7 +974,7 @@ def get_slowest_resources(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
ch_sub_query_chart.append("isNotNull(resources.duration)")
ch_sub_query_chart.append("resources.duration>0")
with ch_client.ClickHouseClient() as ch:
ch_query = f"""SELECT any(url) AS url, any(type) AS type, name,
ch_query = f"""SELECT any(url_hostpath) AS url, any(type) AS type, name,
COALESCE(avgOrNull(NULLIF(resources.duration,0)),0) AS avg
FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources
WHERE {" AND ".join(ch_sub_query)}
@ -2185,7 +2185,7 @@ def get_performance_avg_image_load_time(ch, project_id, startTimestamp=TimeUTC.n
if resources and len(resources) > 0:
for r in resources:
if r["type"] == "IMG":
img_constraints.append(f"resources.url = %(val_{len(img_constraints)})s")
img_constraints.append(f"resources.url_hostpath = %(val_{len(img_constraints)})s")
img_constraints_vals["val_" + str(len(img_constraints) - 1)] = r['value']
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
@ -2260,7 +2260,7 @@ def get_performance_avg_request_load_time(ch, project_id, startTimestamp=TimeUTC
if resources and len(resources) > 0:
for r in resources:
if r["type"] != "IMG" and r["type"] == "LOCATION":
request_constraints.append(f"resources.url = %(val_{len(request_constraints)})s")
request_constraints.append(f"resources.url_hostpath = %(val_{len(request_constraints)})s")
request_constraints_vals["val_" + str(len(request_constraints) - 1)] = r['value']
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp}

View file

@ -51,6 +51,7 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
AND users.deleted_at ISNULL
AND users.tenant_id = %(tenant_id)s
AND (roles.all_projects OR roles_projects.project_id = s.project_id)
LIMIT 1
) AS role_project ON (TRUE)"""
extra_projection = ""
extra_join = ""
@ -86,22 +87,23 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
{"tenant_id": tenant_id, "user_id": user_id, "now": TimeUTC.now()})
cur.execute(query)
rows = cur.fetchall()
# if recorded is requested, check if it was saved or computed
if recorded:
for r in rows:
u_values = []
params = {}
for i, r in enumerate(rows):
if r["first_recorded_session_at"] is None:
extra_update = ""
if r["recorded"]:
extra_update = ", first_recorded_session_at=to_timestamp(%(first_recorded)s/1000)"
query = cur.mogrify(f"""UPDATE public.projects
SET sessions_last_check_at=(now() at time zone 'utc')
{extra_update}
WHERE project_id=%(project_id)s""",
{"project_id": r["project_id"], "first_recorded": r["first_recorded"]})
cur.execute(query)
u_values.append(f"(%(project_id_{i})s,to_timestamp(%(first_recorded_{i})s/1000))")
params[f"project_id_{i}"] = r["project_id"]
params[f"first_recorded_{i}"] = r["first_recorded"] if r["recorded"] else None
r.pop("first_recorded_session_at")
r.pop("first_recorded")
if len(u_values) > 0:
query = cur.mogrify(f"""UPDATE public.projects
SET sessions_last_check_at=(now() at time zone 'utc'), first_recorded_session_at=u.first_recorded
FROM (VALUES {",".join(u_values)}) AS u(project_id,first_recorded)
WHERE projects.project_id=u.project_id;""", params)
cur.execute(query)
if recording_state and len(rows) > 0:
project_ids = [f'({r["project_id"]})' for r in rows]
@ -111,6 +113,7 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
WHERE sessions.start_ts >= %(startDate)s AND sessions.start_ts <= %(endDate)s
GROUP BY project_id;""",
{"startDate": TimeUTC.now(delta_days=-3), "endDate": TimeUTC.now(delta_days=1)})
cur.execute(query=query)
status = cur.fetchall()
for r in rows:

View file

@ -107,8 +107,7 @@ def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, fu
session_id=session_id, user_id=context.user_id)
data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id)
data['live'] = live and assist.is_live(project_id=project_id,
session_id=session_id,
data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id,
project_key=data["projectKey"])
data["inDB"] = True
return data
@ -181,7 +180,7 @@ def _isUndefined_operator(op: schemas.SearchEventOperator):
# This function executes the query and return result
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False,
error_status=schemas.ErrorStatus.all, count_only=False, issue=None):
error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False):
if data.bookmarked:
data.startDate, data.endDate = sessions_favorite.get_start_end_timestamp(project_id, user_id)
@ -189,9 +188,11 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
favorite_only=data.bookmarked, issue=issue, project_id=project_id,
user_id=user_id)
if data.limit is not None and data.page is not None:
full_args["sessions_limit"] = data.limit
full_args["sessions_limit_s"] = (data.page - 1) * data.limit
full_args["sessions_limit_e"] = data.page * data.limit
else:
full_args["sessions_limit"] = 200
full_args["sessions_limit_s"] = 1
full_args["sessions_limit_e"] = 200
@ -239,6 +240,12 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
GROUP BY user_id
) AS users_sessions;""",
full_args)
elif ids_only:
main_query = cur.mogrify(f"""SELECT DISTINCT ON(s.session_id) s.session_id
{query_part}
ORDER BY s.session_id desc
LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""",
full_args)
else:
if data.order is None:
data.order = schemas.SortOrderType.desc
@ -246,7 +253,6 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
if data.sort is not None and data.sort != "session_id":
# sort += " " + data.order + "," + helper.key_to_snake_case(data.sort)
sort = helper.key_to_snake_case(data.sort)
meta_keys = metadata.get(project_id=project_id)
main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count,
COALESCE(JSONB_AGG(full_sessions)
@ -270,7 +276,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
print(data.json())
print("--------------------")
raise err
if errors_only:
if errors_only or ids_only:
return helper.list_to_camel_case(cur.fetchall())
sessions = cur.fetchone()

View file

@ -144,7 +144,9 @@ def share_to_slack(tenant_id, user_id, project_id, note_id, webhook_id):
note = get_note(tenant_id=tenant_id, project_id=project_id, user_id=user_id, note_id=note_id, share=user_id)
if note is None:
return {"errors": ["Note not found"]}
session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}")
session_url = urljoin(config('SITE_URL'), f"{note['projectId']}/session/{note['sessionId']}?note={note['noteId']}")
if note["timestamp"] > 0:
session_url += f"&jumpto={note['timestamp']}"
title = f"<{session_url}|Note for session {note['sessionId']}>"
blocks = [{"type": "section",

View file

@ -188,9 +188,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
values=s["value"], value_key=f"value{i + 1}")
n_stages_query.append(f"""
(SELECT main.session_id,
{"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp,
'{event_type}' AS type,
'{s["operator"]}' AS operator
{"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp
FROM {next_table} AS main {" ".join(extra_from)}
WHERE main.timestamp >= {f"T{i}.stage{i}_timestamp" if i > 0 else "%(startTimestamp)s"}
{f"AND main.session_id=T1.session_id" if i > 0 else ""}
@ -198,45 +196,55 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
{(" AND " + " AND ".join(stage_constraints)) if len(stage_constraints) > 0 else ""}
{(" AND " + " AND ".join(first_stage_extra_constraints)) if len(first_stage_extra_constraints) > 0 and i == 0 else ""}
GROUP BY main.session_id)
AS T{i + 1} {"USING (session_id)" if i > 0 else ""}
AS T{i + 1} {"ON (TRUE)" if i > 0 else ""}
""")
if len(n_stages_query) == 0:
n_stages = len(n_stages_query)
if n_stages == 0:
return []
n_stages_query = " LEFT JOIN LATERAL ".join(n_stages_query)
n_stages_query += ") AS stages_t"
n_stages_query = f"""
SELECT stages_and_issues_t.*,sessions.session_id, sessions.user_uuid FROM (
SELECT stages_and_issues_t.*, sessions.user_uuid
FROM (
SELECT * FROM (
SELECT * FROM
{n_stages_query}
SELECT T1.session_id, {",".join([f"stage{i + 1}_timestamp" for i in range(n_stages)])}
FROM {n_stages_query}
LEFT JOIN LATERAL
(
SELECT * FROM
(SELECT ISE.session_id,
ISS.type as issue_type,
( SELECT ISS.type as issue_type,
ISE.timestamp AS issue_timestamp,
ISS.context_string as issue_context,
COALESCE(ISS.context_string,'') as issue_context,
ISS.issue_id as issue_id
FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id)
WHERE ISE.timestamp >= stages_t.stage1_timestamp
AND ISE.timestamp <= stages_t.stage{i + 1}_timestamp
AND ISS.project_id=%(project_id)s
{"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}) AS base_t
) AS issues_t
USING (session_id)) AS stages_and_issues_t
inner join sessions USING(session_id);
AND ISE.session_id = stages_t.session_id
AND ISS.type!='custom' -- ignore custom issues because they are massive
{"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}
LIMIT 10 -- remove the limit to get exact stats
) AS issues_t ON (TRUE)
) AS stages_and_issues_t INNER JOIN sessions USING(session_id);
"""
# LIMIT 10000
params = {"project_id": project_id, "startTimestamp": filter_d["startDate"], "endTimestamp": filter_d["endDate"],
"issueTypes": tuple(filter_issues), **values}
with pg_client.PostgresClient() as cur:
query = cur.mogrify(n_stages_query, params)
# print("---------------------------------------------------")
# print(cur.mogrify(n_stages_query, params))
# print(query)
# print("---------------------------------------------------")
cur.execute(cur.mogrify(n_stages_query, params))
rows = cur.fetchall()
try:
cur.execute(query)
rows = cur.fetchall()
except Exception as err:
print("--------- FUNNEL SEARCH QUERY EXCEPTION -----------")
print(query.decode('UTF-8'))
print("--------- PAYLOAD -----------")
print(filter_d)
print("--------------------")
raise err
return rows
@ -298,7 +306,21 @@ def pearson_corr(x: list, y: list):
return r, confidence, False
def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_with_context, first_stage, last_stage):
# def tuple_or(t: tuple):
# x = 0
# for el in t:
# x |= el # | is for bitwise OR
# return x
#
# The following function is correct optimization of the previous function because t is a list of 0,1
def tuple_or(t: tuple):
for el in t:
if el > 0:
return 1
return 0
def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues, first_stage, last_stage):
"""
Returns two lists with binary values 0/1:
@ -317,12 +339,6 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_
transitions = []
n_sess_affected = 0
errors = {}
for issue in all_issues_with_context:
split = issue.split('__^__')
errors[issue] = {
"errors": [],
"issue_type": split[0],
"context": split[1]}
for row in rows:
t = 0
@ -330,38 +346,26 @@ def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_
last_ts = row[f'stage{last_stage}_timestamp']
if first_ts is None:
continue
elif first_ts is not None and last_ts is not None:
elif last_ts is not None:
t = 1
transitions.append(t)
ic_present = False
for issue_type_with_context in errors:
for error_id in all_issues:
if error_id not in errors:
errors[error_id] = []
ic = 0
issue_type = errors[issue_type_with_context]["issue_type"]
context = errors[issue_type_with_context]["context"]
if row['issue_type'] is not None:
row_issue_id = row['issue_id']
if row_issue_id is not None:
if last_ts is None or (first_ts < row['issue_timestamp'] < last_ts):
context_in_row = row['issue_context'] if row['issue_context'] is not None else ''
if issue_type == row['issue_type'] and context == context_in_row:
if error_id == row_issue_id:
ic = 1
ic_present = True
errors[issue_type_with_context]["errors"].append(ic)
errors[error_id].append(ic)
if ic_present and t:
n_sess_affected += 1
# def tuple_or(t: tuple):
# x = 0
# for el in t:
# x |= el
# return x
def tuple_or(t: tuple):
for el in t:
if el > 0:
return 1
return 0
errors = {key: errors[key]["errors"] for key in errors}
all_errors = [tuple_or(t) for t in zip(*errors.values())]
return transitions, errors, all_errors, n_sess_affected
@ -377,10 +381,9 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage):
"""
affected_users = defaultdict(lambda: set())
affected_sessions = defaultdict(lambda: set())
contexts = defaultdict(lambda: None)
all_issues = {}
n_affected_users_dict = defaultdict(lambda: None)
n_affected_sessions_dict = defaultdict(lambda: None)
all_issues_with_context = set()
n_issues_dict = defaultdict(lambda: 0)
issues_by_session = defaultdict(lambda: 0)
@ -396,15 +399,13 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage):
# check that the issue exists and belongs to subfunnel:
if iss is not None and (row[f'stage{last_stage}_timestamp'] is None or
(row[f'stage{first_stage}_timestamp'] < iss_ts < row[f'stage{last_stage}_timestamp'])):
context_string = row['issue_context'] if row['issue_context'] is not None else ''
issue_with_context = iss + '__^__' + context_string
contexts[issue_with_context] = {"context": context_string, "id": row["issue_id"]}
all_issues_with_context.add(issue_with_context)
n_issues_dict[issue_with_context] += 1
if row["issue_id"] not in all_issues:
all_issues[row["issue_id"]] = {"context": row['issue_context'], "issue_type": row["issue_type"]}
n_issues_dict[row["issue_id"]] += 1
if row['user_uuid'] is not None:
affected_users[issue_with_context].add(row['user_uuid'])
affected_users[row["issue_id"]].add(row['user_uuid'])
affected_sessions[issue_with_context].add(row['session_id'])
affected_sessions[row["issue_id"]].add(row['session_id'])
issues_by_session[row[f'session_id']] += 1
if len(affected_users) > 0:
@ -415,29 +416,28 @@ def get_affected_users_for_all_issues(rows, first_stage, last_stage):
n_affected_sessions_dict.update({
iss: len(affected_sessions[iss]) for iss in affected_sessions
})
return all_issues_with_context, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict, contexts
return all_issues, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict
def count_sessions(rows, n_stages):
session_counts = {i: set() for i in range(1, n_stages + 1)}
for ind, row in enumerate(rows):
for row in rows:
for i in range(1, n_stages + 1):
if row[f"stage{i}_timestamp"] is not None:
session_counts[i].add(row[f"session_id"])
session_counts = {i: len(session_counts[i]) for i in session_counts}
return session_counts
def count_users(rows, n_stages):
users_in_stages = defaultdict(lambda: set())
for ind, row in enumerate(rows):
users_in_stages = {i: set() for i in range(1, n_stages + 1)}
for row in rows:
for i in range(1, n_stages + 1):
if row[f"stage{i}_timestamp"] is not None:
users_in_stages[i].add(row["user_uuid"])
users_count = {i: len(users_in_stages[i]) for i in range(1, n_stages + 1)}
return users_count
@ -490,18 +490,18 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
last_stage = n_stages
n_critical_issues = 0
issues_dict = dict({"significant": [],
"insignificant": []})
issues_dict = {"significant": [],
"insignificant": []}
session_counts = count_sessions(rows, n_stages)
drop = session_counts[first_stage] - session_counts[last_stage]
all_issues_with_context, n_issues_dict, affected_users_dict, affected_sessions, contexts = get_affected_users_for_all_issues(
all_issues, n_issues_dict, affected_users_dict, affected_sessions = get_affected_users_for_all_issues(
rows, first_stage, last_stage)
transitions, errors, all_errors, n_sess_affected = get_transitions_and_issues_of_each_type(rows,
all_issues_with_context,
all_issues,
first_stage, last_stage)
# print("len(transitions) =", len(transitions))
del rows
if any(all_errors):
total_drop_corr, conf, is_sign = pearson_corr(transitions, all_errors)
@ -514,33 +514,35 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False)
if drop_only:
return total_drop_due_to_issues
for issue in all_issues_with_context:
for issue_id in all_issues:
if not any(errors[issue]):
if not any(errors[issue_id]):
continue
r, confidence, is_sign = pearson_corr(transitions, errors[issue])
r, confidence, is_sign = pearson_corr(transitions, errors[issue_id])
if r is not None and drop is not None and is_sign:
lost_conversions = int(r * affected_sessions[issue])
lost_conversions = int(r * affected_sessions[issue_id])
else:
lost_conversions = None
if r is None:
r = 0
split = issue.split('__^__')
issues_dict['significant' if is_sign else 'insignificant'].append({
"type": split[0],
"title": helper.get_issue_title(split[0]),
"affected_sessions": affected_sessions[issue],
"unaffected_sessions": session_counts[1] - affected_sessions[issue],
"type": all_issues[issue_id]["issue_type"],
"title": helper.get_issue_title(all_issues[issue_id]["issue_type"]),
"affected_sessions": affected_sessions[issue_id],
"unaffected_sessions": session_counts[1] - affected_sessions[issue_id],
"lost_conversions": lost_conversions,
"affected_users": affected_users_dict[issue],
"affected_users": affected_users_dict[issue_id],
"conversion_impact": round(r * 100),
"context_string": contexts[issue]["context"],
"issue_id": contexts[issue]["id"]
"context_string": all_issues[issue_id]["context"],
"issue_id": issue_id
})
if is_sign:
n_critical_issues += n_issues_dict[issue]
n_critical_issues += n_issues_dict[issue_id]
# To limit the number of returned issues to the frontend
issues_dict["significant"] = issues_dict["significant"][:20]
issues_dict["insignificant"] = issues_dict["insignificant"][:20]
return n_critical_issues, issues_dict, total_drop_due_to_issues

View file

@ -1,18 +1,18 @@
requests==2.28.1
urllib3==1.26.12
boto3==1.26.4
boto3==1.26.14
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.5.0
elasticsearch==8.5.1
jira==3.4.1
fastapi==0.86.0
uvicorn[standard]==0.19.0
fastapi==0.87.0
uvicorn[standard]==0.20.0
python-decouple==3.6
pydantic[email]==1.10.2
apscheduler==3.9.1
apscheduler==3.9.1.post1
clickhouse-driver==0.2.4
python-multipart==0.0.5

View file

@ -1,18 +1,18 @@
requests==2.28.1
urllib3==1.26.12
boto3==1.26.4
boto3==1.26.14
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.5.0
elasticsearch==8.5.1
jira==3.4.1
fastapi==0.86.0
uvicorn[standard]==0.19.0
fastapi==0.87.0
uvicorn[standard]==0.20.0
python-decouple==3.6
pydantic[email]==1.10.2
apscheduler==3.9.1
apscheduler==3.9.1.post1
clickhouse-driver==0.2.4
python-multipart==0.0.5

View file

@ -1,18 +1,18 @@
requests==2.28.1
urllib3==1.26.12
boto3==1.26.4
boto3==1.26.14
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.5.0
elasticsearch==8.5.1
jira==3.4.1
fastapi==0.86.0
uvicorn[standard]==0.19.0
fastapi==0.87.0
uvicorn[standard]==0.20.0
python-decouple==3.6
pydantic[email]==1.10.2
apscheduler==3.9.1
apscheduler==3.9.1.post1
clickhouse-driver==0.2.4
python3-saml==1.14.0

View file

@ -2,24 +2,24 @@ package kafka
import (
"log"
"openreplay/backend/pkg/messages"
"os"
"time"
"github.com/pkg/errors"
"gopkg.in/confluentinc/confluent-kafka-go.v1/kafka"
"openreplay/backend/pkg/env"
"openreplay/backend/pkg/messages"
"github.com/confluentinc/confluent-kafka-go/kafka"
"github.com/pkg/errors"
)
type Message = kafka.Message
type Consumer struct {
c *kafka.Consumer
messageIterator messages.MessageIterator
commitTicker *time.Ticker
pollTimeout uint
c *kafka.Consumer
messageIterator messages.MessageIterator
commitTicker *time.Ticker
pollTimeout uint
events chan interface{}
lastReceivedPrtTs map[int32]int64
}
@ -47,34 +47,64 @@ func NewConsumer(
kafkaConfig.SetKey("ssl.key.location", os.Getenv("KAFKA_SSL_KEY"))
kafkaConfig.SetKey("ssl.certificate.location", os.Getenv("KAFKA_SSL_CERT"))
}
// Apply Kerberos configuration
if env.Bool("KAFKA_USE_KERBEROS") {
kafkaConfig.SetKey("security.protocol", "sasl_plaintext")
kafkaConfig.SetKey("sasl.mechanisms", "GSSAPI")
kafkaConfig.SetKey("sasl.kerberos.service.name", os.Getenv("KERBEROS_SERVICE_NAME"))
kafkaConfig.SetKey("sasl.kerberos.principal", os.Getenv("KERBEROS_PRINCIPAL"))
kafkaConfig.SetKey("sasl.kerberos.keytab", os.Getenv("KERBEROS_KEYTAB_LOCATION"))
}
c, err := kafka.NewConsumer(kafkaConfig)
if err != nil {
log.Fatalln(err)
}
subREx := "^("
for i, t := range topics {
if i != 0 {
subREx += "|"
}
subREx += t
}
subREx += ")$"
if err := c.Subscribe(subREx, nil); err != nil {
log.Fatalln(err)
}
var commitTicker *time.Ticker
if autoCommit {
commitTicker = time.NewTicker(2 * time.Minute)
}
return &Consumer{
consumer := &Consumer{
c: c,
messageIterator: messageIterator,
commitTicker: commitTicker,
pollTimeout: 200,
lastReceivedPrtTs: make(map[int32]int64),
events: make(chan interface{}, 4),
lastReceivedPrtTs: make(map[int32]int64, 16),
}
subREx := "^("
for i, t := range topics {
if i != 0 {
subREx += "|"
}
subREx += t
}
subREx += ")$"
if err := c.Subscribe(subREx, consumer.reBalanceCallback); err != nil {
log.Fatalln(err)
}
return consumer
}
func (consumer *Consumer) reBalanceCallback(_ *kafka.Consumer, e kafka.Event) error {
switch evt := e.(type) {
case kafka.RevokedPartitions:
// receive before re-balancing partitions; stop consuming messages and commit current state
consumer.events <- evt.String()
case kafka.AssignedPartitions:
// receive after re-balancing partitions; continue consuming messages
//consumer.events <- evt.String()
}
return nil
}
func (consumer *Consumer) Rebalanced() <-chan interface{} {
return consumer.events
}
func (consumer *Consumer) Commit() error {

View file

@ -1,16 +1,15 @@
package kafka
import (
"log"
"fmt"
"log"
"gopkg.in/confluentinc/confluent-kafka-go.v1/kafka"
"github.com/confluentinc/confluent-kafka-go/kafka"
)
func logPartitions(s string, prts []kafka.TopicPartition) {
for _, p := range prts {
s = fmt.Sprintf("%v | %v", s, p.Partition)
}
log.Println(s)
}
}

View file

@ -5,7 +5,7 @@ import (
"log"
"os"
"gopkg.in/confluentinc/confluent-kafka-go.v1/kafka"
"github.com/confluentinc/confluent-kafka-go/kafka"
"openreplay/backend/pkg/env"
)
@ -30,6 +30,15 @@ func NewProducer(messageSizeLimit int, useBatch bool) *Producer {
kafkaConfig.SetKey("ssl.key.location", os.Getenv("KAFKA_SSL_KEY"))
kafkaConfig.SetKey("ssl.certificate.location", os.Getenv("KAFKA_SSL_CERT"))
}
// Apply Kerberos configuration
if env.Bool("KAFKA_USE_KERBEROS") {
kafkaConfig.SetKey("security.protocol", "sasl_plaintext")
kafkaConfig.SetKey("sasl.mechanisms", "GSSAPI")
kafkaConfig.SetKey("sasl.kerberos.service.name", os.Getenv("KERBEROS_SERVICE_NAME"))
kafkaConfig.SetKey("sasl.kerberos.principal", os.Getenv("KERBEROS_PRINCIPAL"))
kafkaConfig.SetKey("sasl.kerberos.keytab", os.Getenv("KERBEROS_KEYTAB_LOCATION"))
}
producer, err := kafka.NewProducer(kafkaConfig)
if err != nil {
log.Fatalln(err)

View file

@ -1,8 +1,8 @@
pandas==1.5.1
kafka-python==2.0.2
SQLAlchemy==1.4.43
snowflake-connector-python==2.8.1
snowflake-sqlalchemy==1.4.3
snowflake-connector-python==2.8.2
snowflake-sqlalchemy==1.4.4
PyYAML
asn1crypto==1.5.1
azure-common==1.1.28

View file

@ -74,4 +74,16 @@ DROP INDEX IF EXISTS events_common.requests_url_idx;
DROP INDEX IF EXISTS events_common.requests_url_gin_idx;
DROP INDEX IF EXISTS events_common.requests_url_gin_idx2;
COMMIT;
DROP INDEX IF EXISTS events.resources_url_gin_idx;
DROP INDEX IF EXISTS events.resources_url_idx;
UPDATE metrics
SET default_config=default_config || '{
"col": 4
}'::jsonb
WHERE NOT is_predefined
AND (metric_type = 'funnel' OR (metric_type = 'table' AND metric_of IN ('SESSIONS', 'js_exception')));
COMMIT;
CREATE INDEX CONCURRENTLY IF NOT EXISTS requests_session_id_status_code_nn_idx ON events_common.requests (session_id, status_code) WHERE status_code IS NOT NULL;

View file

@ -1229,6 +1229,7 @@ $$
CREATE INDEX IF NOT EXISTS requests_request_body_nn_gin_idx ON events_common.requests USING GIN (request_body gin_trgm_ops) WHERE request_body IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_response_body_nn_gin_idx ON events_common.requests USING GIN (response_body gin_trgm_ops) WHERE response_body IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_status_code_nn_idx ON events_common.requests (status_code) WHERE status_code IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_session_id_status_code_nn_idx ON events_common.requests (session_id, status_code) WHERE status_code IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_host_nn_idx ON events_common.requests (host) WHERE host IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_host_nn_gin_idx ON events_common.requests USING GIN (host gin_trgm_ops) WHERE host IS NOT NULL;
CREATE INDEX IF NOT EXISTS requests_path_nn_idx ON events_common.requests (path) WHERE path IS NOT NULL;

File diff suppressed because it is too large Load diff

View file

@ -23,4 +23,4 @@ MINIO_SECRET_KEY = ''
# APP and TRACKER VERSIONS
VERSION = '1.9.0'
TRACKER_VERSION = '4.1.6'
TRACKER_VERSION = '4.1.9'

View file

@ -126,8 +126,9 @@ class Router extends React.Component {
}
fetchInitialData = async () => {
const siteIdFromPath = parseInt(window.location.pathname.split("/")[1])
await this.props.fetchUserInfo()
await this.props.fetchSiteList()
await this.props.fetchSiteList(siteIdFromPath)
const { mstore } = this.props;
mstore.initClient();
};

View file

@ -2,7 +2,7 @@ import React, { useState } from 'react'
import stl from './ChatControls.module.css'
import cn from 'classnames'
import { Button, Icon } from 'UI'
import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream';
import type { LocalStream } from 'Player';
interface Props {

View file

@ -5,7 +5,7 @@ import Counter from 'App/components/shared/SessionItem/Counter';
import stl from './chatWindow.module.css';
import ChatControls from '../ChatControls/ChatControls';
import Draggable from 'react-draggable';
import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream';
import type { LocalStream } from 'Player';
import { toggleVideoLocalStream } from 'Player'
export interface Props {

View file

@ -3,7 +3,7 @@ import { Button, Tooltip } from 'UI';
import { connect } from 'react-redux';
import cn from 'classnames';
import { toggleChatWindow } from 'Duck/sessions';
import { connectPlayer } from 'Player/store';
import { connectPlayer } from 'Player';
import ChatWindow from '../../ChatWindow';
import {
callPeer,
@ -16,9 +16,9 @@ import {
CallingState,
ConnectionStatus,
RemoteControlStatus,
} from 'Player/MessageDistributor/managers/AssistManager';
import RequestLocalStream from 'Player/MessageDistributor/managers/LocalStream';
import type { LocalStream } from 'Player/MessageDistributor/managers/LocalStream';
RequestLocalStream,
} from 'Player';
import type { LocalStream } from 'Player';
import { toast } from 'react-toastify';
import { confirm } from 'UI';
import stl from './AassistActions.module.css';

View file

@ -46,7 +46,7 @@ function AuditView(props) {
]}
defaultValue={order}
plain
onChange={({ value }) => auditStore.updateKey('order', value)}
onChange={({ value }) => auditStore.updateKey('order', value.value)}
/>
</div>
<AuditSearchField onChange={(value) => auditStore.updateKey('searchQuery', value) }/>

View file

@ -2,6 +2,7 @@ import React from 'react';
import { Icon } from 'UI';
import { checkForRecent } from 'App/date';
import { withSiteId, alertEdit } from 'App/routes';
import { numberWithCommas } from 'App/utils';
// @ts-ignore
import { DateTime } from 'luxon';
import { withRouter, RouteComponentProps } from 'react-router-dom';
@ -108,7 +109,7 @@ function AlertListItem(props: Props) {
{' is '}
<span className="font-semibold" style={{ fontFamily: 'Menlo, Monaco, Consolas' }}>
{alert.query.operator}
{alert.query.right} {alert.metric.unit}
{numberWithCommas(alert.query.right)} {alert.metric.unit}
</span>
{' over the past '}
<span className="font-semibold" style={{ fontFamily: 'Menlo, Monaco, Consolas' }}>{getThreshold(alert.currentPeriod)}</span>

View file

@ -122,6 +122,9 @@ const NewAlert = (props: IProps) => {
) {
remove(instance.alertId).then(() => {
props.history.push(withSiteId(alerts(), siteId));
toast.success('Alert deleted');
}).catch(() => {
toast.error('Failed to delete an alert');
});
}
};
@ -135,6 +138,8 @@ const NewAlert = (props: IProps) => {
} else {
toast.success('Alert updated');
}
}).catch(() => {
toast.error('Failed to create an alert');
});
};

View file

@ -35,7 +35,7 @@ function DashboardsView({ history, siteId }: { history: any, siteId: string }) {
</div>
<div className="text-base text-disabled-text flex items-center px-6">
<Icon name="info-circle-fill" className="mr-2" size={16} />
A dashboard is a custom visualization using your OpenReplay data.
A Dashboard is a collection of Metrics that can be shared across teams.
</div>
<DashboardList />
</div>

View file

@ -49,7 +49,8 @@ function FunnelIssuesDropdown() {
}
}
const onClickOutside = () => {
const onClickOutside = (e: any) => {
if (e.target.id === 'dd-button') return;
if (isOpen) {
setTimeout(() => {
setIsOpen(false);
@ -85,21 +86,23 @@ function FunnelIssuesDropdown() {
IndicatorSeparator: (): any => null,
IndicatorsContainer: (): any => null,
Control: ({ children, ...props }: any) => (
<components.Control {...props}>
<OutsideClickDetectingDiv
// className={ cn("relative flex items-center", { "flex-1" : fullWidth }) }
onClickOutside={onClickOutside}
>
{ children }
<button
className="px-2 py-1 bg-white rounded-2xl border border-teal border-dashed color-teal flex items-center hover:bg-active-blue"
onClick={() => setIsOpen(!isOpen)}
>
<Icon name="funnel" size={16} color="teal" />
<span className="ml-2">Issues</span>
</button>
</OutsideClickDetectingDiv>
</components.Control>
<OutsideClickDetectingDiv
className="relative items-center block"
onClickOutside={onClickOutside}
>
<components.Control {...props}>
{ children }
<button
id="dd-button"
className="px-2 py-1 bg-white rounded-2xl border border-teal border-dashed color-teal flex items-center hover:bg-active-blue select-none"
onClick={() => setIsOpen(!isOpen)}
>
<Icon name="funnel" size={16} color="teal" className="pointer-events-none" />
<span className="ml-2 pointer-events-none">Issues</span>
</button>
</components.Control>
</OutsideClickDetectingDiv>
),
Placeholder: (): any => null,
SingleValue: (): any => null,

View file

@ -30,7 +30,7 @@ function MetricsView({ siteId }: Props) {
</div>
<div className="text-base text-disabled-text flex items-center px-6">
<Icon name="info-circle-fill" className="mr-2" size={16} />
Create custom Metrics to capture key interactions and track KPIs.
Create custom Metrics to capture user frustrations, monitor your app's performance and track other KPIs.
</div>
<MetricsList siteId={siteId} />
</div>

View file

@ -35,8 +35,10 @@ export default class ErrorInfo extends React.PureComponent {
componentDidMount() {
this.ensureInstance();
}
componentDidUpdate() {
this.ensureInstance();
componentDidUpdate(prevProps) {
if (prevProps.errorId !== this.props.errorId || prevProps.errorIdInStore !== this.props.errorIdInStore) {
this.ensureInstance();
}
}
next = () => {
const { list, errorId } = this.props;

View file

@ -3,24 +3,19 @@ import { useEffect, useState } from 'react';
import { connect } from 'react-redux';
import { Loader } from 'UI';
import { toggleFullscreen, closeBottomBlock } from 'Duck/components/player';
import { withRequest } from 'HOCs'
import {
PlayerProvider,
connectPlayer,
init as initPlayer,
clean as cleanPlayer,
} from 'Player';
import { withRequest } from 'HOCs';
import { PlayerProvider, connectPlayer, init as initPlayer, clean as cleanPlayer } from 'Player';
import withPermissions from 'HOCs/withPermissions';
import PlayerBlockHeader from '../Session_/PlayerBlockHeader';
import PlayerBlock from '../Session_/PlayerBlock';
import styles from '../Session_/session.module.css';
const InitLoader = connectPlayer(state => ({
loading: !state.initialized
const InitLoader = connectPlayer((state) => ({
loading: !state.initialized,
}))(Loader);
function LivePlayer ({
function LivePlayer({
session,
toggleFullscreen,
closeBottomBlock,
@ -30,68 +25,88 @@ function LivePlayer ({
request,
isEnterprise,
userEmail,
userName
userName,
}) {
const [fullView, setFullView] = useState(false);
useEffect(() => {
if (!loadingCredentials) {
const sessionWithAgentData = {
...session.toJS(),
agentInfo: {
email: userEmail,
name: userName,
},
}
};
initPlayer(sessionWithAgentData, assistCredendials, true);
}
return () => cleanPlayer()
}, [ session.sessionId, loadingCredentials, assistCredendials ]);
return () => cleanPlayer();
}, [session.sessionId, loadingCredentials, assistCredendials]);
// LAYOUT (TODO: local layout state - useContext or something..)
useEffect(() => {
const queryParams = new URLSearchParams(window.location.search);
if (queryParams.has('fullScreen') && queryParams.get('fullScreen') === 'true') {
setFullView(true);
}
if (isEnterprise) {
request();
}
return () => {
toggleFullscreen(false);
closeBottomBlock();
}
}, [])
};
}, []);
const TABS = {
EVENTS: 'User Steps',
HEATMAPS: 'Click Map',
}
};
const [activeTab, setActiveTab] = useState('');
return (
<PlayerProvider>
<InitLoader className="flex-1 p-3">
<PlayerBlockHeader activeTab={activeTab} setActiveTab={setActiveTab} tabs={TABS} fullscreen={fullscreen}/>
<div className={ styles.session } data-fullscreen={fullscreen}>
<PlayerBlock />
{!fullView && (
<PlayerBlockHeader
activeTab={activeTab}
setActiveTab={setActiveTab}
tabs={TABS}
fullscreen={fullscreen}
/>
)}
<div className={styles.session} data-fullscreen={fullscreen || fullView}>
<PlayerBlock fullView={fullView} />
</div>
</InitLoader>
</PlayerProvider>
);
};
}
export default withRequest({
initialData: null,
endpoint: '/assist/credentials',
dataWrapper: data => data,
dataWrapper: (data) => data,
dataName: 'assistCredendials',
loadingName: 'loadingCredentials',
})(withPermissions(['ASSIST_LIVE'], '', true)(connect(
state => {
return {
session: state.getIn([ 'sessions', 'current' ]),
showAssist: state.getIn([ 'sessions', 'showChatWindow' ]),
fullscreen: state.getIn([ 'components', 'player', 'fullscreen' ]),
isEnterprise: state.getIn([ 'user', 'account', 'edition' ]) === 'ee',
userEmail: state.getIn(['user', 'account', 'email']),
userName: state.getIn(['user', 'account', 'name']),
}
},
{ toggleFullscreen, closeBottomBlock },
)(LivePlayer)));
})(
withPermissions(
['ASSIST_LIVE'],
'',
true
)(
connect(
(state) => {
return {
session: state.getIn(['sessions', 'current']),
showAssist: state.getIn(['sessions', 'showChatWindow']),
fullscreen: state.getIn(['components', 'player', 'fullscreen']),
isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee',
userEmail: state.getIn(['user', 'account', 'email']),
userName: state.getIn(['user', 'account', 'name']),
};
},
{ toggleFullscreen, closeBottomBlock }
)(LivePlayer)
)
);

View file

@ -8,7 +8,6 @@ import { Link, NoContent, Loader } from 'UI';
import { sessions as sessionsRoute } from 'App/routes';
import withPermissions from 'HOCs/withPermissions'
import WebPlayer from './WebPlayer';
import IOSPlayer from './IOSPlayer';
import { useStore } from 'App/mstore';
const SESSIONS_ROUTE = sessionsRoute();
@ -49,10 +48,7 @@ function Session({
}
>
<Loader className="flex-1" loading={ loading || initializing }>
{ session.isIOS
? <IOSPlayer session={session} />
: <WebPlayer />
}
<WebPlayer />
</Loader>
</NoContent>
);

View file

@ -2,28 +2,54 @@ import React, { useEffect } from 'react';
import { connect } from 'react-redux';
import { setAutoplayValues } from 'Duck/sessions';
import { session as sessionRoute } from 'App/routes';
import { Link, Icon, Toggler, Tooltip } from 'UI';
import { connectPlayer } from 'Player/store';
import { Controls as PlayerControls } from 'Player';
import { Link, Icon, Tooltip } from 'UI';
import { withRouter, RouteComponentProps } from 'react-router-dom';
import cn from 'classnames';
import { fetchAutoplaySessions } from 'Duck/search';
function Autoplay(props) {
const { previousId, nextId, autoplay, disabled } = props;
const PER_PAGE = 10;
interface Props extends RouteComponentProps {
previousId: string;
nextId: string;
defaultList: any;
currentPage: number;
total: number;
setAutoplayValues?: () => void;
latestRequestTime: any;
sessionIds: any;
fetchAutoplaySessions?: (page: number) => Promise<void>;
}
function Autoplay(props: Props) {
const {
previousId,
nextId,
currentPage,
total,
sessionIds,
latestRequestTime,
match: {
// @ts-ignore
params: { siteId, sessionId },
},
} = props;
const disabled = sessionIds.length === 0;
useEffect(() => {
props.setAutoplayValues();
if (latestRequestTime) {
props.setAutoplayValues();
const totalPages = Math.ceil(total / PER_PAGE);
const index = sessionIds.indexOf(sessionId);
// check for the last page and load the next
if (currentPage !== totalPages && index === sessionIds.length - 1) {
props.fetchAutoplaySessions(currentPage + 1).then(props.setAutoplayValues);
}
}
}, []);
return (
<div className="flex items-center">
<div
onClick={props.toggleAutoplay}
className="cursor-pointer flex items-center mr-2 hover:bg-gray-light-shade rounded-md p-2"
>
<Toggler name="sessionsLive" onChange={props.toggleAutoplay} checked={autoplay} />
<span className="ml-2 whitespace-nowrap">Auto-Play</span>
</div>
<Tooltip
placement="bottom"
title={<div className="whitespace-nowrap">Play Previous Session</div>}
@ -63,21 +89,14 @@ function Autoplay(props) {
);
}
const connectAutoplay = connect(
(state) => ({
export default connect(
(state: any) => ({
previousId: state.getIn(['sessions', 'previousId']),
nextId: state.getIn(['sessions', 'nextId']),
currentPage: state.getIn(['search', 'currentPage']) || 1,
total: state.getIn(['sessions', 'total']) || 0,
sessionIds: state.getIn(['sessions', 'sessionIds']) || [],
latestRequestTime: state.getIn(['search', 'latestRequestTime']),
}),
{ setAutoplayValues }
);
export default connectAutoplay(
connectPlayer(
(state) => ({
autoplay: state.autoplay,
}),
{
toggleAutoplay: PlayerControls.toggleAutoplay,
}
)(Autoplay)
);
{ setAutoplayValues, fetchAutoplaySessions }
)(withRouter(Autoplay));

View file

@ -50,7 +50,7 @@ function Step({ step, ind, isDefault }: { step: IStep; ind: number; isDefault?:
>
<div className="rounded-3xl px-4 bg-gray-lightest relative z-10">{ind + 1}</div>
<div className="w-full">
<div className="flex items-center w-full gap-2">
<div className="flex items-start w-full gap-2">
<div className="px-1 text-disabled-text">{durationFromMs(step.time)}</div>
{/* @ts-ignore */}
<Icon name={step.icon} size={16} color="gray-darkest" className="relative z-10" />

View file

@ -70,7 +70,7 @@ function XRay({ xrayProps, timePointer, stepPickRadius, clearEventSelection, set
<>
<div className="flex items-center justify-between my-2">
<div className=" text-gray-dark py-2">
XRAY
X-RAY
{timePointer > 0 ? (
<span className="text-disabled-text ml-2">
{Duration.fromMillis(selectedTime).toFormat('hh:mm:ss')}
@ -79,14 +79,11 @@ function XRay({ xrayProps, timePointer, stepPickRadius, clearEventSelection, set
</div>
{!shouldShowPointerReset ? (
<div
className="flex items-center gap-2 rounded bg-active-blue px-2 py-1 whitespace-nowrap overflow-hidden text-clip"
className="flex items-center gap-2 rounded bg-active-blue px-2 py-1 whitespace-nowrap overflow-hidden text-clip group"
id="pdf-ignore"
>
<Icon name="info-circle" size={16} />
<div>
Click anywhere on <span className="font-semibold">X-RAY</span> to drilldown and add
steps
</div>
<div>Click anywhere in the graph below to drilldown and add steps</div>
</div>
) : (
<Button id="pdf-ignore" variant="text-primary" onClick={clearEventSelection}>
@ -94,7 +91,11 @@ function XRay({ xrayProps, timePointer, stepPickRadius, clearEventSelection, set
</Button>
)}
</div>
<div className="relative cursor-pointer" onClick={pickEventRadius} ref={xrayContainer}>
<div
className="relative cursor-pointer group-hover:border-dotted hover:border-dotted group-hover:border-gray-dark hover:border-gray-dark border border-transparent"
onClick={pickEventRadius}
ref={xrayContainer}
>
<div
id="pdf-ignore"
style={{

View file

@ -9,12 +9,14 @@ interface Props {
iconProps: any;
jump?: any;
renderWithNL?: any;
style?: any;
}
function ConsoleRow(props: Props) {
const { log, iconProps, jump, renderWithNL } = props;
const { log, iconProps, jump, renderWithNL, style } = props;
const [expanded, setExpanded] = useState(false);
const lines = log.value.split('\n').filter((l: any) => !!l);
const canExpand = lines.length > 1;
return (
<div
className={cn(stl.line, 'flex py-2 px-4 overflow-hidden group relative select-none', {
@ -23,6 +25,7 @@ function ConsoleRow(props: Props) {
error: log.isRed(),
'cursor-pointer': canExpand,
})}
style={style}
onClick={() => setExpanded(!expanded)}
>
<div className={cn(stl.timestamp)}>
@ -38,7 +41,7 @@ function ConsoleRow(props: Props) {
)}
<span>{renderWithNL(lines.pop())}</span>
</div>
{canExpand && expanded && lines.map((l: any) => <div className="ml-4 mb-1">{l}</div>)}
{canExpand && expanded && lines.map((l: any, i: number) => <div key={l.slice(0,3)+i} className="ml-4 mb-1">{l}</div>)}
</div>
<JumpButton onClick={() => jump(log.time)} />
</div>

View file

@ -5,7 +5,7 @@ import { countries } from 'App/constants';
import { useStore } from 'App/mstore';
import { browserIcon, osIcon, deviceTypeIcon } from 'App/iconNames';
import { formatTimeOrDate } from 'App/date';
import { Avatar, TextEllipsis, CountryFlag, Icon, Tooltip } from 'UI';
import { Avatar, TextEllipsis, CountryFlag, Icon, Tooltip, Popover } from 'UI';
import cn from 'classnames';
import { withRequest } from 'HOCs';
import SessionInfoItem from '../../SessionInfoItem';
@ -75,15 +75,9 @@ function UserCard({ className, request, session, width, height, similarSessions,
{userBrowser}, {userOs}, {userDevice}
</span>
<span className="mx-1 font-bold text-xl">&#183;</span>
<Tooltip
// theme="light"
delay={0}
// hideOnClick="persistent"
// arrow
// interactive
className="!bg-white shadow border !color-gray-dark"
title={
<div className="text-left">
<Popover
render={() => (
<div className="text-left bg-white">
<SessionInfoItem
comp={<CountryFlag country={userCountry} />}
label={countries[userCountry]}
@ -99,14 +93,10 @@ function UserCard({ className, request, session, width, height, similarSessions,
/>
{revId && <SessionInfoItem icon="info" label="Rev ID:" value={revId} isLast />}
</div>
}
position="bottom"
// hoverable
// disabled={false}
on="hover"
)}
>
<span className="color-teal cursor-pointer">More</span>
</Tooltip>
<span className="link">More</span>
</Popover>
</div>
</div>
</div>

View file

@ -18,7 +18,6 @@ const SelectedValue = ({ icon, text }) => {
class IssueForm extends React.PureComponent {
componentDidMount() {
const { projects, issueTypes } = this.props;
this.props.init({
projectId: projects[0] ? projects[0].id : '',
issueType: issueTypes[0] ? issueTypes[0].id : '',
@ -27,8 +26,8 @@ class IssueForm extends React.PureComponent {
componentWillReceiveProps(newProps) {
const { instance } = this.props;
if (instance.projectId && newProps.instance.projectId != instance.projectId) {
this.props.fetchMeta(instance.projectId).then(() => {
if (newProps.instance.projectId && newProps.instance.projectId != instance.projectId) {
this.props.fetchMeta(newProps.instance.projectId).then(() => {
this.props.edit({ issueType: '', assignee: '', projectId: newProps.instance.projectId });
});
}
@ -76,8 +75,6 @@ class IssueForm extends React.PureComponent {
const selectedIssueType = issueTypes.filter((issue) => issue.id == instance.issueType)[0];
console.log('instance', instance);
return (
<Loader loading={projectsLoading} size={40}>
<Form onSubmit={this.onSubmit} className="text-left">
@ -89,7 +86,7 @@ class IssueForm extends React.PureComponent {
<Select
name="projectId"
options={projectOptions}
// value={instance.projectId}
defaultValue={instance.projectId}
fluid
onChange={this.writeOption}
placeholder="Project"
@ -102,7 +99,7 @@ class IssueForm extends React.PureComponent {
name="issueType"
labeled
options={issueTypeOptions}
value={instance.issueType}
defaultValue={instance.issueType}
fluid
onChange={this.writeOption}
placeholder="Select issue type"

View file

@ -1,6 +1,6 @@
import React from 'react';
import { connect } from 'react-redux';
import { Icon, Popover } from 'UI';
import { Icon, Popover, Button } from 'UI';
import IssuesModal from './IssuesModal';
import { fetchProjects, fetchMeta } from 'Duck/assignments';
import stl from './issues.module.css';
@ -67,29 +67,27 @@ class Issues extends React.Component {
const provider = issuesIntegration.provider;
return (
<div className="relative h-full w-full p-3">
<div className={stl.buttonWrapper}>
<Popover
render={({ close }) => (
<div>
<IssuesModal
provider={provider}
sessionId={sessionId}
closeHandler={close}
/>
</div>
)}
>
<div
className="flex items-center"
disabled={!isModalDisplayed && (metaLoading || fetchIssuesLoading || projectsLoading)}
>
<Icon name={`integrations/${provider === 'jira' ? 'jira' : 'github'}`} size="16" />
<span className="ml-2">Create Issue</span>
</div>
</Popover>
<Popover
onOpen={this.handleOpen}
render={({ close }) => (
<div>
<IssuesModal provider={provider} sessionId={sessionId} closeHandler={close} />
</div>
)}
>
<div className="relative">
<Button icon={`integrations/${provider === 'jira' ? 'jira' : 'github'}`} variant="text">
Create Issue
</Button>
</div>
</div>
{/* <div
className="flex items-center cursor-pointer"
disabled={!isModalDisplayed && (metaLoading || fetchIssuesLoading || projectsLoading)}
>
<Icon name={`integrations/${provider === 'jira' ? 'jira' : 'github'}`} size="16" />
<span className="ml-2 whitespace-nowrap">Create Issue</span>
</div> */}
</Popover>
);
}
}

View file

@ -1,4 +1,4 @@
import { connectPlayer } from 'App/player';
import { connectPlayer } from 'Player';
import { toggleBottomBlock } from 'Duck/components/player';
import React, { useEffect } from 'react';
import BottomBlock from '../BottomBlock';
@ -23,8 +23,10 @@ interface Props {
issuesList: any[];
performanceChartData: any;
endTime: number;
fetchPresented?: boolean;
}
function OverviewPanel(props: Props) {
const { fetchPresented = false } = props;
const [dataLoaded, setDataLoaded] = React.useState(false);
const [selectedFeatures, setSelectedFeatures] = React.useState([
'PERFORMANCE',
@ -86,7 +88,10 @@ function OverviewPanel(props: Props) {
<BottomBlock.Content>
<OverviewPanelContainer endTime={props.endTime}>
<TimelineScale endTime={props.endTime} />
<div style={{ width: '100%', height: '187px', overflow: 'hidden' }} className="transition relative">
<div
style={{ width: 'calc(100vw - 1rem)', margin: '0 auto', height: '187px' }}
className="transition relative"
>
<NoContent
show={selectedFeatures.length === 0}
title={
@ -107,7 +112,11 @@ function OverviewPanel(props: Props) {
title={feature}
list={resources[feature]}
renderElement={(pointer: any) => (
<TimelinePointer pointer={pointer} type={feature} />
<TimelinePointer
pointer={pointer}
type={feature}
fetchPresented={fetchPresented}
/>
)}
endTime={props.endTime}
message={HELP_MESSAGE[feature]}
@ -132,6 +141,7 @@ export default connect(
}
)(
connectPlayer((state: any) => ({
fetchPresented: state.fetchList.length > 0,
resourceList: state.resourceList
.filter((r: any) => r.isRed() || r.isYellow())
.concat(state.fetchList.filter((i: any) => parseInt(i.status) >= 400))

View file

@ -37,7 +37,7 @@ const EventRow = React.memo((props: Props) => {
<div
className={cn(
'uppercase color-gray-medium text-sm flex items-center py-1',
props.noMargin ? '' : 'ml-4'
props.noMargin ? '' : 'ml-2'
)}
>
<div
@ -46,7 +46,7 @@ const EventRow = React.memo((props: Props) => {
>
{title}
</div>
{message ? <RowInfo zIndex={props.zIndex} message={message} /> : null}
{message ? <RowInfo message={message} /> : null}
</div>
<div className="relative w-full" style={{ zIndex: props.zIndex ? props.zIndex : undefined }}>
{isGraph ? (
@ -78,9 +78,9 @@ const EventRow = React.memo((props: Props) => {
export default EventRow;
function RowInfo({ message, zIndex }: any) {
function RowInfo({ message }: any) {
return (
<Tooltip title={message} delay={0} style={{ zIndex: zIndex ? zIndex : undefined }}>
<Tooltip title={message} delay={0}>
<Icon name="info-circle" color="gray-medium" />
</Tooltip>
);

View file

@ -30,7 +30,7 @@ function FeatureSelection(props: Props) {
const checked = list.includes(feature);
const _disabled = disabled && !checked;
return (
<Tooltip title="X-RAY supports up to 3 views" disabled={!_disabled} delay={0}>
<Tooltip key={index} title="X-RAY supports up to 3 views" disabled={!_disabled} delay={0}>
<Checkbox
key={index}
label={feature}

View file

@ -1,6 +1,6 @@
import React from 'react';
import VerticalLine from '../VerticalLine';
import { connectPlayer, Controls } from 'App/player';
import { connectPlayer, Controls } from 'Player';
interface Props {
children: React.ReactNode;

View file

@ -1,5 +1,5 @@
import React from 'react';
import { connectPlayer } from 'App/player';
import { connectPlayer } from 'Player';
import { AreaChart, Area, Tooltip, ResponsiveContainer } from 'recharts';
interface Props {

View file

@ -1,5 +1,5 @@
import React from 'react';
import { Controls } from 'App/player';
import { Controls } from 'Player';
import { NETWORK, EXCEPTIONS } from 'Duck/components/player';
import { useModal } from 'App/components/Modal';
import { Icon, Tooltip } from 'UI';
@ -12,6 +12,7 @@ interface Props {
pointer: any;
type: any;
noClick?: boolean;
fetchPresented?: boolean;
}
const TimelinePointer = React.memo((props: Props) => {
const { showModal } = useModal();
@ -35,7 +36,7 @@ const TimelinePointer = React.memo((props: Props) => {
if (pointer.tp === 'graph_ql') {
showModal(<GraphQLDetailsModal resource={pointer} />, { right: true });
} else {
showModal(<FetchDetails resource={pointer} />, { right: true });
showModal(<FetchDetails resource={pointer} fetchPresented={props.fetchPresented} />, { right: true });
}
}
// props.toggleBottomBlock(type);
@ -47,7 +48,7 @@ const TimelinePointer = React.memo((props: Props) => {
<Tooltip
title={
<div className="">
<b>{item.success ? 'Slow resource: ' : 'Missing resource:'}</b>
<b>{item.success ? 'Slow resource: ' : '4xx/5xx Error:'}</b>
<br />
{name.length > 200
? name.slice(0, 100) + ' ... ' + name.slice(-50)

View file

@ -1,5 +1,5 @@
import React from 'react';
import { connectPlayer } from 'App/player';
import { connectPlayer } from 'Player';
import { millisToMinutesAndSeconds } from 'App/utils';
interface Props {

View file

@ -1,10 +1,10 @@
import React from 'react';
import { connectPlayer } from 'App/player';
import { connectPlayer } from 'Player';
import VerticalLine from '../VerticalLine';
interface Props {
time: number;
scale: number;
time?: number;
scale?: number;
}
function VerticalPointerLine(props: Props) {
const { time, scale } = props;

View file

@ -1,7 +1,7 @@
import React, { useState } from 'react';
import stl from './SelectorCard.module.css';
import cn from 'classnames';
import type { MarkedTarget } from 'Player/MessageDistributor/StatedScreen/StatedScreen';
import type { MarkedTarget } from 'Player';
import { activeTarget } from 'Player';
import { Tooltip } from 'react-tippy';

View file

@ -1,8 +1,8 @@
import React from 'react';
import { NoContent } from 'UI';
import { connectPlayer } from 'Player/store';
import { connectPlayer } from 'Player';
import SelectorCard from '../SelectorCard/SelectorCard';
import type { MarkedTarget } from 'Player/MessageDistributor/StatedScreen/StatedScreen';
import type { MarkedTarget } from 'Player';
import stl from './selectorList.module.css';
interface Props {

View file

@ -6,7 +6,7 @@ import {
STORAGE_TYPES,
selectStorageType,
selectStorageListNow,
} from 'Player/store';
} from 'Player';
import LiveTag from 'Shared/LiveTag';
import { jumpToLive } from 'Player';
@ -76,13 +76,13 @@ function getStorageName(type) {
inspectorMode: state.inspectorMode,
fullscreenDisabled: state.messagesLoading,
// logCount: state.logList.length,
logRedCount: state.logRedCount,
logRedCount: state.logMarkedCount,
showExceptions: state.exceptionsList.length > 0,
resourceRedCount: state.resourceRedCount,
fetchRedCount: state.fetchRedCount,
resourceRedCount: state.resourceMarkedCount,
fetchRedCount: state.fetchMarkedCount,
showStack: state.stackList.length > 0,
stackCount: state.stackList.length,
stackRedCount: state.stackRedCount,
stackRedCount: state.stackMarkedCount,
profilesCount: state.profilesList.length,
storageCount: selectStorageListNow(state).length,
storageType: selectStorageType(state),
@ -203,7 +203,7 @@ export default class Controls extends React.Component {
backTenSeconds = () => {
//shouldComponentUpdate
const { time, jump, skipInterval } = this.props;
jump(Math.max(0, time - SKIP_INTERVALS[skipInterval]));
jump(Math.max(1, time - SKIP_INTERVALS[skipInterval]));
};
goLive = () => this.props.jump(this.props.endTime);
@ -242,7 +242,7 @@ export default class Controls extends React.Component {
controlIcon = (icon, size, action, isBackwards, additionalClasses) => (
<div
onClick={action}
className={cn('py-1 px-2 hover-main cursor-pointer bg-gray-lightest', additionalClasses)}
className={cn('py-2 px-2 hover-main cursor-pointer bg-gray-lightest', additionalClasses)}
style={{ transform: isBackwards ? 'rotate(180deg)' : '' }}
>
<Icon name={icon} size={size} color="inherit" />
@ -261,6 +261,7 @@ export default class Controls extends React.Component {
logRedCount,
showExceptions,
resourceRedCount,
fetchRedCount,
showStack,
stackRedCount,
showStorage,
@ -352,7 +353,7 @@ export default class Controls extends React.Component {
onClick={() => toggleBottomTools(NETWORK)}
active={bottomBlock === NETWORK && !inspectorMode}
label="NETWORK"
hasErrors={resourceRedCount > 0}
hasErrors={resourceRedCount > 0 || fetchRedCount > 0}
noIcon
labelClassName="!text-base font-semibold"
containerClassName="mx-2"

View file

@ -26,8 +26,7 @@ let debounceTooltipChange = () => null;
skipIntervals: state.skipIntervals,
events: state.eventList,
skip: state.skip,
// not updating properly rn
// skipToIssue: state.skipToIssue,
skipToIssue: state.skipToIssue,
disabled: state.cssLoading || state.messagesLoading || state.markedTargets,
endTime: state.endTime,
live: state.live,
@ -85,8 +84,7 @@ export default class Timeline extends React.PureComponent {
};
componentDidMount() {
const { issues } = this.props;
const skipToIssue = Controls.updateSkipToIssue();
const { issues, skipToIssue } = this.props;
const firstIssue = issues.get(0);
deboucneJump = debounce(this.props.jump, 500);
debounceTooltipChange = debounce(this.props.setTimelineHoverTime, 50);

View file

@ -93,12 +93,11 @@ function PlayerControls(props: Props) {
)}
<div className="rounded ml-4 bg-active-blue border border-active-blue-border flex items-stretch">
{/* @ts-ignore */}
<Tooltip title="Rewind 10s" position="top">
<button
ref={arrowBackRef}
className="h-full hover:border-active-blue-border focus:border focus:border-blue border-borderColor-transparent"
>
<button
ref={arrowBackRef}
className="h-full hover:border-active-blue-border focus:border focus:border-blue border-borderColor-transparent"
>
<Tooltip title="Rewind 10s">
{controlIcon(
'skip-forward-fill',
18,
@ -106,58 +105,57 @@ function PlayerControls(props: Props) {
true,
'hover:bg-active-blue-border color-main h-full flex items-center'
)}
</button>
</Tooltip>
<div className="p-1 border-l border-r bg-active-blue-border border-active-blue-border">
<OutsideClickDetectingDiv onClickOutside={handleClickOutside}>
<Popover
// open={showTooltip}
// interactive
// @ts-ignore
theme="nopadding"
animation="none"
duration={0}
className="cursor-pointer select-none"
distance={20}
render={() => (
<div className="flex flex-col bg-white border border-borderColor-gray-light-shade text-figmaColors-text-primary rounded">
<div className="font-semibold py-2 px-4 w-full text-left">
Jump <span className="text-disabled-text">(Secs)</span>
</div>
{Object.keys(skipIntervals).map((interval) => (
<div
key={interval}
onClick={() => {
toggleTooltip();
setSkipInterval(parseInt(interval, 10));
}}
className={cn(
'py-2 px-4 cursor-pointer w-full text-left font-semibold',
'hover:bg-active-blue border-t border-borderColor-gray-light-shade'
)}
>
{interval}
<span className="text-disabled-text">s</span>
</div>
))}
</Tooltip>
</button>
<div className="p-1 border-l border-r bg-active-blue-border border-active-blue-border flex items-center">
<Popover
// open={showTooltip}
// interactive
// @ts-ignore
theme="nopadding"
animation="none"
duration={0}
className="cursor-pointer select-none"
distance={20}
render={({ close }: any) => (
<div className="flex flex-col bg-white border border-borderColor-gray-light-shade text-figmaColors-text-primary rounded">
<div className="font-semibold py-2 px-4 w-full text-left">
Jump <span className="text-disabled-text">(Secs)</span>
</div>
)}
>
<div onClick={toggleTooltip} ref={skipRef}>
{/* @ts-ignore */}
<Tooltip disabled={showTooltip} title="Set default skip duration">
{currentInterval}s
</Tooltip>
{Object.keys(skipIntervals).map((interval) => (
<div
key={interval}
onClick={() => {
close();
setSkipInterval(parseInt(interval, 10));
}}
className={cn(
'py-2 px-4 cursor-pointer w-full text-left font-semibold',
'hover:bg-active-blue border-t border-borderColor-gray-light-shade'
)}
>
{interval}
<span className="text-disabled-text">s</span>
</div>
))}
</div>
</Popover>
</OutsideClickDetectingDiv>
</div>
{/* @ts-ignore */}
<Tooltip title="Forward 10s" position="top">
<button
ref={arrowForwardRef}
className="h-full hover:border-active-blue-border focus:border focus:border-blue border-borderColor-transparent"
)}
>
<div onClick={toggleTooltip} ref={skipRef} className="cursor-pointer select-none">
{/* @ts-ignore */}
<Tooltip disabled={showTooltip} title="Set default skip duration">
{currentInterval}s
</Tooltip>
</div>
</Popover>
</div>
<button
ref={arrowForwardRef}
className="h-full hover:border-active-blue-border focus:border focus:border-blue border-borderColor-transparent"
>
<Tooltip title="Forward 10s">
{controlIcon(
'skip-forward-fill',
18,
@ -165,8 +163,8 @@ function PlayerControls(props: Props) {
false,
'hover:bg-active-blue-border color-main h-full flex items-center'
)}
</button>
</Tooltip>
</Tooltip>
</button>
</div>
{!live && (

View file

@ -67,7 +67,7 @@ function ReadNote(props: Props) {
<Icon name="close" size={18} />
</div>
</div>
<div className="text-xl py-3 overflow-y-scroll capitalize-first" style={{ maxHeight: 400 }}>
<div className="text-xl py-3 overflow-y-auto capitalize-first" style={{ maxHeight: 400 }}>
{props.note.message}
</div>
<div className="w-full">

View file

@ -1,8 +1,8 @@
import React from 'react';
import { connectPlayer } from 'Player';
import { getStatusText } from 'Player/MessageDistributor/managers/AssistManager';
import type { MarkedTarget } from 'Player/MessageDistributor/StatedScreen/StatedScreen';
import { CallingState, ConnectionStatus, RemoteControlStatus } from 'Player/MessageDistributor/managers/AssistManager';
import { getStatusText } from 'Player';
import type { MarkedTarget } from 'Player';
import { CallingState, ConnectionStatus, RemoteControlStatus } from 'Player';
import AutoplayTimer from './Overlay/AutoplayTimer';
import PlayIconLayer from './Overlay/PlayIconLayer';

View file

@ -1,9 +1,9 @@
import React, { useEffect, useState } from 'react'
import React, { useEffect, useState } from 'react';
import cn from 'classnames';
import { connect } from 'react-redux'
import { connect } from 'react-redux';
import { withRouter, RouteComponentProps } from 'react-router-dom';
import { Button, Link } from 'UI'
import { session as sessionRoute, withSiteId } from 'App/routes'
import { Button, Link, Icon } from 'UI';
import { session as sessionRoute, withSiteId } from 'App/routes';
import stl from './AutoplayTimer.module.css';
import clsOv from './overlay.module.css';
@ -13,49 +13,55 @@ interface IProps extends RouteComponentProps {
}
function AutoplayTimer({ nextId, siteId, history }: IProps) {
let timer: NodeJS.Timer
let timer: NodeJS.Timer;
const [cancelled, setCancelled] = useState(false);
const [counter, setCounter] = useState(5);
useEffect(() => {
if(counter > 0) {
if (counter > 0) {
timer = setTimeout(() => {
setCounter(counter - 1)
}, 1000)
setCounter(counter - 1);
}, 1000);
}
if (counter === 0) {
history.push(withSiteId(sessionRoute(nextId), siteId))
history.push(withSiteId(sessionRoute(nextId), siteId));
}
return () => clearTimeout(timer);
}, [counter])
}, [counter]);
const cancel = () => {
clearTimeout(timer)
setCancelled(true)
}
clearTimeout(timer);
setCancelled(true);
};
if (cancelled)
return null
if (cancelled) return null;
return (
<div className={ cn(clsOv.overlay, stl.overlayBg) } >
<div className={cn(clsOv.overlay, stl.overlayBg)}>
<div className="border p-6 shadow-lg bg-white rounded">
<div className="py-4">Next recording will be played in {counter}s</div>
<div className="flex items-center">
<Button primary="outline" onClick={cancel}>Cancel</Button>
<Button primary="outline" onClick={cancel}>
Cancel
</Button>
<div className="px-3" />
<Link to={ sessionRoute(nextId) } disabled={!nextId}>
<Link to={sessionRoute(nextId)} disabled={!nextId}>
<Button variant="primary">Play Now</Button>
</Link>
</div>
<div className="mt-2 flex items-center color-gray-dark">
Turn on/off auto-replay in <Icon name="ellipsis-v" className="mx-1" /> More options
</div>
</div>
</div>
)
);
}
export default withRouter(connect(state => ({
siteId: state.getIn([ 'site', 'siteId' ]),
nextId: parseInt(state.getIn([ 'sessions', 'nextId' ])),
}))(AutoplayTimer))
export default withRouter(
connect((state: any) => ({
siteId: state.getIn(['site', 'siteId']),
nextId: parseInt(state.getIn(['sessions', 'nextId'])),
}))(AutoplayTimer)
);

Some files were not shown because too many files have changed in this diff Show more