commit 47ad8499e8 ("fix conflicts")
203 changed files with 3578 additions and 6925 deletions
.github/workflows/peers-ee.yaml (vendored, deleted, 150 lines)
@@ -1,150 +0,0 @@
# This action will push the peers changes to aws
on:
  workflow_dispatch:
    inputs:
      skip_security_checks:
        description: "Skip Security checks if there is a unfixable vuln or error. Value: true/false"
        required: false
        default: "false"
  push:
    branches:
      - dev
    paths:
      - "ee/peers/**"
      - "peers/**"
      - "!peers/.gitignore"
      - "!peers/*-dev.sh"

name: Build and Deploy Peers EE

jobs:
  deploy:
    name: Deploy
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v2
        with:
          # We need to diff with old commit
          # to see which workers got changed.
          fetch-depth: 2

      - uses: ./.github/composite-actions/update-keys
        with:
          assist_jwt_secret: ${{ secrets.ASSIST_JWT_SECRET }}
          assist_key: ${{ secrets.ASSIST_KEY }}
          domain_name: ${{ secrets.EE_DOMAIN_NAME }}
          jwt_refresh_secret: ${{ secrets.JWT_REFRESH_SECRET }}
          jwt_secret: ${{ secrets.EE_JWT_SECRET }}
          jwt_spot_refresh_secret: ${{ secrets.JWT_SPOT_REFRESH_SECRET }}
          jwt_spot_secret: ${{ secrets.JWT_SPOT_SECRET }}
          license_key: ${{ secrets.EE_LICENSE_KEY }}
          minio_access_key: ${{ secrets.EE_MINIO_ACCESS_KEY }}
          minio_secret_key: ${{ secrets.EE_MINIO_SECRET_KEY }}
          pg_password: ${{ secrets.EE_PG_PASSWORD }}
          registry_url: ${{ secrets.OSS_REGISTRY_URL }}
        name: Update Keys

      - name: Docker login
        run: |
          docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"

      - uses: azure/k8s-set-context@v1
        with:
          method: kubeconfig
          kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
        id: setcontext

      # Caching docker images
      - uses: satackey/action-docker-layer-caching@v0.0.11
        # Ignore the failure of a step and avoid terminating the job.
        continue-on-error: true

      - name: Building and Pushing peers image
        id: build-image
        env:
          DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
          ENVIRONMENT: staging
        run: |
          skip_security_checks=${{ github.event.inputs.skip_security_checks }}
          cd peers
          PUSH_IMAGE=0 bash -x ./build.sh ee
          [[ "x$skip_security_checks" == "xtrue" ]] || {
            curl -L https://github.com/aquasecurity/trivy/releases/download/v0.56.2/trivy_0.56.2_Linux-64bit.tar.gz | tar -xzf - -C ./
            images=("peers")
            for image in ${images[*]};do
              ./trivy image --db-repository ghcr.io/aquasecurity/trivy-db:2 --db-repository public.ecr.aws/aquasecurity/trivy-db:2 --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
            done
            err_code=$?
            [[ $err_code -ne 0 ]] && {
              exit $err_code
            }
          } && {
            echo "Skipping Security Checks"
          }
          images=("peers")
          for image in ${images[*]};do
            docker push $DOCKER_REPO/$image:$IMAGE_TAG
          done
      - name: Creating old image input
        run: |
          #
          # Create yaml with existing image tags
          #
          kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
          tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt

          echo > /tmp/image_override.yaml

          for line in `cat /tmp/image_tag.txt`;
          do
            image_array=($(echo "$line" | tr ':' '\n'))
            cat <<EOF >> /tmp/image_override.yaml
          ${image_array[0]}:
            image:
              # We've to strip off the -ee, as helm will append it.
              tag: `echo ${image_array[1]} | cut -d '-' -f 1`
          EOF
          done

      - name: Deploy to kubernetes
        run: |
          cd scripts/helmcharts/

          # Update changed image tag
          sed -i "/peers/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml

          cat /tmp/image_override.yaml
          # Deploy command
          mkdir -p /tmp/charts
          mv openreplay/charts/{ingress-nginx,peers,quickwit,connector} /tmp/charts/
          rm -rf openreplay/charts/*
          mv /tmp/charts/* openreplay/charts/
          helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
        env:
          DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
          ENVIRONMENT: staging

      - name: Alert slack
        if: ${{ failure() }}
        uses: rtCamp/action-slack-notify@v2
        env:
          SLACK_CHANNEL: ee
          SLACK_TITLE: "Failed ${{ github.workflow }}"
          SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff'
          SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
          SLACK_USERNAME: "OR Bot"
          SLACK_MESSAGE: "Build failed :bomb:"

      # - name: Debug Job
      #   # if: ${{ failure() }}
      #   uses: mxschmitt/action-tmate@v3
      #   env:
      #     DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
      #     IMAGE_TAG: ${{ github.sha }}-ee
      #     ENVIRONMENT: staging
      #   with:
      #     iimit-access-to-actor: true
.github/workflows/peers.yaml (vendored, deleted, 148 lines)
@@ -1,148 +0,0 @@
# This action will push the peers changes to aws
on:
  workflow_dispatch:
    inputs:
      skip_security_checks:
        description: "Skip Security checks if there is a unfixable vuln or error. Value: true/false"
        required: false
        default: "false"
  push:
    branches:
      - dev
    paths:
      - "peers/**"
      - "!peers/.gitignore"
      - "!peers/*-dev.sh"

name: Build and Deploy Peers

jobs:
  deploy:
    name: Deploy
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v2
        with:
          # We need to diff with old commit
          # to see which workers got changed.
          fetch-depth: 2

      - uses: ./.github/composite-actions/update-keys
        with:
          assist_jwt_secret: ${{ secrets.ASSIST_JWT_SECRET }}
          assist_key: ${{ secrets.ASSIST_KEY }}
          domain_name: ${{ secrets.OSS_DOMAIN_NAME }}
          jwt_refresh_secret: ${{ secrets.JWT_REFRESH_SECRET }}
          jwt_secret: ${{ secrets.OSS_JWT_SECRET }}
          jwt_spot_refresh_secret: ${{ secrets.JWT_SPOT_REFRESH_SECRET }}
          jwt_spot_secret: ${{ secrets.JWT_SPOT_SECRET }}
          license_key: ${{ secrets.OSS_LICENSE_KEY }}
          minio_access_key: ${{ secrets.OSS_MINIO_ACCESS_KEY }}
          minio_secret_key: ${{ secrets.OSS_MINIO_SECRET_KEY }}
          pg_password: ${{ secrets.OSS_PG_PASSWORD }}
          registry_url: ${{ secrets.OSS_REGISTRY_URL }}
        name: Update Keys

      - name: Docker login
        run: |
          docker login ${{ secrets.OSS_REGISTRY_URL }} -u ${{ secrets.OSS_DOCKER_USERNAME }} -p "${{ secrets.OSS_REGISTRY_TOKEN }}"

      - uses: azure/k8s-set-context@v1
        with:
          method: kubeconfig
          kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.
        id: setcontext

      # Caching docker images
      - uses: satackey/action-docker-layer-caching@v0.0.11
        # Ignore the failure of a step and avoid terminating the job.
        continue-on-error: true

      - name: Building and Pushing peers image
        id: build-image
        env:
          DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
          ENVIRONMENT: staging
        run: |
          skip_security_checks=${{ github.event.inputs.skip_security_checks }}
          cd peers
          PUSH_IMAGE=0 bash -x ./build.sh
          [[ "x$skip_security_checks" == "xtrue" ]] || {
            curl -L https://github.com/aquasecurity/trivy/releases/download/v0.56.2/trivy_0.56.2_Linux-64bit.tar.gz | tar -xzf - -C ./
            images=("peers")
            for image in ${images[*]};do
              ./trivy image --db-repository ghcr.io/aquasecurity/trivy-db:2 --db-repository public.ecr.aws/aquasecurity/trivy-db:2 --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
            done
            err_code=$?
            [[ $err_code -ne 0 ]] && {
              exit $err_code
            }
          } && {
            echo "Skipping Security Checks"
          }
          images=("peers")
          for image in ${images[*]};do
            docker push $DOCKER_REPO/$image:$IMAGE_TAG
          done
      - name: Creating old image input
        run: |
          #
          # Create yaml with existing image tags
          #
          kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
          tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt

          echo > /tmp/image_override.yaml

          for line in `cat /tmp/image_tag.txt`;
          do
            image_array=($(echo "$line" | tr ':' '\n'))
            cat <<EOF >> /tmp/image_override.yaml
          ${image_array[0]}:
            image:
              tag: ${image_array[1]}
          EOF
          done

      - name: Deploy to kubernetes
        run: |
          cd scripts/helmcharts/

          # Update changed image tag
          sed -i "/peers/{n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml

          cat /tmp/image_override.yaml
          # Deploy command
          mkdir -p /tmp/charts
          mv openreplay/charts/{ingress-nginx,peers,quickwit,connector} /tmp/charts/
          rm -rf openreplay/charts/*
          mv /tmp/charts/* openreplay/charts/
          helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f -
        env:
          DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
          IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
          ENVIRONMENT: staging

      - name: Alert slack
        if: ${{ failure() }}
        uses: rtCamp/action-slack-notify@v2
        env:
          SLACK_CHANNEL: foss
          SLACK_TITLE: "Failed ${{ github.workflow }}"
          SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff'
          SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
          SLACK_USERNAME: "OR Bot"
          SLACK_MESSAGE: "Build failed :bomb:"

      # - name: Debug Job
      #   # if: ${{ failure() }}
      #   uses: mxschmitt/action-tmate@v3
      #   env:
      #     DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
      #     IMAGE_TAG: ${{ github.sha }}-ee
      #     ENVIRONMENT: staging
      #   with:
      #     limit-access-to-actor: true
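Both of the deleted workflows rebuilt /tmp/image_override.yaml from the image tags currently running in the cluster before templating the helm chart. A minimal Python sketch of that step, assuming the same `kubectl` output format and the EE convention of stripping the `-ee` suffix (function and option names here are illustrative, not from the repository):

```python
import subprocess

def build_image_override(strip_ee_suffix: bool = True) -> str:
    """Rebuild the helm override file from image tags currently deployed in the 'app' namespace."""
    out = subprocess.run(
        ["kubectl", "get", "pods", "-n", "app", "-o",
         "jsonpath={.items[*].spec.containers[*].image}"],
        capture_output=True, text=True, check=True,
    ).stdout
    override_lines = []
    for image in sorted(set(out.split())):
        if "/foss/" not in image:
            continue
        # e.g. registry.example.com/foss/peers:v1.2.3-ee -> ("peers", "v1.2.3-ee")
        name, _, tag = image.rsplit("/", 1)[-1].partition(":")
        if strip_ee_suffix:
            tag = tag.split("-")[0]  # helm appends -ee again for the EE chart
        override_lines += [f"{name}:", "  image:", f"    tag: {tag}"]
    return "\n".join(override_lines) + "\n"

if __name__ == "__main__":
    print(build_image_override())
```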
.github/workflows/release-deployment.yaml (vendored, new file, 103 lines)
@@ -0,0 +1,103 @@
name: Release Deployment

on:
  workflow_dispatch:
    inputs:
      services:
        description: 'Comma-separated list of services to deploy. eg: frontend,api,sink'
        required: true
      branch:
        description: 'Branch to deploy (defaults to dev)'
        required: false
        default: 'dev'

env:
  IMAGE_REGISTRY_URL: ${{ secrets.OSS_REGISTRY_URL }}
  DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }}
  DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}

jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.inputs.branch }}
      - name: Docker login
        run: |
          docker login $IMAGE_REGISTRY_URL -u ${{ secrets.OSS_DOCKER_USERNAME }} -p "${{ secrets.OSS_REGISTRY_TOKEN }}"

      - name: Set image tag with branch info
        run: |
          SHORT_SHA=$(git rev-parse --short HEAD)
          echo "IMAGE_TAG=${{ github.event.inputs.branch }}-${SHORT_SHA}" >> $GITHUB_ENV
          echo "Using image tag: $IMAGE_TAG"

      - uses: depot/setup-action@v1

      - name: Build and push Docker images
        run: |
          # Parse the comma-separated services list into an array
          IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
          working_dir=$(pwd)

          # Define backend services (consider moving this to workflow inputs or repo config)
          ls backend/cmd >> /tmp/backend.txt
          BUILD_SCRIPT_NAME="build.sh"

          for SERVICE in "${SERVICES[@]}"; do
            # Check if service is backend
            if grep -q $SERVICE /tmp/backend.txt; then
              cd $working_dir/backend
              foss_build_args="nil $SERVICE"
              ee_build_args="ee $SERVICE"
            else
              cd $working_dir
              [[ $SERVICE == 'chalice' || $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && cd $working_dir/api || cd $SERVICE
              [[ $SERVICE == 'alerts' || $SERVICE == 'crons' ]] && BUILD_SCRIPT_NAME="build_${SERVICE}.sh"
              ee_build_args="ee"
            fi
            {
              echo IMAGE_TAG=$IMAGE_TAG DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$IMAGE_REGISTRY_URL PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
              IMAGE_TAG=$IMAGE_TAG DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$IMAGE_REGISTRY_URL PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $foss_build_args
            }&
            {
              echo IMAGE_TAG=${IMAGE_TAG}-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$IMAGE_REGISTRY_URL PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
              IMAGE_TAG=${IMAGE_TAG}-ee DOCKER_RUNTIME="depot" DOCKER_BUILD_ARGS="--push" ARCH=amd64 DOCKER_REPO=$IMAGE_REGISTRY_URL PUSH_IMAGE=0 bash ${BUILD_SCRIPT_NAME} $ee_build_args
            }&
          done
          wait

      - uses: azure/k8s-set-context@v1
        name: Using ee release cluster
        with:
          method: kubeconfig
          kubeconfig: ${{ secrets.EE_RELEASE_KUBECONFIG }}

      - name: Deploy to ee release Kubernetes
        run: |
          echo "Deploying services to EE cluster: ${{ github.event.inputs.services }}"
          IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
          for SERVICE in "${SERVICES[@]}"; do
            SERVICE=$(echo $SERVICE | xargs) # Trim whitespace
            echo "Deploying $SERVICE to EE cluster with image tag: ${IMAGE_TAG}"
            kubectl set image deployment/$SERVICE-openreplay -n app $SERVICE=${IMAGE_REGISTRY_URL}/$SERVICE:${IMAGE_TAG}-ee
          done

      - uses: azure/k8s-set-context@v1
        name: Using foss release cluster
        with:
          method: kubeconfig
          kubeconfig: ${{ secrets.FOSS_RELEASE_KUBECONFIG }}

      - name: Deploy to FOSS release Kubernetes
        run: |
          echo "Deploying services to FOSS cluster: ${{ github.event.inputs.services }}"
          IFS=',' read -ra SERVICES <<< "${{ github.event.inputs.services }}"
          for SERVICE in "${SERVICES[@]}"; do
            SERVICE=$(echo $SERVICE | xargs) # Trim whitespace
            echo "Deploying $SERVICE to FOSS cluster with image tag: ${IMAGE_TAG}"
            echo "Deploying $SERVICE to FOSS cluster with image tag: ${IMAGE_TAG}"
            kubectl set image deployment/$SERVICE-openreplay -n app $SERVICE=${IMAGE_REGISTRY_URL}/$SERVICE:${IMAGE_TAG}
          done
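The new workflow drives everything from the comma-separated `services` input and a `<branch>-<short-sha>` image tag, building each service twice (FOSS and `-ee`). A rough Python sketch of that fan-out logic, under the assumption that a service counts as "backend" when it has a folder under backend/cmd (helper names below are hypothetical):

```python
import subprocess
from pathlib import Path

def plan_builds(services_input: str, branch: str, repo_root: str = "."):
    """Return (service, image_tag, build_args) tuples mirroring the workflow's build fan-out."""
    short_sha = subprocess.run(["git", "rev-parse", "--short", "HEAD"],
                               capture_output=True, text=True, check=True).stdout.strip()
    base_tag = f"{branch}-{short_sha}"
    backend_services = {p.name for p in (Path(repo_root) / "backend" / "cmd").iterdir() if p.is_dir()}
    plans = []
    for service in (s.strip() for s in services_input.split(",") if s.strip()):
        if service in backend_services:
            foss_args, ee_args = f"nil {service}", f"ee {service}"
        else:
            foss_args, ee_args = "", "ee"
        plans.append((service, base_tag, foss_args))        # FOSS image
        plans.append((service, f"{base_tag}-ee", ee_args))  # EE image
    return plans

if __name__ == "__main__":
    print(plan_builds("frontend,api,sink", "dev"))
```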
LICENSE (2 changes)
@@ -1,4 +1,4 @@
-Copyright (c) 2021-2024 Asayer, Inc dba OpenReplay
+Copyright (c) 2021-2025 Asayer, Inc dba OpenReplay
 
 OpenReplay monorepo uses multiple licenses. Portions of this software are licensed as follows:
 - All content that resides under the "ee/" directory of this repository, is licensed under the license defined in "ee/LICENSE".
@@ -3,10 +3,10 @@ import logging
 from pydantic_core._pydantic_core import ValidationError
 
 import schemas
-from chalicelib.core.alerts import alerts, alerts_listener
-from chalicelib.core.alerts.modules import sessions, alert_helpers
 from chalicelib.utils import pg_client, ch_client, exp_ch_helper
 from chalicelib.utils.TimeUTC import TimeUTC
+from . import alerts, alerts_listener
+from .modules import sessions, alert_helpers
 
 logger = logging.getLogger(__name__)
 
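Several of the following hunks apply the same pattern: absolute `chalicelib.core.*` imports are replaced with package-relative ones. A small, self-contained Python demo of that pattern; the throwaway package layout is illustrative only, not the repository's tree:

```python
"""Build a tiny package on disk and import a module that uses relative imports."""
import sys, tempfile, textwrap
from pathlib import Path

root = Path(tempfile.mkdtemp())
pkg = root / "alerts"
(pkg / "modules").mkdir(parents=True)
(pkg / "__init__.py").write_text("")
(pkg / "listener.py").write_text("NAME = 'listener'\n")
(pkg / "modules" / "__init__.py").write_text("")
(pkg / "modules" / "helpers.py").write_text("NAME = 'helpers'\n")
# processor.py imports its siblings relatively instead of through an absolute package path
(pkg / "processor.py").write_text(textwrap.dedent("""
    from . import listener
    from .modules import helpers

    def describe():
        return listener.NAME, helpers.NAME
"""))

sys.path.insert(0, str(root))
from alerts import processor  # noqa: E402

print(processor.describe())  # ('listener', 'helpers')
```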
@@ -448,7 +448,7 @@ def __get_global_attributes(row):
     if row is None or row.get("cardInfo") is None:
         return row
     card_info = row.get("cardInfo", {})
-    row["compareTo"] = card_info.get("compareTo", [])
+    row["compareTo"] = card_info["compareTo"] if card_info.get("compareTo") is not None else []
     return row
 
 
@@ -3,7 +3,7 @@ from typing import List, Union
 
 import schemas
 from chalicelib.core import events, metadata
-from chalicelib.core.sessions import performance_event
+from . import performance_event
 from chalicelib.utils import pg_client, helper, metrics_helper
 from chalicelib.utils import sql_helper as sh
 
@@ -1,10 +1,12 @@
-from decouple import config
-from chalicelib.utils import helper
-from chalicelib.utils.TimeUTC import TimeUTC
-from chalicelib.utils import pg_client
-from chalicelib.core.issue_tracking import integrations_manager, base_issue
 import json
 
+from decouple import config
+
+from chalicelib.core.issue_tracking import integrations_manager, base_issue
+from chalicelib.utils import helper
+from chalicelib.utils import pg_client
+from chalicelib.utils.TimeUTC import TimeUTC
+
 
 def __get_saved_data(project_id, session_id, issue_id, tool):
     with pg_client.PostgresClient() as cur:
@@ -3,7 +3,7 @@ from typing import List, Union
 
 import schemas
 from chalicelib.core import events, metadata
-from chalicelib.core.sessions import performance_event, sessions_legacy
+from . import performance_event, sessions as sessions_legacy
 from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper
 from chalicelib.utils import sql_helper as sh
 
@@ -0,0 +1 @@
+from .sessions_devtool import *
@@ -4,7 +4,7 @@ import schemas
 from chalicelib.utils.storage import StorageClient
 
 
-def __get_devtools_keys(project_id, session_id):
+def get_devtools_keys(project_id, session_id):
     params = {
         "sessionId": session_id,
         "projectId": project_id
@@ -16,7 +16,7 @@ def __get_devtools_keys(project_id, session_id):
 
 def get_urls(session_id, project_id, context: schemas.CurrentContext, check_existence: bool = True):
     results = []
-    for k in __get_devtools_keys(project_id=project_id, session_id=session_id):
+    for k in get_devtools_keys(project_id=project_id, session_id=session_id):
         if check_existence and not StorageClient.exists(bucket=config("sessions_bucket"), key=k):
             continue
         results.append(StorageClient.get_presigned_url_for_sharing(
@@ -29,5 +29,5 @@ def get_urls(session_id, project_id, context: schemas.CurrentContext, check_exis
 
 def delete_mobs(project_id, session_ids):
     for session_id in session_ids:
-        for k in __get_devtools_keys(project_id=project_id, session_id=session_id):
+        for k in get_devtools_keys(project_id=project_id, session_id=session_id):
             StorageClient.tag_for_deletion(bucket=config("sessions_bucket"), key=k)
@@ -0,0 +1 @@
+from .sessions_favorite import *
@@ -74,7 +74,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se
 
     # filter by ownership or shared status
     if data.shared_only:
-        conditions.append("sessions_notes.is_public")
+        conditions.append("sessions_notes.is_public IS TRUE")
     elif data.mine_only:
         conditions.append("sessions_notes.user_id = %(user_id)s")
     else:
@@ -1,7 +1,7 @@
 import schemas
 from chalicelib.core import events, metadata, events_mobile, \
     issues, assist, canvas, user_testing
-from chalicelib.core.sessions import sessions_mobs, sessions_devtool
+from . import sessions_mobs, sessions_devtool
 from chalicelib.utils import errors_helper
 from chalicelib.utils import pg_client, helper
 from chalicelib.core.modules import MOB_KEY, get_file_key
@@ -1,11 +1,9 @@
 import logging
-from typing import List, Union
 
 import schemas
-from chalicelib.core import events, metadata, projects
-from chalicelib.core.sessions import sessions_favorite, performance_event, sessions_legacy
-from chalicelib.utils import pg_client, helper, metrics_helper
-from chalicelib.utils import sql_helper as sh
+from chalicelib.core import metadata, projects
+from chalicelib.core.sessions import sessions_favorite, sessions_legacy
+from chalicelib.utils import pg_client, helper
 
 logger = logging.getLogger(__name__)
 
api/chalicelib/core/sessions/sessions_viewed/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
+from .sessions_viewed import *
@@ -1,6 +1,7 @@
 import logging
 
-from chalicelib.core import sessions, assist
+from chalicelib.core import assist
+from chalicelib.core.sessions import sessions
 
 logger = logging.getLogger(__name__)
 
@@ -6,8 +6,8 @@ from fastapi import BackgroundTasks
 from pydantic import BaseModel
 
 import schemas
-from chalicelib.core import authorizers, metadata
-from chalicelib.core import tenants, spot, scope
+from chalicelib.core import authorizers
+from chalicelib.core import tenants, spot
 from chalicelib.utils import email_helper
 from chalicelib.utils import helper
 from chalicelib.utils import pg_client
@@ -41,8 +41,7 @@ class ClickHouseClient:
             keys = tuple(x for x, y in results[1])
             return [dict(zip(keys, i)) for i in results[0]]
         except Exception as err:
-            logger.error("--------- CH EXCEPTION -----------")
-            logger.error(err)
+            logger.error("--------- CH EXCEPTION -----------", exc_info=err)
             logger.error("--------- CH QUERY EXCEPTION -----------")
             logger.error(self.format(query=query, parameters=parameters)
                          .replace('\n', '\\n')
@@ -108,14 +108,14 @@ def make_pool():
         try:
             CH_pool.close_all()
         except Exception as error:
-            logger.error("Error while closing all connexions to CH", error)
+            logger.error("Error while closing all connexions to CH", exc_info=error)
     try:
         CH_pool = ClickHouseConnectionPool(min_size=config("CH_MINCONN", cast=int, default=4),
                                            max_size=config("CH_MAXCONN", cast=int, default=8))
         if CH_pool is not None:
             logger.info("Connection pool created successfully for CH")
     except ConnectionError as error:
-        logger.error("Error while connecting to CH", error)
+        logger.error("Error while connecting to CH", exc_info=error)
         if RETRY < RETRY_MAX:
             RETRY += 1
             logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
@@ -174,4 +174,4 @@ async def terminate():
         CH_pool.close_all()
         logger.info("Closed all connexions to CH")
     except Exception as error:
-        logger.error("Error while closing all connexions to CH", error)
+        logger.error("Error while closing all connexions to CH", exc_info=error)
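The logging changes in this file (and in the PostgreSQL client below) replace a stray positional argument with `exc_info=...`. With the stdlib logger a second positional argument is treated as a %-formatting parameter, so the exception was never rendered properly; `exc_info` attaches it together with its traceback. A small self-contained illustration (the logger name is arbitrary):

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("ch_client_demo")

try:
    1 / 0
except Exception as err:
    # Old pattern: the exception is passed as a %-formatting argument. The message has no
    # placeholders, so logging reports "--- Logging error ---: not all arguments converted
    # during string formatting" and the traceback never makes it into the log.
    logger.error("Error while closing all connexions to CH", err)
    # Fixed pattern from this commit: exc_info attaches the exception and its traceback.
    logger.error("Error while closing all connexions to CH", exc_info=err)
```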
@@ -2,6 +2,8 @@ from typing import List
 
 
 def get_step_size(startTimestamp, endTimestamp, density, decimal=False, factor=1000):
+    if endTimestamp == 0:
+        raise Exception("endTimestamp cannot be 0 in order to get step size")
     step_size = (endTimestamp // factor - startTimestamp // factor)
     if density <= 1:
         return step_size
@@ -62,7 +62,7 @@ def make_pool():
         try:
             postgreSQL_pool.closeall()
         except (Exception, psycopg2.DatabaseError) as error:
-            logger.error("Error while closing all connexions to PostgreSQL", error)
+            logger.error("Error while closing all connexions to PostgreSQL", exc_info=error)
     try:
         postgreSQL_pool = ORThreadedConnectionPool(config("PG_MINCONN", cast=int, default=4),
                                                    config("PG_MAXCONN", cast=int, default=8),
@@ -70,10 +70,10 @@ def make_pool():
         if postgreSQL_pool is not None:
             logger.info("Connection pool created successfully")
     except (Exception, psycopg2.DatabaseError) as error:
-        logger.error("Error while connecting to PostgreSQL", error)
+        logger.error("Error while connecting to PostgreSQL", exc_info=error)
         if RETRY < RETRY_MAX:
             RETRY += 1
-            logger.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
+            logger.info(f"Waiting for {RETRY_INTERVAL}s before retry n°{RETRY}")
             time.sleep(RETRY_INTERVAL)
             make_pool()
         else:
@@ -97,13 +97,17 @@ class PostgresClient:
         elif long_query:
             long_config = dict(_PG_CONFIG)
             long_config["application_name"] += "-LONG"
-            long_config["options"] = f"-c statement_timeout=" \
-                                     f"{config('pg_long_timeout', cast=int, default=5 * 60) * 1000}"
+            if config('PG_TIMEOUT_LONG', cast=int, default=1) > 0:
+                long_config["options"] = f"-c statement_timeout=" \
+                                         f"{config('PG_TIMEOUT_LONG', cast=int, default=5 * 60) * 1000}"
+            else:
+                logger.info("Disabled timeout for long query")
             self.connection = psycopg2.connect(**long_config)
         elif not use_pool or not config('PG_POOL', cast=bool, default=True):
             single_config = dict(_PG_CONFIG)
             single_config["application_name"] += "-NOPOOL"
-            single_config["options"] = f"-c statement_timeout={config('PG_TIMEOUT', cast=int, default=30) * 1000}"
+            if config('PG_TIMEOUT', cast=int, default=1) > 0:
+                single_config["options"] = f"-c statement_timeout={config('PG_TIMEOUT', cast=int, default=30) * 1000}"
             self.connection = psycopg2.connect(**single_config)
         else:
             self.connection = postgreSQL_pool.getconn()
@@ -123,7 +127,7 @@ class PostgresClient:
             if not self.use_pool or self.long_query or self.unlimited_query:
                 self.connection.close()
         except Exception as error:
-            logger.error("Error while committing/closing PG-connection", error)
+            logger.error("Error while committing/closing PG-connection", exc_info=error)
             if str(error) == "connection already closed" \
                     and self.use_pool \
                     and not self.long_query \
@@ -150,7 +154,7 @@ class PostgresClient:
             try:
                 self.connection.rollback()
             except psycopg2.InterfaceError as e:
-                logger.error("!!! Error while rollbacking connection", e)
+                logger.error("!!! Error while rollbacking connection", exc_info=e)
                 logger.error("!!! Trying to recreate the cursor")
                 self.recreate_cursor()
             raise error
@@ -161,11 +165,11 @@ class PostgresClient:
         try:
             self.connection.rollback()
         except Exception as error:
-            logger.error("Error while rollbacking connection for recreation", error)
+            logger.error("Error while rollbacking connection for recreation", exc_info=error)
         try:
             self.cursor.close()
         except Exception as error:
-            logger.error("Error while closing cursor for recreation", error)
+            logger.error("Error while closing cursor for recreation", exc_info=error)
         self.cursor = None
         return self.__enter__()
 
@@ -183,4 +187,4 @@ async def terminate():
         postgreSQL_pool.closeall()
         logger.info("Closed all connexions to PostgreSQL")
     except (Exception, psycopg2.DatabaseError) as error:
-        logger.error("Error while closing all connexions to PostgreSQL", error)
+        logger.error("Error while closing all connexions to PostgreSQL", exc_info=error)
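The pg_client change gates the `statement_timeout` connection option behind `PG_TIMEOUT` / `PG_TIMEOUT_LONG`, so a non-positive value disables the per-statement timeout entirely. A reduced sketch of that option-building logic, with the decouple `config()` calls replaced by plain parameters for illustration:

```python
def build_connect_options(base_options: dict, timeout_s: int, long_query: bool = False) -> dict:
    """Return psycopg2 connect kwargs; timeout_s <= 0 means 'no statement_timeout'."""
    options = dict(base_options)
    suffix = "-LONG" if long_query else "-NOPOOL"
    options["application_name"] = options.get("application_name", "app") + suffix
    if timeout_s > 0:
        # statement_timeout is expressed in milliseconds on the server side
        options["options"] = f"-c statement_timeout={timeout_s * 1000}"
    # else: leave 'options' unset, i.e. no per-statement timeout
    return options

print(build_connect_options({"application_name": "chalice"}, timeout_s=30))
print(build_connect_options({"application_name": "chalice"}, timeout_s=0, long_query=True))
```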
@@ -1,7 +1,8 @@
 from fastapi import Depends, Body
 
 import schemas
-from chalicelib.core import sessions, events, jobs, projects
+from chalicelib.core import events, jobs, projects
+from chalicelib.core.sessions import sessions
 from or_dependencies import OR_context
 from routers.base import get_routers
 
@@ -116,7 +116,7 @@ RUN if [ "$SERVICE_NAME" = "http" ]; then \
         wget https://static.openreplay.com/geoip/GeoLite2-City.mmdb -O "$MAXMINDDB_FILE"; \
     elif [ "$SERVICE_NAME" = "imagestorage" ]; then \
         apk add --no-cache zstd; \
-    elif [ "$SERVICE_NAME" = "canvas-handler" ]; then \
+    elif [ "$SERVICE_NAME" = "canvases" ]; then \
         apk add --no-cache zstd; \
     elif [ "$SERVICE_NAME" = "spot" ]; then \
         apk add --no-cache ffmpeg; \
@@ -8,8 +8,8 @@ import (
     "syscall"
     "time"
 
-    "openreplay/backend/internal/canvas-handler"
-    config "openreplay/backend/internal/config/canvas-handler"
+    "openreplay/backend/internal/canvases"
+    config "openreplay/backend/internal/config/canvases"
     "openreplay/backend/pkg/logger"
     "openreplay/backend/pkg/messages"
     "openreplay/backend/pkg/metrics"
@@ -29,7 +29,10 @@ func main() {
         log.Fatal(ctx, "can't init object storage: %s", err)
     }
 
-    srv, err := canvas_handler.New(cfg, log, objStore)
+    producer := queue.NewProducer(cfg.MessageSizeLimit, true)
+    defer producer.Close(15000)
+
+    srv, err := canvases.New(cfg, log, objStore, producer)
     if err != nil {
         log.Fatal(ctx, "can't init canvas service: %s", err)
     }
@@ -38,6 +41,7 @@ func main() {
         cfg.GroupCanvasImage,
         []string{
             cfg.TopicCanvasImages,
+            cfg.TopicCanvasTrigger,
         },
         messages.NewImagesMessageIterator(func(data []byte, sessID uint64) {
             isSessionEnd := func(data []byte) bool {
@@ -55,14 +59,34 @@ func main() {
                 }
                 return true
             }
+            isTriggerEvent := func(data []byte) (string, string, bool) {
+                reader := messages.NewBytesReader(data)
+                msgType, err := reader.ReadUint()
+                if err != nil {
+                    return "", "", false
+                }
+                if msgType != messages.MsgCustomEvent {
+                    return "", "", false
+                }
+                msg, err := messages.ReadMessage(msgType, reader)
+                if err != nil {
+                    return "", "", false
+                }
+                customEvent := msg.(*messages.CustomEvent)
+                return customEvent.Payload, customEvent.Name, true
+            }
             sessCtx := context.WithValue(context.Background(), "sessionID", sessID)
 
             if isSessionEnd(data) {
-                if err := srv.PackSessionCanvases(sessCtx, sessID); err != nil {
+                if err := srv.PrepareSessionCanvases(sessCtx, sessID); err != nil {
                    if !strings.Contains(err.Error(), "no such file or directory") {
                        log.Error(sessCtx, "can't pack session's canvases: %s", err)
                    }
                }
+            } else if path, name, ok := isTriggerEvent(data); ok {
+                if err := srv.ProcessSessionCanvas(sessCtx, sessID, path, name); err != nil {
+                    log.Error(sessCtx, "can't process session's canvas: %s", err)
+                }
             } else {
                 if err := srv.SaveCanvasToDisk(sessCtx, sessID, data); err != nil {
                     log.Error(sessCtx, "can't process canvas image: %s", err)
@@ -63,8 +63,8 @@ func main() {
     // Web messages
     messages.MsgMetadata, messages.MsgIssueEvent, messages.MsgSessionStart, messages.MsgSessionEnd,
     messages.MsgUserID, messages.MsgUserAnonymousID, messages.MsgIntegrationEvent, messages.MsgPerformanceTrackAggr,
-    messages.MsgJSException, messages.MsgCustomEvent, messages.MsgCustomIssue,
-    messages.MsgFetch, messages.MsgNetworkRequest, messages.MsgGraphQL, messages.MsgStateAction, messages.MsgMouseClick,
+    messages.MsgJSException, messages.MsgResourceTiming, messages.MsgCustomEvent, messages.MsgCustomIssue,
+    messages.MsgNetworkRequest, messages.MsgGraphQL, messages.MsgStateAction, messages.MsgMouseClick,
     messages.MsgMouseClickDeprecated, messages.MsgSetPageLocation, messages.MsgSetPageLocationDeprecated,
     messages.MsgPageLoadTiming, messages.MsgPageRenderTiming,
     messages.MsgPageEvent, messages.MsgPageEventDeprecated, messages.MsgMouseThrashing, messages.MsgInputChange,
@@ -98,7 +98,6 @@ func main() {
     // Process assets
     if msg.TypeID() == messages.MsgSetNodeAttributeURLBased ||
         msg.TypeID() == messages.MsgSetCSSDataURLBased ||
-        msg.TypeID() == messages.MsgCSSInsertRuleURLBased ||
         msg.TypeID() == messages.MsgAdoptedSSReplaceURLBased ||
         msg.TypeID() == messages.MsgAdoptedSSInsertRuleURLBased {
         m := msg.Decode()
@@ -1,4 +1,4 @@
-package canvas_handler
+package canvases
 
 import (
     "bytes"
@@ -12,10 +12,12 @@ import (
     "strings"
     "time"
 
-    config "openreplay/backend/internal/config/canvas-handler"
+    config "openreplay/backend/internal/config/canvases"
     "openreplay/backend/pkg/logger"
+    "openreplay/backend/pkg/messages"
     "openreplay/backend/pkg/objectstorage"
     "openreplay/backend/pkg/pool"
+    "openreplay/backend/pkg/queue/types"
 )
 
 type ImageStorage struct {
@@ -23,8 +25,10 @@ type ImageStorage struct {
     log          logger.Logger
     basePath     string
     saverPool    pool.WorkerPool
+    packerPool   pool.WorkerPool
     uploaderPool pool.WorkerPool
     objStorage   objectstorage.ObjectStorage
+    producer     types.Producer
 }
 
 type saveTask struct {
@@ -34,13 +38,20 @@ type saveTask struct {
     image *bytes.Buffer
 }
 
+type packTask struct {
+    ctx       context.Context
+    sessionID uint64
+    path      string
+    name      string
+}
+
 type uploadTask struct {
     ctx  context.Context
     path string
     name string
 }
 
-func New(cfg *config.Config, log logger.Logger, objStorage objectstorage.ObjectStorage) (*ImageStorage, error) {
+func New(cfg *config.Config, log logger.Logger, objStorage objectstorage.ObjectStorage, producer types.Producer) (*ImageStorage, error) {
     switch {
     case cfg == nil:
         return nil, fmt.Errorf("config is empty")
@@ -54,9 +65,11 @@ func New(cfg *config.Config, log logger.Logger, objStorage objectstorage.ObjectS
         log:        log,
         basePath:   path,
         objStorage: objStorage,
+        producer:   producer,
     }
-    s.saverPool = pool.NewPool(4, 8, s.writeToDisk)
-    s.uploaderPool = pool.NewPool(4, 8, s.sendToS3)
+    s.saverPool = pool.NewPool(2, 2, s.writeToDisk)
+    s.packerPool = pool.NewPool(8, 16, s.packCanvas)
+    s.uploaderPool = pool.NewPool(8, 16, s.sendToS3)
     return s, nil
 }
 
@@ -97,11 +110,12 @@ func (v *ImageStorage) writeToDisk(payload interface{}) {
     }
     outFile.Close()
 
-    v.log.Info(task.ctx, "canvas image saved, name: %s, size: %3.3f mb", task.name, float64(task.image.Len())/1024.0/1024.0)
+    v.log.Debug(task.ctx, "canvas image saved, name: %s, size: %3.3f mb", task.name, float64(task.image.Len())/1024.0/1024.0)
+    return
 }
 
-func (v *ImageStorage) PackSessionCanvases(ctx context.Context, sessID uint64) error {
+func (v *ImageStorage) PrepareSessionCanvases(ctx context.Context, sessID uint64) error {
     start := time.Now()
     path := fmt.Sprintf("%s%d/", v.basePath, sessID)
 
     // Check that the directory exists
@@ -117,6 +131,10 @@ func (v *ImageStorage) PackSessionCanvases(ctx context.Context, sessID uint64) e
 
     // Build the list of canvas images sets
     for _, file := range files {
+        // Skip already created archives
+        if strings.HasSuffix(file.Name(), ".tar.zst") {
+            continue
+        }
         name := strings.Split(file.Name(), ".")
         parts := strings.Split(name[0], "_")
         if len(name) != 2 || len(parts) != 3 {
@@ -127,26 +145,46 @@ func (v *ImageStorage) PackSessionCanvases(ctx context.Context, sessID uint64) e
         names[canvasID] = true
     }
 
-    sessionID := strconv.FormatUint(sessID, 10)
     for name := range names {
-        // Save to archives
-        archPath := fmt.Sprintf("%s%s.tar.zst", path, name)
-        fullCmd := fmt.Sprintf("find %s -type f -name '%s*' | tar -cf - --files-from=- | zstd -o %s",
-            path, name, archPath)
-        cmd := exec.Command("sh", "-c", fullCmd)
-        var stdout, stderr bytes.Buffer
-        cmd.Stdout = &stdout
-        cmd.Stderr = &stderr
-
-        err := cmd.Run()
-        if err != nil {
-            return fmt.Errorf("failed to execute command, err: %s, stderr: %v", err, stderr.String())
+        msg := &messages.CustomEvent{
+            Name:    name,
+            Payload: path,
         }
+        if err := v.producer.Produce(v.cfg.TopicCanvasTrigger, sessID, msg.Encode()); err != nil {
+            v.log.Error(ctx, "can't send canvas trigger: %s", err)
+        }
-        v.uploaderPool.Submit(&uploadTask{ctx: ctx, path: archPath, name: sessionID + "/" + name + ".tar.zst"})
     }
+    v.log.Info(ctx, "session canvases (%d) prepared in %.3fs, session: %d", len(names), time.Since(start).Seconds(), sessID)
     return nil
 }
 
+func (v *ImageStorage) ProcessSessionCanvas(ctx context.Context, sessID uint64, path, name string) error {
+    v.packerPool.Submit(&packTask{ctx: ctx, sessionID: sessID, path: path, name: name})
+    return nil
+}
+
+func (v *ImageStorage) packCanvas(payload interface{}) {
+    task := payload.(*packTask)
+    start := time.Now()
+    sessionID := strconv.FormatUint(task.sessionID, 10)
+
+    // Save to archives
+    archPath := fmt.Sprintf("%s%s.tar.zst", task.path, task.name)
+    fullCmd := fmt.Sprintf("find %s -type f -name '%s*' ! -name '*.tar.zst' | tar -cf - --files-from=- | zstd -f -o %s",
+        task.path, task.name, archPath)
+    cmd := exec.Command("sh", "-c", fullCmd)
+    var stdout, stderr bytes.Buffer
+    cmd.Stdout = &stdout
+    cmd.Stderr = &stderr
+
+    err := cmd.Run()
+    if err != nil {
+        v.log.Fatal(task.ctx, "failed to execute command, err: %s, stderr: %v", err, stderr.String())
+    }
+    v.log.Info(task.ctx, "canvas packed successfully in %.3fs, session: %d", time.Since(start).Seconds(), task.sessionID)
+    v.uploaderPool.Submit(&uploadTask{ctx: task.ctx, path: archPath, name: sessionID + "/" + task.name + ".tar.zst"})
+}
+
 func (v *ImageStorage) sendToS3(payload interface{}) {
     task := payload.(*uploadTask)
     start := time.Now()
@@ -157,6 +195,5 @@ func (v *ImageStorage) sendToS3(payload interface{}) {
     if err := v.objStorage.Upload(bytes.NewReader(video), task.name, "application/octet-stream", objectstorage.NoContentEncoding, objectstorage.Zstd); err != nil {
         v.log.Fatal(task.ctx, "failed to upload canvas to storage: %s", err)
     }
-    v.log.Info(task.ctx, "replay file (size: %d) uploaded successfully in %v", len(video), time.Since(start))
-    return
+    v.log.Info(task.ctx, "replay file (size: %d) uploaded successfully in %.3fs", len(video), time.Since(start).Seconds())
 }
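For reference, the packer worker added above shells out to a `find | tar | zstd` pipeline per canvas. A hedged Python equivalent of that command (paths are placeholders; the Go code remains the authoritative version):

```python
import subprocess

def pack_canvas(session_dir: str, canvas_name: str) -> str:
    """Archive every '<canvas_name>*' frame in session_dir into '<canvas_name>.tar.zst'."""
    arch_path = f"{session_dir}{canvas_name}.tar.zst"
    cmd = (
        f"find {session_dir} -type f -name '{canvas_name}*' ! -name '*.tar.zst' "
        f"| tar -cf - --files-from=- | zstd -f -o {arch_path}"
    )
    result = subprocess.run(["sh", "-c", cmd], capture_output=True, text=True)
    if result.returncode != 0:
        raise RuntimeError(f"archiving failed: {result.stderr}")
    return arch_path

# Example (assumes frames such as /tmp/canvases/12345/canvas_1_0.jpeg already exist):
# print(pack_canvas("/tmp/canvases/12345/", "canvas_1"))
```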
@@ -1,4 +1,4 @@
-package canvas_handler
+package canvases
 
 import (
     "openreplay/backend/internal/config/common"
@@ -12,8 +12,8 @@ type Config struct {
     objectstorage.ObjectsConfig
     FSDir              string `env:"FS_DIR,required"`
    CanvasDir          string `env:"CANVAS_DIR,default=canvas"`
-    TopicCanvasImages  string `env:"TOPIC_CANVAS_IMAGES,required"`
-    TopicCanvasTrigger string `env:"TOPIC_CANVAS_TRIGGER,required"`
+    TopicCanvasImages  string `env:"TOPIC_CANVAS_IMAGES,required"`  // For canvas images and sessionEnd events from ender
+    TopicCanvasTrigger string `env:"TOPIC_CANVAS_TRIGGER,required"` // For trigger events to start processing (archive and upload)
     GroupCanvasImage   string `env:"GROUP_CANVAS_IMAGE,required"`
     UseProfiler        bool   `env:"PROFILER_ENABLED,default=false"`
 }
@@ -88,6 +88,6 @@ type HTTP struct {
     HTTPTimeout             time.Duration `env:"HTTP_TIMEOUT,default=60s"`
     JsonSizeLimit           int64         `env:"JSON_SIZE_LIMIT,default=131072"` // 128KB, 1000 for HTTP service
     UseAccessControlHeaders bool          `env:"USE_CORS,default=false"`
-    JWTSecret               string        `env:"JWT_SECRET,required"`
-    JWTSpotSecret           string        `env:"JWT_SPOT_SECRET,required"`
+    JWTSecret               string        `env:"JWT_SECRET"`
+    JWTSpotSecret           string        `env:"JWT_SPOT_SECRET"`
 }
@@ -72,14 +72,14 @@ func (s *saverImpl) Handle(msg Message) {
     if IsMobileType(msg.TypeID()) {
         if err := s.handleMobileMessage(sessCtx, session, msg); err != nil {
             if !postgres.IsPkeyViolation(err) {
-                s.log.Error(sessCtx, "mobile message insertion error, msg: %+v, err: %s", msg, err)
+                s.log.Error(sessCtx, "mobile message insertion error, msg: %+v, err: %.200s", msg, err)
             }
             return
         }
     } else {
         if err := s.handleWebMessage(sessCtx, session, msg); err != nil {
             if !postgres.IsPkeyViolation(err) {
-                s.log.Error(sessCtx, "web message insertion error, msg: %+v, err: %s", msg, err)
+                s.log.Error(sessCtx, "web message insertion error, msg: %+v, err: %.200s", msg, err)
             }
             return
         }
@@ -131,17 +131,6 @@ func (e *AssetsCache) ParseAssets(msg messages.Message) messages.Message {
         }
         newMsg.SetMeta(msg.Meta())
         return newMsg
-    case *messages.CSSInsertRuleURLBased:
-        if e.shouldSkipAsset(m.BaseURL) {
-            return msg
-        }
-        newMsg := &messages.CSSInsertRule{
-            ID:    m.ID,
-            Index: m.Index,
-            Rule:  e.handleCSS(m.SessionID(), m.BaseURL, m.Rule),
-        }
-        newMsg.SetMeta(msg.Meta())
-        return newMsg
     case *messages.AdoptedSSReplaceURLBased:
         if e.shouldSkipAsset(m.BaseURL) {
             return msg
@@ -7,6 +7,8 @@ import (
     "openreplay/backend/pkg/logger"
 )
 
+var BULK_SIZE = 200
+
 type bulksTask struct {
     bulks []Bulk
 }
@@ -102,7 +104,7 @@ func (conn *BulkSet) initBulks() {
         "autocomplete",
         "(value, type, project_id)",
         "($%d, $%d, $%d)",
-        3, 200)
+        3, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create autocomplete bulk: %s", err)
     }
@@ -110,7 +112,7 @@
         "events_common.requests",
         "(session_id, timestamp, seq_index, url, duration, success)",
         "($%d, $%d, $%d, LEFT($%d, 8000), $%d, $%d)",
-        6, 200)
+        6, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create requests bulk: %s", err)
     }
@@ -118,7 +120,7 @@
         "events_common.customs",
         "(session_id, timestamp, seq_index, name, payload)",
         "($%d, $%d, $%d, LEFT($%d, 2000), $%d)",
-        5, 200)
+        5, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create customEvents bulk: %s", err)
     }
@@ -130,7 +132,7 @@
         "($%d, $%d, $%d, LEFT($%d, 8000), LEFT($%d, 8000), LEFT($%d, 300), LEFT($%d, 2000), LEFT($%d, 8000), "+
         "NULLIF($%d, 0), NULLIF($%d, 0), NULLIF($%d, 0), NULLIF($%d, 0),"+
         " NULLIF($%d, 0), NULLIF($%d, 0), NULLIF($%d, 0), NULLIF($%d, 0), NULLIF($%d, 0), NULLIF($%d, 0), NULLIF($%d, ''))",
-        19, 200)
+        19, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create webPageEvents bulk: %s", err)
     }
@@ -138,7 +140,7 @@
         "events.inputs",
         "(session_id, message_id, timestamp, label, hesitation, duration)",
         "($%d, $%d, $%d, NULLIF(LEFT($%d, 2000),''), $%d, $%d)",
-        6, 200)
+        6, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create webInputDurations bulk: %s", err)
     }
@@ -146,7 +148,7 @@
         "events.graphql",
         "(session_id, timestamp, message_id, name, request_body, response_body)",
         "($%d, $%d, $%d, LEFT($%d, 2000), $%d, $%d)",
-        6, 200)
+        6, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create webGraphQL bulk: %s", err)
     }
@@ -154,7 +156,7 @@
         "errors",
         "(error_id, project_id, source, name, message, payload)",
         "($%d, $%d, $%d, $%d, $%d, $%d::jsonb)",
-        6, 200)
+        6, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create webErrors bulk: %s", err)
     }
@@ -162,7 +164,7 @@
         "events.errors",
         "(session_id, message_id, timestamp, error_id)",
         "($%d, $%d, $%d, $%d)",
-        4, 200)
+        4, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create webErrorEvents bulk: %s", err)
     }
@@ -170,7 +172,7 @@
         "public.errors_tags",
         "(session_id, message_id, error_id, key, value)",
         "($%d, $%d, $%d, $%d, $%d)",
-        5, 200)
+        5, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create webErrorTags bulk: %s", err)
     }
@@ -178,7 +180,7 @@
         "issues",
         "(project_id, issue_id, type, context_string)",
         "($%d, $%d, $%d, $%d)",
-        4, 200)
+        4, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create webIssues bulk: %s", err)
     }
@@ -186,7 +188,7 @@
         "events_common.issues",
         "(session_id, issue_id, timestamp, seq_index, payload)",
         "($%d, $%d, $%d, $%d, CAST($%d AS jsonb))",
-        5, 200)
+        5, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create webIssueEvents bulk: %s", err)
     }
@@ -194,7 +196,7 @@
         "events_common.customs",
         "(session_id, seq_index, timestamp, name, payload, level)",
         "($%d, $%d, $%d, LEFT($%d, 2000), $%d, $%d)",
-        6, 200)
+        6, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create webCustomEvents bulk: %s", err)
     }
@@ -202,7 +204,7 @@
         "events.clicks",
         "(session_id, message_id, timestamp, label, selector, url, path, hesitation)",
         "($%d, $%d, $%d, NULLIF(LEFT($%d, 2000), ''), LEFT($%d, 8000), LEFT($%d, 2000), LEFT($%d, 2000), $%d)",
-        8, 200)
+        8, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create webClickEvents bulk: %s", err)
     }
@@ -210,7 +212,7 @@
         "events.clicks",
         "(session_id, message_id, timestamp, label, selector, url, path, hesitation, normalized_x, normalized_y)",
         "($%d, $%d, $%d, NULLIF(LEFT($%d, 2000), ''), LEFT($%d, 8000), LEFT($%d, 2000), LEFT($%d, 2000), $%d, $%d, $%d)",
-        10, 200)
+        10, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create webClickEvents bulk: %s", err)
     }
@@ -218,7 +220,7 @@
         "events_common.requests",
         "(session_id, timestamp, seq_index, url, host, path, query, request_body, response_body, status_code, method, duration, success, transfer_size)",
         "($%d, $%d, $%d, LEFT($%d, 8000), LEFT($%d, 300), LEFT($%d, 2000), LEFT($%d, 8000), $%d, $%d, $%d::smallint, NULLIF($%d, '')::http_method, $%d, $%d, $%d)",
-        14, 200)
+        14, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create webNetworkRequest bulk: %s", err)
     }
@@ -226,7 +228,7 @@
         "events.canvas_recordings",
         "(session_id, recording_id, timestamp)",
         "($%d, $%d, $%d)",
-        3, 200)
+        3, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create webCanvasNodes bulk: %s", err)
     }
@@ -234,7 +236,7 @@
         "events.tags",
         "(session_id, timestamp, seq_index, tag_id)",
         "($%d, $%d, $%d, $%d)",
-        4, 200)
+        4, BULK_SIZE)
     if err != nil {
         conn.log.Fatal(conn.ctx, "can't create webTagTriggers bulk: %s", err)
     }
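The bulk changes above only swap the literal 200 for a shared `BULK_SIZE`. As a rough illustration of what that sizing controls, here is a Python sketch that expands a per-row placeholder template into one multi-row INSERT, capped at a configurable bulk size (the table and template below are examples, not the service's actual SQL):

```python
BULK_SIZE = 200

def build_bulk_insert(table: str, columns: str, row_template: str,
                      values_per_row: int, rows: list[tuple]) -> tuple[str, tuple]:
    """Render one INSERT covering up to BULK_SIZE rows, numbering $1..$N placeholders."""
    if len(rows) > BULK_SIZE:
        raise ValueError(f"flush required: {len(rows)} rows exceeds BULK_SIZE={BULK_SIZE}")
    groups, args = [], []
    for i, row in enumerate(rows):
        offsets = range(i * values_per_row + 1, (i + 1) * values_per_row + 1)
        groups.append(row_template % tuple(offsets))  # e.g. "($1, $2, $3)"
        args.extend(row)
    sql = f"INSERT INTO {table} {columns} VALUES " + ", ".join(groups)
    return sql, tuple(args)

sql, args = build_bulk_insert("autocomplete", "(value, type, project_id)",
                              "($%d, $%d, $%d)", 3,
                              [("checkout", "CLICK", 1), ("login", "INPUT", 1)])
print(sql)
print(args)
```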
@@ -77,8 +77,6 @@ func (d *DeadClickDetector) Handle(message Message, timestamp uint64) Message {
     *MoveNode,
     *RemoveNode,
     *SetCSSData,
-    *CSSInsertRule,
-    *CSSDeleteRule,
     *SetInputValue,
     *SetInputChecked:
     return d.Build()
@@ -2,7 +2,7 @@
 package messages
 
 func IsReplayerType(id int) bool {
-    return 1 != id && 3 != id && 17 != id && 23 != id && 24 != id && 25 != id && 26 != id && 27 != id && 28 != id && 29 != id && 30 != id && 31 != id && 32 != id && 33 != id && 42 != id && 56 != id && 62 != id && 63 != id && 64 != id && 66 != id && 78 != id && 80 != id && 81 != id && 82 != id && 112 != id && 115 != id && 124 != id && 125 != id && 126 != id && 127 != id && 90 != id && 91 != id && 92 != id && 94 != id && 95 != id && 97 != id && 98 != id && 107 != id && 110 != id
+    return 1 != id && 17 != id && 23 != id && 24 != id && 26 != id && 27 != id && 28 != id && 29 != id && 30 != id && 31 != id && 32 != id && 33 != id && 42 != id && 56 != id && 63 != id && 64 != id && 66 != id && 78 != id && 81 != id && 82 != id && 112 != id && 115 != id && 124 != id && 125 != id && 126 != id && 127 != id && 90 != id && 91 != id && 92 != id && 94 != id && 95 != id && 97 != id && 98 != id && 107 != id && 110 != id
 }
 
 func IsMobileType(id int) bool {
@@ -10,5 +10,5 @@ func IsMobileType(id int) bool {
 }
 
 func IsDOMType(id int) bool {
-    return 0 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 34 == id || 35 == id || 37 == id || 38 == id || 49 == id || 50 == id || 51 == id || 43 == id || 52 == id || 54 == id || 55 == id || 57 == id || 58 == id || 59 == id || 60 == id || 61 == id || 67 == id || 68 == id || 69 == id || 70 == id || 71 == id || 72 == id || 73 == id || 74 == id || 75 == id || 76 == id || 77 == id || 113 == id || 114 == id || 117 == id || 118 == id || 119 == id || 122 == id || 93 == id || 96 == id || 100 == id || 101 == id || 102 == id || 103 == id || 104 == id || 105 == id || 106 == id || 111 == id
+    return 0 == id || 4 == id || 5 == id || 6 == id || 7 == id || 8 == id || 9 == id || 10 == id || 11 == id || 12 == id || 13 == id || 14 == id || 15 == id || 16 == id || 18 == id || 19 == id || 20 == id || 34 == id || 35 == id || 49 == id || 50 == id || 51 == id || 43 == id || 52 == id || 54 == id || 55 == id || 57 == id || 58 == id || 60 == id || 61 == id || 68 == id || 69 == id || 70 == id || 71 == id || 72 == id || 73 == id || 74 == id || 75 == id || 76 == id || 77 == id || 113 == id || 114 == id || 117 == id || 118 == id || 119 == id || 122 == id || 93 == id || 96 == id || 100 == id || 101 == id || 102 == id || 103 == id || 104 == id || 105 == id || 106 == id || 111 == id
 }
@@ -44,9 +44,8 @@ func NewMessageIterator(log logger.Logger, messageHandler MessageHandler, messag
         iter.filter = filter
     }
     iter.preFilter = map[int]struct{}{
-        MsgBatchMetadata: {}, MsgBatchMeta: {}, MsgTimestamp: {},
-        MsgSessionStart: {}, MsgSessionEnd: {}, MsgSetPageLocation: {},
-        MsgMobileBatchMeta: {},
+        MsgBatchMetadata: {}, MsgTimestamp: {}, MsgSessionStart: {},
+        MsgSessionEnd: {}, MsgSetPageLocation: {}, MsgMobileBatchMeta: {},
     }
     return iter
 }
@@ -152,20 +151,6 @@ func (i *messageIteratorImpl) preprocessing(msg Message) error {
         i.version = m.Version
         i.batchInfo.version = m.Version
 
-    case *BatchMeta: // Is not required to be present in batch since Mobile doesn't have it (though we might change it)
-        if i.messageInfo.Index > 1 { // Might be several 0-0 BatchMeta in a row without an error though
-            return fmt.Errorf("batchMeta found at the end of the batch, info: %s", i.batchInfo.Info())
-        }
-        i.messageInfo.Index = m.PageNo<<32 + m.FirstIndex // 2^32 is the maximum count of messages per page (ha-ha)
-        i.messageInfo.Timestamp = uint64(m.Timestamp)
-        if m.Timestamp == 0 {
-            i.zeroTsLog("BatchMeta")
-        }
-        // Try to get saved session's page url
-        if savedURL := i.urls.Get(i.messageInfo.batch.sessionID); savedURL != "" {
-            i.messageInfo.Url = savedURL
-        }
-
     case *Timestamp:
         i.messageInfo.Timestamp = m.Timestamp
         if m.Timestamp == 0 {
@@ -2,34 +2,6 @@ package messages
 
 func transformDeprecated(msg Message) Message {
     switch m := msg.(type) {
-    case *JSExceptionDeprecated:
-        return &JSException{
-            Name:     m.Name,
-            Message:  m.Message,
-            Payload:  m.Payload,
-            Metadata: "{}",
-        }
-    case *Fetch:
-        return &NetworkRequest{
-            Type:      "fetch",
-            Method:    m.Method,
-            URL:       m.URL,
-            Request:   m.Request,
-            Response:  m.Response,
-            Status:    m.Status,
-            Timestamp: m.Timestamp,
-            Duration:  m.Duration,
-        }
-    case *IssueEventDeprecated:
-        return &IssueEvent{
-            MessageID:     m.MessageID,
-            Timestamp:     m.Timestamp,
-            Type:          m.Type,
-            ContextString: m.ContextString,
-            Context:       m.Context,
-            Payload:       m.Payload,
-            URL:           "",
-        }
     case *ResourceTimingDeprecated:
         return &ResourceTiming{
             Timestamp: m.Timestamp,
@@ -4,7 +4,6 @@ package messages
 const (
     MsgTimestamp = 0
     MsgSessionStart = 1
-    MsgSessionEndDeprecated = 3
     MsgSetPageLocationDeprecated = 4
     MsgSetViewportSize = 5
     MsgSetViewportScroll = 6
@@ -26,7 +25,6 @@ const (
     MsgConsoleLog = 22
     MsgPageLoadTiming = 23
     MsgPageRenderTiming = 24
-    MsgJSExceptionDeprecated = 25
     MsgIntegrationEvent = 26
     MsgCustomEvent = 27
     MsgUserID = 28
@@ -37,9 +35,6 @@ const (
     MsgPageEvent = 33
     MsgStringDictGlobal = 34
     MsgSetNodeAttributeDictGlobal = 35
-    MsgCSSInsertRule = 37
-    MsgCSSDeleteRule = 38
-    MsgFetch = 39
     MsgProfiler = 40
     MsgOTable = 41
     MsgStateAction = 42
@@ -59,14 +54,11 @@ const (
     MsgPerformanceTrackAggr = 56
     MsgLoadFontFace = 57
     MsgSetNodeFocus = 58
-    MsgLongTask = 59
     MsgSetNodeAttributeURLBased = 60
     MsgSetCSSDataURLBased = 61
-    MsgIssueEventDeprecated = 62
     MsgTechnicalInfo = 63
     MsgCustomIssue = 64
     MsgAssetCache = 66
-    MsgCSSInsertRuleURLBased = 67
     MsgMouseClick = 68
     MsgMouseClickDeprecated = 69
     MsgCreateIFrameDocument = 70
@@ -79,7 +71,6 @@ const (
     MsgAdoptedSSRemoveOwner = 77
     MsgJSException = 78
     MsgZustand = 79
-    MsgBatchMeta = 80
     MsgBatchMetadata = 81
     MsgPartitionedMessage = 82
     MsgNetworkRequest = 83
@ -193,27 +184,6 @@ func (msg *SessionStart) TypeID() int {
	return 1
}

type SessionEndDeprecated struct {
	message
	Timestamp uint64
}

func (msg *SessionEndDeprecated) Encode() []byte {
	buf := make([]byte, 11)
	buf[0] = 3
	p := 1
	p = WriteUint(msg.Timestamp, buf, p)
	return buf[:p]
}

func (msg *SessionEndDeprecated) Decode() Message {
	return msg
}

func (msg *SessionEndDeprecated) TypeID() int {
	return 3
}

type SetPageLocationDeprecated struct {
	message
	URL string
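The Encode method above writes a single type-ID byte followed by the message fields. Judging from the buffer sizes (11 bytes for one uint64), the repository's WriteUint/WriteInt helpers appear to be variable-length (varint) encoders; that is an assumption, so here is a self-contained sketch of the same wire layout using the standard library.

```go
package main

import (
	"encoding/binary"
	"fmt"
)

// encodeSessionEnd sketches the layout used by the Encode methods above:
// one type-ID byte followed by uvarint-encoded fields. It assumes the repo's
// WriteUint helper is a uvarint writer (not confirmed here).
func encodeSessionEnd(timestamp uint64) []byte {
	buf := make([]byte, 11) // 1 type byte + up to 10 bytes of uvarint
	buf[0] = 3              // MsgSessionEndDeprecated
	n := binary.PutUvarint(buf[1:], timestamp)
	return buf[:1+n]
}

func decodeSessionEnd(b []byte) (uint64, error) {
	if len(b) == 0 || b[0] != 3 {
		return 0, fmt.Errorf("unexpected type id")
	}
	ts, n := binary.Uvarint(b[1:])
	if n <= 0 {
		return 0, fmt.Errorf("bad uvarint")
	}
	return ts, nil
}

func main() {
	b := encodeSessionEnd(1700000000000)
	ts, err := decodeSessionEnd(b)
	fmt.Println(len(b), ts, err) // round-trips the timestamp
}
```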
@ -738,31 +708,6 @@ func (msg *PageRenderTiming) TypeID() int {
|
|||
return 24
|
||||
}
|
||||
|
||||
type JSExceptionDeprecated struct {
|
||||
message
|
||||
Name string
|
||||
Message string
|
||||
Payload string
|
||||
}
|
||||
|
||||
func (msg *JSExceptionDeprecated) Encode() []byte {
|
||||
buf := make([]byte, 31+len(msg.Name)+len(msg.Message)+len(msg.Payload))
|
||||
buf[0] = 25
|
||||
p := 1
|
||||
p = WriteString(msg.Name, buf, p)
|
||||
p = WriteString(msg.Message, buf, p)
|
||||
p = WriteString(msg.Payload, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *JSExceptionDeprecated) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *JSExceptionDeprecated) TypeID() int {
|
||||
return 25
|
||||
}
|
||||
|
||||
type IntegrationEvent struct {
|
||||
message
|
||||
Timestamp uint64
|
||||
|
|
@ -1065,87 +1010,6 @@ func (msg *SetNodeAttributeDictGlobal) TypeID() int {
|
|||
return 35
|
||||
}
|
||||
|
||||
type CSSInsertRule struct {
|
||||
message
|
||||
ID uint64
|
||||
Rule string
|
||||
Index uint64
|
||||
}
|
||||
|
||||
func (msg *CSSInsertRule) Encode() []byte {
|
||||
buf := make([]byte, 31+len(msg.Rule))
|
||||
buf[0] = 37
|
||||
p := 1
|
||||
p = WriteUint(msg.ID, buf, p)
|
||||
p = WriteString(msg.Rule, buf, p)
|
||||
p = WriteUint(msg.Index, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *CSSInsertRule) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *CSSInsertRule) TypeID() int {
|
||||
return 37
|
||||
}
|
||||
|
||||
type CSSDeleteRule struct {
|
||||
message
|
||||
ID uint64
|
||||
Index uint64
|
||||
}
|
||||
|
||||
func (msg *CSSDeleteRule) Encode() []byte {
|
||||
buf := make([]byte, 21)
|
||||
buf[0] = 38
|
||||
p := 1
|
||||
p = WriteUint(msg.ID, buf, p)
|
||||
p = WriteUint(msg.Index, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *CSSDeleteRule) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *CSSDeleteRule) TypeID() int {
|
||||
return 38
|
||||
}
|
||||
|
||||
type Fetch struct {
|
||||
message
|
||||
Method string
|
||||
URL string
|
||||
Request string
|
||||
Response string
|
||||
Status uint64
|
||||
Timestamp uint64
|
||||
Duration uint64
|
||||
}
|
||||
|
||||
func (msg *Fetch) Encode() []byte {
|
||||
buf := make([]byte, 71+len(msg.Method)+len(msg.URL)+len(msg.Request)+len(msg.Response))
|
||||
buf[0] = 39
|
||||
p := 1
|
||||
p = WriteString(msg.Method, buf, p)
|
||||
p = WriteString(msg.URL, buf, p)
|
||||
p = WriteString(msg.Request, buf, p)
|
||||
p = WriteString(msg.Response, buf, p)
|
||||
p = WriteUint(msg.Status, buf, p)
|
||||
p = WriteUint(msg.Timestamp, buf, p)
|
||||
p = WriteUint(msg.Duration, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *Fetch) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *Fetch) TypeID() int {
|
||||
return 39
|
||||
}
|
||||
|
||||
type Profiler struct {
|
||||
message
|
||||
Name string
|
||||
|
|
@ -1639,39 +1503,6 @@ func (msg *SetNodeFocus) TypeID() int {
|
|||
return 58
|
||||
}
|
||||
|
||||
type LongTask struct {
|
||||
message
|
||||
Timestamp uint64
|
||||
Duration uint64
|
||||
Context uint64
|
||||
ContainerType uint64
|
||||
ContainerSrc string
|
||||
ContainerId string
|
||||
ContainerName string
|
||||
}
|
||||
|
||||
func (msg *LongTask) Encode() []byte {
|
||||
buf := make([]byte, 71+len(msg.ContainerSrc)+len(msg.ContainerId)+len(msg.ContainerName))
|
||||
buf[0] = 59
|
||||
p := 1
|
||||
p = WriteUint(msg.Timestamp, buf, p)
|
||||
p = WriteUint(msg.Duration, buf, p)
|
||||
p = WriteUint(msg.Context, buf, p)
|
||||
p = WriteUint(msg.ContainerType, buf, p)
|
||||
p = WriteString(msg.ContainerSrc, buf, p)
|
||||
p = WriteString(msg.ContainerId, buf, p)
|
||||
p = WriteString(msg.ContainerName, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *LongTask) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *LongTask) TypeID() int {
|
||||
return 59
|
||||
}
|
||||
|
||||
type SetNodeAttributeURLBased struct {
|
||||
message
|
||||
ID uint64
|
||||
|
|
@ -1724,37 +1555,6 @@ func (msg *SetCSSDataURLBased) TypeID() int {
|
|||
return 61
|
||||
}
|
||||
|
||||
type IssueEventDeprecated struct {
|
||||
message
|
||||
MessageID uint64
|
||||
Timestamp uint64
|
||||
Type string
|
||||
ContextString string
|
||||
Context string
|
||||
Payload string
|
||||
}
|
||||
|
||||
func (msg *IssueEventDeprecated) Encode() []byte {
|
||||
buf := make([]byte, 61+len(msg.Type)+len(msg.ContextString)+len(msg.Context)+len(msg.Payload))
|
||||
buf[0] = 62
|
||||
p := 1
|
||||
p = WriteUint(msg.MessageID, buf, p)
|
||||
p = WriteUint(msg.Timestamp, buf, p)
|
||||
p = WriteString(msg.Type, buf, p)
|
||||
p = WriteString(msg.ContextString, buf, p)
|
||||
p = WriteString(msg.Context, buf, p)
|
||||
p = WriteString(msg.Payload, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *IssueEventDeprecated) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *IssueEventDeprecated) TypeID() int {
|
||||
return 62
|
||||
}
|
||||
|
||||
type TechnicalInfo struct {
|
||||
message
|
||||
Type string
|
||||
|
|
@ -1822,33 +1622,6 @@ func (msg *AssetCache) TypeID() int {
|
|||
return 66
|
||||
}
|
||||
|
||||
type CSSInsertRuleURLBased struct {
|
||||
message
|
||||
ID uint64
|
||||
Rule string
|
||||
Index uint64
|
||||
BaseURL string
|
||||
}
|
||||
|
||||
func (msg *CSSInsertRuleURLBased) Encode() []byte {
|
||||
buf := make([]byte, 41+len(msg.Rule)+len(msg.BaseURL))
|
||||
buf[0] = 67
|
||||
p := 1
|
||||
p = WriteUint(msg.ID, buf, p)
|
||||
p = WriteString(msg.Rule, buf, p)
|
||||
p = WriteUint(msg.Index, buf, p)
|
||||
p = WriteString(msg.BaseURL, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *CSSInsertRuleURLBased) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *CSSInsertRuleURLBased) TypeID() int {
|
||||
return 67
|
||||
}
|
||||
|
||||
type MouseClick struct {
|
||||
message
|
||||
ID uint64
|
||||
|
|
@ -2149,31 +1922,6 @@ func (msg *Zustand) TypeID() int {
|
|||
return 79
|
||||
}
|
||||
|
||||
type BatchMeta struct {
|
||||
message
|
||||
PageNo uint64
|
||||
FirstIndex uint64
|
||||
Timestamp int64
|
||||
}
|
||||
|
||||
func (msg *BatchMeta) Encode() []byte {
|
||||
buf := make([]byte, 31)
|
||||
buf[0] = 80
|
||||
p := 1
|
||||
p = WriteUint(msg.PageNo, buf, p)
|
||||
p = WriteUint(msg.FirstIndex, buf, p)
|
||||
p = WriteInt(msg.Timestamp, buf, p)
|
||||
return buf[:p]
|
||||
}
|
||||
|
||||
func (msg *BatchMeta) Decode() Message {
|
||||
return msg
|
||||
}
|
||||
|
||||
func (msg *BatchMeta) TypeID() int {
|
||||
return 80
|
||||
}
|
||||
|
||||
type BatchMetadata struct {
|
||||
message
|
||||
Version uint64
|
||||
|
|
|
|||
|
|
@ -68,15 +68,6 @@ func DecodeSessionStart(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeSessionEndDeprecated(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &SessionEndDeprecated{}
|
||||
if msg.Timestamp, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeSetPageLocationDeprecated(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &SetPageLocationDeprecated{}
|
||||
|
|
@ -390,21 +381,6 @@ func DecodePageRenderTiming(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeJSExceptionDeprecated(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &JSExceptionDeprecated{}
|
||||
if msg.Name, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Message, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Payload, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeIntegrationEvent(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &IntegrationEvent{}
|
||||
|
|
@ -633,60 +609,6 @@ func DecodeSetNodeAttributeDictGlobal(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeCSSInsertRule(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &CSSInsertRule{}
|
||||
if msg.ID, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Rule, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Index, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeCSSDeleteRule(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &CSSDeleteRule{}
|
||||
if msg.ID, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Index, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeFetch(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &Fetch{}
|
||||
if msg.Method, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.URL, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Request, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Response, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Status, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Timestamp, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Duration, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeProfiler(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &Profiler{}
|
||||
|
|
@ -999,33 +921,6 @@ func DecodeSetNodeFocus(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeLongTask(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &LongTask{}
|
||||
if msg.Timestamp, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Duration, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Context, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.ContainerType, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.ContainerSrc, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.ContainerId, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.ContainerName, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeSetNodeAttributeURLBased(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &SetNodeAttributeURLBased{}
|
||||
|
|
@ -1059,30 +954,6 @@ func DecodeSetCSSDataURLBased(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeIssueEventDeprecated(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &IssueEventDeprecated{}
|
||||
if msg.MessageID, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Timestamp, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Type, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.ContextString, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Context, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Payload, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeTechnicalInfo(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &TechnicalInfo{}
|
||||
|
|
@ -1116,24 +987,6 @@ func DecodeAssetCache(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeCSSInsertRuleURLBased(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &CSSInsertRuleURLBased{}
|
||||
if msg.ID, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Rule, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Index, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.BaseURL, err = reader.ReadString(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeMouseClick(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &MouseClick{}
|
||||
|
|
@ -1314,21 +1167,6 @@ func DecodeZustand(reader BytesReader) (Message, error) {
|
|||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeBatchMeta(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &BatchMeta{}
|
||||
if msg.PageNo, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.FirstIndex, err = reader.ReadUint(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if msg.Timestamp, err = reader.ReadInt(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return msg, err
|
||||
}
|
||||
|
||||
func DecodeBatchMetadata(reader BytesReader) (Message, error) {
|
||||
var err error = nil
|
||||
msg := &BatchMetadata{}
|
||||
|
|
@ -2088,8 +1926,6 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) {
|
|||
return DecodeTimestamp(reader)
|
||||
case 1:
|
||||
return DecodeSessionStart(reader)
|
||||
case 3:
|
||||
return DecodeSessionEndDeprecated(reader)
|
||||
case 4:
|
||||
return DecodeSetPageLocationDeprecated(reader)
|
||||
case 5:
|
||||
|
|
@ -2132,8 +1968,6 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) {
|
|||
return DecodePageLoadTiming(reader)
|
||||
case 24:
|
||||
return DecodePageRenderTiming(reader)
|
||||
case 25:
|
||||
return DecodeJSExceptionDeprecated(reader)
|
||||
case 26:
|
||||
return DecodeIntegrationEvent(reader)
|
||||
case 27:
|
||||
|
|
@ -2154,12 +1988,6 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) {
|
|||
return DecodeStringDictGlobal(reader)
|
||||
case 35:
|
||||
return DecodeSetNodeAttributeDictGlobal(reader)
|
||||
case 37:
|
||||
return DecodeCSSInsertRule(reader)
|
||||
case 38:
|
||||
return DecodeCSSDeleteRule(reader)
|
||||
case 39:
|
||||
return DecodeFetch(reader)
|
||||
case 40:
|
||||
return DecodeProfiler(reader)
|
||||
case 41:
|
||||
|
|
@ -2198,22 +2026,16 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) {
|
|||
return DecodeLoadFontFace(reader)
|
||||
case 58:
|
||||
return DecodeSetNodeFocus(reader)
|
||||
case 59:
|
||||
return DecodeLongTask(reader)
|
||||
case 60:
|
||||
return DecodeSetNodeAttributeURLBased(reader)
|
||||
case 61:
|
||||
return DecodeSetCSSDataURLBased(reader)
|
||||
case 62:
|
||||
return DecodeIssueEventDeprecated(reader)
|
||||
case 63:
|
||||
return DecodeTechnicalInfo(reader)
|
||||
case 64:
|
||||
return DecodeCustomIssue(reader)
|
||||
case 66:
|
||||
return DecodeAssetCache(reader)
|
||||
case 67:
|
||||
return DecodeCSSInsertRuleURLBased(reader)
|
||||
case 68:
|
||||
return DecodeMouseClick(reader)
|
||||
case 69:
|
||||
|
|
@ -2238,8 +2060,6 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) {
|
|||
return DecodeJSException(reader)
|
||||
case 79:
|
||||
return DecodeZustand(reader)
|
||||
case 80:
|
||||
return DecodeBatchMeta(reader)
|
||||
case 81:
|
||||
return DecodeBatchMetadata(reader)
|
||||
case 82:
|
||||
|
|
|
|||
|
|
@ -89,15 +89,13 @@ func (m *messageReaderImpl) Parse() (err error) {
	if err != nil {
		return fmt.Errorf("read message err: %s", err)
	}
	if m.msgType == MsgBatchMeta || m.msgType == MsgBatchMetadata {
	if m.msgType == MsgBatchMetadata {
		if len(m.list) > 0 {
			return fmt.Errorf("batch meta not at the start of batch")
		}
		switch message := msg.(type) {
		case *BatchMetadata:
			m.version = int(message.Version)
		case *BatchMeta:
			m.version = 0
		}
		if m.version != 1 {
			// Unsupported tracker version, reset reader
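After this change the reader only accepts a leading BatchMetadata record and keys the protocol version off it, rejecting anything but version 1. A minimal sketch of that gate follows; the type and the accepted version mirror the hunk above, but the names are redeclared here purely for illustration.

```go
package main

import (
	"errors"
	"fmt"
)

// batchMetadata is an illustrative stand-in for the real message type.
type batchMetadata struct{ Version uint64 }

// checkBatchHeader enforces two rules from the diff: metadata must be the
// first message of the batch, and only protocol version 1 is supported.
func checkBatchHeader(first interface{}, alreadyParsed int) (int, error) {
	if alreadyParsed > 0 {
		return 0, errors.New("batch meta not at the start of batch")
	}
	meta, ok := first.(*batchMetadata)
	if !ok {
		return 0, errors.New("batch does not start with metadata")
	}
	if meta.Version != 1 {
		return 0, fmt.Errorf("unsupported tracker version: %d", meta.Version)
	}
	return int(meta.Version), nil
}

func main() {
	v, err := checkBatchHeader(&batchMetadata{Version: 1}, 0)
	fmt.Println(v, err) // 1 <nil>
	_, err = checkBatchHeader(&batchMetadata{Version: 0}, 0)
	fmt.Println(err) // unsupported tracker version: 0
}
```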
@ -79,31 +79,34 @@ func (e *handlersImpl) GetAll() []*api.Description {
	}
}

func getSessionTimestamp(req *StartSessionRequest, startTimeMili int64) (ts uint64) {
	ts = uint64(req.Timestamp)
func getSessionTimestamp(req *StartSessionRequest, startTimeMili int64) uint64 {
	if req.IsOffline {
		return
		return uint64(req.Timestamp)
	}
	c, err := semver.NewConstraint(">=4.1.6")
	ts := uint64(startTimeMili)
	if req.BufferDiff > 0 && req.BufferDiff < 5*60*1000 {
		ts -= req.BufferDiff
	}
	return ts
}

func validateTrackerVersion(ver string) error {
	c, err := semver.NewConstraint(">=6.0.0")
	if err != nil {
		return
		return err
	}
	ver := req.TrackerVersion
	parts := strings.Split(ver, "-")
	if len(parts) > 1 {
		ver = parts[0]
	}
	v, err := semver.NewVersion(ver)
	if err != nil {
		return
		return err
	}
	if c.Check(v) {
		ts = uint64(startTimeMili)
		if req.BufferDiff > 0 && req.BufferDiff < 5*60*1000 {
			ts -= req.BufferDiff
		}
	if !c.Check(v) {
		return errors.New("unsupported tracker version")
	}
	return
	return nil
}

func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) {

@ -132,6 +135,11 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req

	// Add tracker version to context
	r = r.WithContext(context.WithValue(r.Context(), "tracker", req.TrackerVersion))
	if err := validateTrackerVersion(req.TrackerVersion); err != nil {
		e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err)
		e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize)
		return
	}

	// Handler's logic
	if req.ProjectKey == nil {
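The new validateTrackerVersion drops any pre-release suffix ("6.1.2-beta" becomes "6.1.2") and checks the remainder against ">=6.0.0". A standalone sketch of the same check is below; the diff only shows the `semver` identifier, so the Masterminds import path is an assumption.

```go
package main

import (
	"errors"
	"fmt"
	"strings"

	"github.com/Masterminds/semver/v3" // assumed import; the diff only shows the `semver` package name
)

// validateTrackerVersion mirrors the handler change above: strip a pre-release
// suffix, parse what remains, and require version 6.0.0 or newer.
func validateTrackerVersion(ver string) error {
	c, err := semver.NewConstraint(">=6.0.0")
	if err != nil {
		return err
	}
	if parts := strings.Split(ver, "-"); len(parts) > 1 {
		ver = parts[0]
	}
	v, err := semver.NewVersion(ver)
	if err != nil {
		return err
	}
	if !c.Check(v) {
		return errors.New("unsupported tracker version")
	}
	return nil
}

func main() {
	fmt.Println(validateTrackerVersion("6.1.2-beta")) // <nil>
	fmt.Println(validateTrackerVersion("5.4.0"))      // unsupported tracker version
}
```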
@ -76,7 +76,7 @@ func (s *sessionsImpl) getFromDB(sessionID uint64) (*Session, error) {
	}
	proj, err := s.projects.GetProject(session.ProjectID)
	if err != nil {
		return nil, err
		return nil, fmt.Errorf("failed to get active project: %d, err: %s", session.ProjectID, err)
	}
	session.SaveRequestPayload = proj.SaveRequestPayloads
	return session, nil
11
ee/api/.gitignore
vendored
11
ee/api/.gitignore
vendored
|
|
@ -190,7 +190,10 @@ Pipfile.lock
|
|||
/chalicelib/core/autocomplete/autocomplete.py
|
||||
/chalicelib/core/boarding.py
|
||||
/chalicelib/core/canvas.py
|
||||
/chalicelib/core/collaborations/*
|
||||
/chalicelib/core/collaborations/__init__.py
|
||||
/chalicelib/core/collaborations/collaboration_base.py
|
||||
/chalicelib/core/collaborations/collaboration_msteams.py
|
||||
/chalicelib/core/collaborations/collaboration_slack.py
|
||||
/chalicelib/core/countries.py
|
||||
/chalicelib/core/metrics/custom_metrics.py
|
||||
/chalicelib/core/metrics/dashboards.py
|
||||
|
|
@ -212,15 +215,15 @@ Pipfile.lock
|
|||
/chalicelib/core/saved_search.py
|
||||
/chalicelib/core/sessions/sessions.py
|
||||
/chalicelib/core/sessions/sessions_ch.py
|
||||
/chalicelib/core/sessions/sessions_devtool.py
|
||||
/chalicelib/core/sessions/sessions_favorite.py
|
||||
/chalicelib/core/sessions/sessions_devtool/sessions_devtool.py
|
||||
/chalicelib/core/sessions/sessions_favorite/sessions_favorite.py
|
||||
/chalicelib/core/sessions/sessions_assignments.py
|
||||
/chalicelib/core/sessions/sessions_metas.py
|
||||
/chalicelib/core/sessions/sessions_mobs.py
|
||||
/chalicelib/core/sessions/sessions_replay.py
|
||||
/chalicelib/core/sessions/sessions_search.py
|
||||
/chalicelib/core/sessions/performance_event.py
|
||||
/chalicelib/core/sessions/sessions_viewed.py
|
||||
/chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
|
||||
/chalicelib/core/sessions/unprocessed_sessions.py
|
||||
/chalicelib/core/metrics/modules
|
||||
/chalicelib/core/socket_ios.py
|
||||
|
|
|
|||
|
|
@ -4,9 +4,10 @@ from decouple import config

logger = logging.getLogger(__name__)

from . import errors as errors_legacy

if config("EXP_ERRORS_SEARCH", cast=bool, default=False):
    logger.info(">>> Using experimental error search")
    from . import errors as errors_legacy
    from . import errors_ch as errors
    from . import errors_details_exp as errors_details
else:
@ -12,7 +12,3 @@ if config("EXP_SESSIONS_SEARCH", cast=bool, default=False):
else:
    from . import sessions
    from . import sessions_search_exp

from chalicelib.core.sessions import sessions_devtool_ee as sessions_devtool
from chalicelib.core.sessions import sessions_viewed_ee as sessions_viewed
from chalicelib.core.sessions import sessions_favorite_ee as sessions_favorite
@ -0,0 +1,3 @@
from .sessions_devtool import *

from .sessions_devtool_ee import get_urls

@ -1,9 +1,9 @@
from fastapi.security import SecurityScopes

import schemas
from chalicelib.core import permissions
from chalicelib.core.sessions.sessions_devtool import *
from .sessions_devtool import get_urls as _get_urls

_get_urls = get_urls
SCOPES = SecurityScopes([schemas.Permissions.DEV_TOOLS])


@ -0,0 +1,2 @@
from .sessions_favorite import *
from .sessions_favorite_ee import *
@ -2,15 +2,15 @@ import logging

from decouple import config

from chalicelib.utils import ch_client, exp_ch_helper

logger = logging.getLogger(__name__)
import schemas
from chalicelib.core.sessions import sessions_mobs, sessions_devtool
from chalicelib.core.sessions.sessions_favorite import *
from .sessions_favorite import add_favorite_session as _add_favorite_session, \
    remove_favorite_session as _remove_favorite_session, \
    favorite_session_exists
from chalicelib.utils import ch_client, exp_ch_helper
from chalicelib.utils.storage import extra

_add_favorite_session = add_favorite_session
_remove_favorite_session = remove_favorite_session
logger = logging.getLogger(__name__)


def add_favorite_session(context: schemas.CurrentContext, project_id, session_id):

@ -32,7 +32,7 @@ def remove_favorite_session(context: schemas.CurrentContext, project_id, session
def favorite_session(context: schemas.CurrentContext, project_id, session_id):
    keys = sessions_mobs.__get_mob_keys(project_id=project_id, session_id=session_id)
    keys += sessions_mobs.__get_mob_keys_deprecated(session_id=session_id) # To support old sessions
    keys += sessions_devtool.__get_devtools_keys(project_id=project_id, session_id=session_id)
    keys += sessions_devtool.get_devtools_keys(project_id=project_id, session_id=session_id)

    if favorite_session_exists(user_id=context.user_id, session_id=session_id):
        tag = config('RETENTION_D_VALUE', default='default')

@ -64,7 +64,7 @@ def add_favorite_session_to_ch(project_id, user_id, session_id, sign=1):
            query = f"""INSERT INTO {exp_ch_helper.get_user_favorite_sessions_table()}(project_id,user_id, session_id, sign)
                        VALUES (%(project_id)s,%(userId)s,%(sessionId)s,%(sign)s);"""
            params = {"userId": user_id, "sessionId": session_id, "project_id": project_id, "sign": sign}
            cur.execute(query=query, params=params)
            cur.execute(query=query, parameters=params)

    except Exception as err:
        logger.error("------- Exception while adding favorite session to CH")
@ -31,6 +31,7 @@ def get_note(tenant_id, project_id, user_id, note_id, share=None):
        row = helper.dict_to_camel_case(row)
        if row:
            row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
            row["updatedAt"] = TimeUTC.datetime_to_timestamp(row["updatedAt"])
        return row


@ -57,42 +58,75 @@ def get_session_notes(tenant_id, project_id, session_id, user_id):

def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.SearchNoteSchema):
    with pg_client.PostgresClient() as cur:
        conditions = ["sessions_notes.project_id = %(project_id)s", "sessions_notes.deleted_at IS NULL"]
        extra_params = {}
        if data.tags and len(data.tags) > 0:
            k = "tag_value"
        # base conditions
        conditions = [
            "sessions_notes.project_id = %(project_id)s",
            "sessions_notes.deleted_at IS NULL",
            "users.tenant_id = %(tenant_id)s"
        ]
        params = {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id}

        # tag conditions
        if data.tags:
            tag_key = "tag_value"
            conditions.append(
                sh.multi_conditions(f"%({k})s = sessions_notes.tag", data.tags, value_key=k))
            extra_params = sh.multi_values(data.tags, value_key=k)
                sh.multi_conditions(f"%({tag_key})s = sessions_notes.tag", data.tags, value_key=tag_key)
            )
            params.update(sh.multi_values(data.tags, value_key=tag_key))

        # filter by ownership or shared status
        if data.shared_only:
            conditions.append("sessions_notes.is_public AND users.tenant_id = %(tenant_id)s")
            conditions.append("sessions_notes.is_public IS TRUE")
        elif data.mine_only:
            conditions.append("sessions_notes.user_id = %(user_id)s")
        else:
            conditions.append(
                "(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public AND users.tenant_id = %(tenant_id)s)")
        query = cur.mogrify(f"""SELECT COUNT(1) OVER () AS full_count, sessions_notes.*, users.name AS user_name
                                FROM sessions_notes INNER JOIN users USING (user_id)
                                WHERE {" AND ".join(conditions)}
                                ORDER BY created_at {data.order}
                                LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""",
                            {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params})
            conditions.append("(sessions_notes.user_id = %(user_id)s OR sessions_notes.is_public)")

        # search condition
        if data.search:
            conditions.append("sessions_notes.message ILIKE %(search)s")
            params["search"] = f"%{data.search}%"

        query = f"""
            SELECT
                COUNT(1) OVER () AS full_count,
                sessions_notes.*,
                users.name AS user_name
            FROM
                sessions_notes
            INNER JOIN
                users USING (user_id)
            WHERE
                {" AND ".join(conditions)}
            ORDER BY
                created_at {data.order}
            LIMIT
                %(limit)s OFFSET %(offset)s;
        """
        params.update({
            "limit": data.limit,
            "offset": data.limit * (data.page - 1)
        })

        query = cur.mogrify(query, params)
        logger.debug(query)
        cur.execute(query=query)
        cur.execute(query)
        rows = cur.fetchall()

        result = {"count": 0, "notes": helper.list_to_camel_case(rows)}
        if len(rows) > 0:
        if rows:
            result["count"] = rows[0]["fullCount"]
            for row in rows:
                row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
                row.pop("fullCount")
        for row in rows:
            row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
            row.pop("fullCount")

        return result


def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNoteSchema):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public)
                                VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s)
        query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public, thumbnail, start_at, end_at)
                                VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s, %(thumbnail)s, %(start_at)s, %(end_at)s)
                                RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND users.tenant_id=%(tenant_id)s) AS user_name;""",
                            {"user_id": user_id, "project_id": project_id, "session_id": session_id,
                             **data.model_dump(),

@ -114,6 +148,9 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot
        sub_query.append("is_public = %(is_public)s")
    if data.timestamp is not None:
        sub_query.append("timestamp = %(timestamp)s")

    sub_query.append("updated_at = timezone('utc'::text, now())")

    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(f"""UPDATE public.sessions_notes
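The rewritten get_all_notes_by_project_id builds its WHERE clause by accumulating condition strings and named parameters together and joining them once at the end. The sketch below restates that design choice in Go for illustration only; it is not the repository's code (which is Python), and the ":name" placeholder style plus the ANY(:tags) filter are assumptions.

```go
package main

import (
	"fmt"
	"strings"
)

// buildNotesQuery sketches the pattern used above: collect WHERE conditions
// and their parameters side by side, then join the conditions at the end.
// Table and column names come from the diff; everything else is illustrative.
func buildNotesQuery(tags []string, sharedOnly, mineOnly bool, search string) (string, map[string]interface{}) {
	conditions := []string{
		"sessions_notes.project_id = :project_id",
		"sessions_notes.deleted_at IS NULL",
		"users.tenant_id = :tenant_id",
	}
	params := map[string]interface{}{}

	if len(tags) > 0 {
		conditions = append(conditions, "sessions_notes.tag = ANY(:tags)")
		params["tags"] = tags
	}
	switch {
	case sharedOnly:
		conditions = append(conditions, "sessions_notes.is_public IS TRUE")
	case mineOnly:
		conditions = append(conditions, "sessions_notes.user_id = :user_id")
	default:
		conditions = append(conditions, "(sessions_notes.user_id = :user_id OR sessions_notes.is_public)")
	}
	if search != "" {
		conditions = append(conditions, "sessions_notes.message ILIKE :search")
		params["search"] = "%" + search + "%"
	}

	query := "SELECT sessions_notes.*, users.name AS user_name\n" +
		"FROM sessions_notes INNER JOIN users USING (user_id)\n" +
		"WHERE " + strings.Join(conditions, " AND ")
	return query, params
}

func main() {
	q, p := buildNotesQuery([]string{"ISSUE"}, false, true, "checkout")
	fmt.Println(q)
	fmt.Println(p)
}
```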
@ -3,7 +3,7 @@ import logging

import schemas
from chalicelib.core import metadata, projects
from chalicelib.core.sessions import sessions_favorite, sessions_legacy, sessions, sessions_legacy_mobil
from . import sessions_favorite, sessions as sessions_legacy, sessions_ch as sessions, sessions_legacy_mobil
from chalicelib.utils import pg_client, helper, ch_client, exp_ch_helper

logger = logging.getLogger(__name__)

@ -0,0 +1,2 @@
from .sessions_viewed import *
from .sessions_viewed_ee import *

@ -1,9 +1,9 @@
from chalicelib.utils import ch_client, exp_ch_helper
import logging
from decouple import config
from chalicelib.core.sessions.sessions_viewed import *

_view_session = view_session
from decouple import config

from chalicelib.utils import ch_client, exp_ch_helper
from .sessions_viewed import view_session as _view_session

logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))


@ -15,7 +15,7 @@ def view_session(project_id, user_id, session_id):
            query = f"""INSERT INTO {exp_ch_helper.get_user_viewed_sessions_table()}(project_id, user_id, session_id)
                        VALUES (%(project_id)s,%(userId)s,%(sessionId)s);"""
            params = {"userId": user_id, "sessionId": session_id, "project_id": project_id}
            cur.execute(query=query, params=params)
            cur.execute(query=query, parameters=params)
    except Exception as err:
        logging.error("------- Exception while adding viewed session to CH")
        logging.error(err)
@ -8,8 +8,8 @@ from pydantic import BaseModel
from starlette import status

import schemas
from chalicelib.core import authorizers, metadata
from chalicelib.core import tenants, roles, spot, scope
from chalicelib.core import authorizers
from chalicelib.core import tenants, roles, spot
from chalicelib.utils import email_helper
from chalicelib.utils import helper
from chalicelib.utils import pg_client
@ -10,7 +10,10 @@ rm -rf ./chalicelib/core/announcements.py
|
|||
rm -rf ./chalicelib/core/assist.py
|
||||
rm -rf ./chalicelib/core/authorizers.py
|
||||
rm -rf ./chalicelib/core/autocomplete/autocomplete.py
|
||||
rm -rf ./chalicelib/core/collaborations
|
||||
rm -rf ./chalicelib/core/collaborations/__init__.py
|
||||
rm -rf ./chalicelib/core/collaborations/collaboration_base.py
|
||||
rm -rf ./chalicelib/core/collaborations/collaboration_msteams.py
|
||||
rm -rf ./chalicelib/core/collaborations/collaboration_slack.py
|
||||
rm -rf ./chalicelib/core/countries.py
|
||||
rm -rf ./chalicelib/core/metrics/custom_metrics.py
|
||||
rm -rf ./chalicelib/core/metrics/funnels.py
|
||||
|
|
@ -33,15 +36,15 @@ rm -rf ./chalicelib/core/mobile.py
|
|||
rm -rf ./chalicelib/core/saved_search.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_ch.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_devtool.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_favorite.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_devtool/sessions_devtool.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_favorite/sessions_favorite.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_assignments.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_metas.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_mobs.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_replay.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_search.py
|
||||
rm -rf ./chalicelib/core/sessions/performance_event.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_viewed.py
|
||||
rm -rf ./chalicelib/core/sessions/sessions_viewed/sessions_viewed.py
|
||||
rm -rf ./chalicelib/core/sessions/unprocessed_sessions.py
|
||||
rm -rf ./chalicelib/core/metrics/modules
|
||||
rm -rf ./chalicelib/core/socket_ios.py
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ from chalicelib.core import scope
from chalicelib.core import tenants, users, projects, license
from chalicelib.core import webhook
from chalicelib.core.collaborations.collaboration_slack import Slack
from chalicelib.core.errors import errors
from chalicelib.core.errors import errors, errors_details
from chalicelib.core.metrics import heatmaps
from chalicelib.core.sessions import sessions, sessions_notes, sessions_replay, sessions_favorite, sessions_assignments, \
    sessions_viewed, unprocessed_sessions, sessions_search

@ -354,8 +354,8 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str,
             dependencies=[OR_scope(Permissions.DEV_TOOLS, ServicePermissions.DEV_TOOLS)])
def errors_get_details(projectId: int, errorId: str, density24: int = 24, density30: int = 30,
                       context: schemas.CurrentContext = Depends(OR_context)):
    data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId,
                              **{"density24": density24, "density30": density30})
    data = errors_details.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId,
                                      **{"density24": density24, "density30": density30})
    return data


@ -503,6 +503,18 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str,
    }


@app.get('/{projectId}/notes/{noteId}', tags=["sessions", "notes"],
         dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def get_note_by_id(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)):
    data = sessions_notes.get_note(tenant_id=context.tenant_id, project_id=projectId, note_id=noteId,
                                   user_id=context.user_id)
    if "errors" in data:
        return data
    return {
        'data': data
    }


@app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"],
          dependencies=[OR_scope(Permissions.SESSION_REPLAY)])
def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...),
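The hunk above adds a GET route that exposes a single note at /{projectId}/notes/{noteId}. A minimal client call is sketched below; only the path shape comes from the diff, while the base URL and the bearer-token auth scheme are assumptions for illustration.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

// getNote fetches one note through the endpoint added above.
func getNote(baseURL, token string, projectID, noteID int) (string, error) {
	url := fmt.Sprintf("%s/%d/notes/%d", baseURL, projectID, noteID)
	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		return "", err
	}
	req.Header.Set("Authorization", "Bearer "+token) // assumed auth scheme
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", err
	}
	return string(body), nil
}

func main() {
	body, err := getNote("https://openreplay.example.com/api", "TOKEN", 1, 42)
	fmt.Println(body, err)
}
```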
@ -35,13 +35,6 @@ class SessionStart(Message):
|
|||
self.user_id = user_id
|
||||
|
||||
|
||||
class SessionEndDeprecated(Message):
|
||||
__id__ = 3
|
||||
|
||||
def __init__(self, timestamp):
|
||||
self.timestamp = timestamp
|
||||
|
||||
|
||||
class SetPageLocationDeprecated(Message):
|
||||
__id__ = 4
|
||||
|
||||
|
|
@ -231,15 +224,6 @@ class PageRenderTiming(Message):
|
|||
self.time_to_interactive = time_to_interactive
|
||||
|
||||
|
||||
class JSExceptionDeprecated(Message):
|
||||
__id__ = 25
|
||||
|
||||
def __init__(self, name, message, payload):
|
||||
self.name = name
|
||||
self.message = message
|
||||
self.payload = payload
|
||||
|
||||
|
||||
class IntegrationEvent(Message):
|
||||
__id__ = 26
|
||||
|
||||
|
|
@ -339,34 +323,21 @@ class PageEvent(Message):
|
|||
self.web_vitals = web_vitals
|
||||
|
||||
|
||||
class CSSInsertRule(Message):
|
||||
__id__ = 37
|
||||
class StringDictGlobal(Message):
|
||||
__id__ = 34
|
||||
|
||||
def __init__(self, id, rule, index):
|
||||
def __init__(self, key, value):
|
||||
self.key = key
|
||||
self.value = value
|
||||
|
||||
|
||||
class SetNodeAttributeDictGlobal(Message):
|
||||
__id__ = 35
|
||||
|
||||
def __init__(self, id, name, value):
|
||||
self.id = id
|
||||
self.rule = rule
|
||||
self.index = index
|
||||
|
||||
|
||||
class CSSDeleteRule(Message):
|
||||
__id__ = 38
|
||||
|
||||
def __init__(self, id, index):
|
||||
self.id = id
|
||||
self.index = index
|
||||
|
||||
|
||||
class Fetch(Message):
|
||||
__id__ = 39
|
||||
|
||||
def __init__(self, method, url, request, response, status, timestamp, duration):
|
||||
self.method = method
|
||||
self.url = url
|
||||
self.request = request
|
||||
self.response = response
|
||||
self.status = status
|
||||
self.timestamp = timestamp
|
||||
self.duration = duration
|
||||
self.name = name
|
||||
self.value = value
|
||||
|
||||
|
||||
class Profiler(Message):
|
||||
|
|
@ -549,19 +520,6 @@ class SetNodeFocus(Message):
|
|||
self.id = id
|
||||
|
||||
|
||||
class LongTask(Message):
|
||||
__id__ = 59
|
||||
|
||||
def __init__(self, timestamp, duration, context, container_type, container_src, container_id, container_name):
|
||||
self.timestamp = timestamp
|
||||
self.duration = duration
|
||||
self.context = context
|
||||
self.container_type = container_type
|
||||
self.container_src = container_src
|
||||
self.container_id = container_id
|
||||
self.container_name = container_name
|
||||
|
||||
|
||||
class SetNodeAttributeURLBased(Message):
|
||||
__id__ = 60
|
||||
|
||||
|
|
@ -581,18 +539,6 @@ class SetCSSDataURLBased(Message):
|
|||
self.base_url = base_url
|
||||
|
||||
|
||||
class IssueEventDeprecated(Message):
|
||||
__id__ = 62
|
||||
|
||||
def __init__(self, message_id, timestamp, type, context_string, context, payload):
|
||||
self.message_id = message_id
|
||||
self.timestamp = timestamp
|
||||
self.type = type
|
||||
self.context_string = context_string
|
||||
self.context = context
|
||||
self.payload = payload
|
||||
|
||||
|
||||
class TechnicalInfo(Message):
|
||||
__id__ = 63
|
||||
|
||||
|
|
@ -616,16 +562,6 @@ class AssetCache(Message):
|
|||
self.url = url
|
||||
|
||||
|
||||
class CSSInsertRuleURLBased(Message):
|
||||
__id__ = 67
|
||||
|
||||
def __init__(self, id, rule, index, base_url):
|
||||
self.id = id
|
||||
self.rule = rule
|
||||
self.index = index
|
||||
self.base_url = base_url
|
||||
|
||||
|
||||
class MouseClick(Message):
|
||||
__id__ = 68
|
||||
|
||||
|
|
@ -734,15 +670,6 @@ class Zustand(Message):
|
|||
self.state = state
|
||||
|
||||
|
||||
class BatchMeta(Message):
|
||||
__id__ = 80
|
||||
|
||||
def __init__(self, page_no, first_index, timestamp):
|
||||
self.page_no = page_no
|
||||
self.first_index = first_index
|
||||
self.timestamp = timestamp
|
||||
|
||||
|
||||
class BatchMetadata(Message):
|
||||
__id__ = 81
|
||||
|
||||
|
|
|
|||
|
|
@ -58,15 +58,6 @@ cdef class SessionStart(PyMessage):
|
|||
self.user_id = user_id
|
||||
|
||||
|
||||
cdef class SessionEndDeprecated(PyMessage):
|
||||
cdef public int __id__
|
||||
cdef public unsigned long timestamp
|
||||
|
||||
def __init__(self, unsigned long timestamp):
|
||||
self.__id__ = 3
|
||||
self.timestamp = timestamp
|
||||
|
||||
|
||||
cdef class SetPageLocationDeprecated(PyMessage):
|
||||
cdef public int __id__
|
||||
cdef public str url
|
||||
|
|
@ -340,19 +331,6 @@ cdef class PageRenderTiming(PyMessage):
|
|||
self.time_to_interactive = time_to_interactive
|
||||
|
||||
|
||||
cdef class JSExceptionDeprecated(PyMessage):
|
||||
cdef public int __id__
|
||||
cdef public str name
|
||||
cdef public str message
|
||||
cdef public str payload
|
||||
|
||||
def __init__(self, str name, str message, str payload):
|
||||
self.__id__ = 25
|
||||
self.name = name
|
||||
self.message = message
|
||||
self.payload = payload
|
||||
|
||||
|
||||
cdef class IntegrationEvent(PyMessage):
|
||||
cdef public int __id__
|
||||
cdef public unsigned long timestamp
|
||||
|
|
@ -511,49 +489,28 @@ cdef class PageEvent(PyMessage):
|
|||
self.web_vitals = web_vitals
|
||||
|
||||
|
||||
cdef class CSSInsertRule(PyMessage):
|
||||
cdef class StringDictGlobal(PyMessage):
|
||||
cdef public int __id__
|
||||
cdef public unsigned long key
|
||||
cdef public str value
|
||||
|
||||
def __init__(self, unsigned long key, str value):
|
||||
self.__id__ = 34
|
||||
self.key = key
|
||||
self.value = value
|
||||
|
||||
|
||||
cdef class SetNodeAttributeDictGlobal(PyMessage):
|
||||
cdef public int __id__
|
||||
cdef public unsigned long id
|
||||
cdef public str rule
|
||||
cdef public unsigned long index
|
||||
cdef public unsigned long name
|
||||
cdef public unsigned long value
|
||||
|
||||
def __init__(self, unsigned long id, str rule, unsigned long index):
|
||||
self.__id__ = 37
|
||||
def __init__(self, unsigned long id, unsigned long name, unsigned long value):
|
||||
self.__id__ = 35
|
||||
self.id = id
|
||||
self.rule = rule
|
||||
self.index = index
|
||||
|
||||
|
||||
cdef class CSSDeleteRule(PyMessage):
|
||||
cdef public int __id__
|
||||
cdef public unsigned long id
|
||||
cdef public unsigned long index
|
||||
|
||||
def __init__(self, unsigned long id, unsigned long index):
|
||||
self.__id__ = 38
|
||||
self.id = id
|
||||
self.index = index
|
||||
|
||||
|
||||
cdef class Fetch(PyMessage):
|
||||
cdef public int __id__
|
||||
cdef public str method
|
||||
cdef public str url
|
||||
cdef public str request
|
||||
cdef public str response
|
||||
cdef public unsigned long status
|
||||
cdef public unsigned long timestamp
|
||||
cdef public unsigned long duration
|
||||
|
||||
def __init__(self, str method, str url, str request, str response, unsigned long status, unsigned long timestamp, unsigned long duration):
|
||||
self.__id__ = 39
|
||||
self.method = method
|
||||
self.url = url
|
||||
self.request = request
|
||||
self.response = response
|
||||
self.status = status
|
||||
self.timestamp = timestamp
|
||||
self.duration = duration
|
||||
self.name = name
|
||||
self.value = value
|
||||
|
||||
|
||||
cdef class Profiler(PyMessage):
|
||||
|
|
@ -821,27 +778,6 @@ cdef class SetNodeFocus(PyMessage):
|
|||
self.id = id
|
||||
|
||||
|
||||
cdef class LongTask(PyMessage):
|
||||
cdef public int __id__
|
||||
cdef public unsigned long timestamp
|
||||
cdef public unsigned long duration
|
||||
cdef public unsigned long context
|
||||
cdef public unsigned long container_type
|
||||
cdef public str container_src
|
||||
cdef public str container_id
|
||||
cdef public str container_name
|
||||
|
||||
def __init__(self, unsigned long timestamp, unsigned long duration, unsigned long context, unsigned long container_type, str container_src, str container_id, str container_name):
|
||||
self.__id__ = 59
|
||||
self.timestamp = timestamp
|
||||
self.duration = duration
|
||||
self.context = context
|
||||
self.container_type = container_type
|
||||
self.container_src = container_src
|
||||
self.container_id = container_id
|
||||
self.container_name = container_name
|
||||
|
||||
|
||||
cdef class SetNodeAttributeURLBased(PyMessage):
|
||||
cdef public int __id__
|
||||
cdef public unsigned long id
|
||||
|
|
@ -870,25 +806,6 @@ cdef class SetCSSDataURLBased(PyMessage):
|
|||
self.base_url = base_url
|
||||
|
||||
|
||||
cdef class IssueEventDeprecated(PyMessage):
|
||||
cdef public int __id__
|
||||
cdef public unsigned long message_id
|
||||
cdef public unsigned long timestamp
|
||||
cdef public str type
|
||||
cdef public str context_string
|
||||
cdef public str context
|
||||
cdef public str payload
|
||||
|
||||
def __init__(self, unsigned long message_id, unsigned long timestamp, str type, str context_string, str context, str payload):
|
||||
self.__id__ = 62
|
||||
self.message_id = message_id
|
||||
self.timestamp = timestamp
|
||||
self.type = type
|
||||
self.context_string = context_string
|
||||
self.context = context
|
||||
self.payload = payload
|
||||
|
||||
|
||||
cdef class TechnicalInfo(PyMessage):
|
||||
cdef public int __id__
|
||||
cdef public str type
|
||||
|
|
@ -920,21 +837,6 @@ cdef class AssetCache(PyMessage):
|
|||
self.url = url
|
||||
|
||||
|
||||
cdef class CSSInsertRuleURLBased(PyMessage):
|
||||
cdef public int __id__
|
||||
cdef public unsigned long id
|
||||
cdef public str rule
|
||||
cdef public unsigned long index
|
||||
cdef public str base_url
|
||||
|
||||
def __init__(self, unsigned long id, str rule, unsigned long index, str base_url):
|
||||
self.__id__ = 67
|
||||
self.id = id
|
||||
self.rule = rule
|
||||
self.index = index
|
||||
self.base_url = base_url
|
||||
|
||||
|
||||
cdef class MouseClick(PyMessage):
|
||||
cdef public int __id__
|
||||
cdef public unsigned long id
|
||||
|
|
@ -1091,19 +993,6 @@ cdef class Zustand(PyMessage):
|
|||
self.state = state
|
||||
|
||||
|
||||
cdef class BatchMeta(PyMessage):
|
||||
cdef public int __id__
|
||||
cdef public unsigned long page_no
|
||||
cdef public unsigned long first_index
|
||||
cdef public long timestamp
|
||||
|
||||
def __init__(self, unsigned long page_no, unsigned long first_index, long timestamp):
|
||||
self.__id__ = 80
|
||||
self.page_no = page_no
|
||||
self.first_index = first_index
|
||||
self.timestamp = timestamp
|
||||
|
||||
|
||||
cdef class BatchMetadata(PyMessage):
|
||||
cdef public int __id__
|
||||
cdef public unsigned long version
|
||||
|
|
|
|||
|
|
@ -118,11 +118,6 @@ class MessageCodec(Codec):
|
|||
user_id=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 3:
|
||||
return SessionEndDeprecated(
|
||||
timestamp=self.read_uint(reader)
|
||||
)
|
||||
|
||||
if message_id == 4:
|
||||
return SetPageLocationDeprecated(
|
||||
url=self.read_string(reader),
|
||||
|
|
@ -270,13 +265,6 @@ class MessageCodec(Codec):
|
|||
time_to_interactive=self.read_uint(reader)
|
||||
)
|
||||
|
||||
if message_id == 25:
|
||||
return JSExceptionDeprecated(
|
||||
name=self.read_string(reader),
|
||||
message=self.read_string(reader),
|
||||
payload=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 26:
|
||||
return IntegrationEvent(
|
||||
timestamp=self.read_uint(reader),
|
||||
|
|
@ -360,28 +348,17 @@ class MessageCodec(Codec):
|
|||
web_vitals=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 37:
|
||||
return CSSInsertRule(
|
||||
id=self.read_uint(reader),
|
||||
rule=self.read_string(reader),
|
||||
index=self.read_uint(reader)
|
||||
if message_id == 34:
|
||||
return StringDictGlobal(
|
||||
key=self.read_uint(reader),
|
||||
value=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 38:
|
||||
return CSSDeleteRule(
|
||||
if message_id == 35:
|
||||
return SetNodeAttributeDictGlobal(
|
||||
id=self.read_uint(reader),
|
||||
index=self.read_uint(reader)
|
||||
)
|
||||
|
||||
if message_id == 39:
|
||||
return Fetch(
|
||||
method=self.read_string(reader),
|
||||
url=self.read_string(reader),
|
||||
request=self.read_string(reader),
|
||||
response=self.read_string(reader),
|
||||
status=self.read_uint(reader),
|
||||
timestamp=self.read_uint(reader),
|
||||
duration=self.read_uint(reader)
|
||||
name=self.read_uint(reader),
|
||||
value=self.read_uint(reader)
|
||||
)
|
||||
|
||||
if message_id == 40:
|
||||
|
|
@ -526,17 +503,6 @@ class MessageCodec(Codec):
|
|||
id=self.read_int(reader)
|
||||
)
|
||||
|
||||
if message_id == 59:
|
||||
return LongTask(
|
||||
timestamp=self.read_uint(reader),
|
||||
duration=self.read_uint(reader),
|
||||
context=self.read_uint(reader),
|
||||
container_type=self.read_uint(reader),
|
||||
container_src=self.read_string(reader),
|
||||
container_id=self.read_string(reader),
|
||||
container_name=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 60:
|
||||
return SetNodeAttributeURLBased(
|
||||
id=self.read_uint(reader),
|
||||
|
|
@ -552,16 +518,6 @@ class MessageCodec(Codec):
|
|||
base_url=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 62:
|
||||
return IssueEventDeprecated(
|
||||
message_id=self.read_uint(reader),
|
||||
timestamp=self.read_uint(reader),
|
||||
type=self.read_string(reader),
|
||||
context_string=self.read_string(reader),
|
||||
context=self.read_string(reader),
|
||||
payload=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 63:
|
||||
return TechnicalInfo(
|
||||
type=self.read_string(reader),
|
||||
|
|
@ -579,14 +535,6 @@ class MessageCodec(Codec):
|
|||
url=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 67:
|
||||
return CSSInsertRuleURLBased(
|
||||
id=self.read_uint(reader),
|
||||
rule=self.read_string(reader),
|
||||
index=self.read_uint(reader),
|
||||
base_url=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 68:
|
||||
return MouseClick(
|
||||
id=self.read_uint(reader),
|
||||
|
|
@ -671,13 +619,6 @@ class MessageCodec(Codec):
|
|||
state=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 80:
|
||||
return BatchMeta(
|
||||
page_no=self.read_uint(reader),
|
||||
first_index=self.read_uint(reader),
|
||||
timestamp=self.read_int(reader)
|
||||
)
|
||||
|
||||
if message_id == 81:
|
||||
return BatchMetadata(
|
||||
version=self.read_uint(reader),
|
||||
|
|
|
|||
|
|
@ -216,11 +216,6 @@ cdef class MessageCodec:
|
|||
user_id=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 3:
|
||||
return SessionEndDeprecated(
|
||||
timestamp=self.read_uint(reader)
|
||||
)
|
||||
|
||||
if message_id == 4:
|
||||
return SetPageLocationDeprecated(
|
||||
url=self.read_string(reader),
|
||||
|
|
@ -368,13 +363,6 @@ cdef class MessageCodec:
|
|||
time_to_interactive=self.read_uint(reader)
|
||||
)
|
||||
|
||||
if message_id == 25:
|
||||
return JSExceptionDeprecated(
|
||||
name=self.read_string(reader),
|
||||
message=self.read_string(reader),
|
||||
payload=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 26:
|
||||
return IntegrationEvent(
|
||||
timestamp=self.read_uint(reader),
|
||||
|
|
@ -458,28 +446,17 @@ cdef class MessageCodec:
|
|||
web_vitals=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 37:
|
||||
return CSSInsertRule(
|
||||
id=self.read_uint(reader),
|
||||
rule=self.read_string(reader),
|
||||
index=self.read_uint(reader)
|
||||
if message_id == 34:
|
||||
return StringDictGlobal(
|
||||
key=self.read_uint(reader),
|
||||
value=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 38:
|
||||
return CSSDeleteRule(
|
||||
if message_id == 35:
|
||||
return SetNodeAttributeDictGlobal(
|
||||
id=self.read_uint(reader),
|
||||
index=self.read_uint(reader)
|
||||
)
|
||||
|
||||
if message_id == 39:
|
||||
return Fetch(
|
||||
method=self.read_string(reader),
|
||||
url=self.read_string(reader),
|
||||
request=self.read_string(reader),
|
||||
response=self.read_string(reader),
|
||||
status=self.read_uint(reader),
|
||||
timestamp=self.read_uint(reader),
|
||||
duration=self.read_uint(reader)
|
||||
name=self.read_uint(reader),
|
||||
value=self.read_uint(reader)
|
||||
)
|
||||
|
||||
if message_id == 40:
|
||||
|
|
@ -624,17 +601,6 @@ cdef class MessageCodec:
|
|||
id=self.read_int(reader)
|
||||
)
|
||||
|
||||
if message_id == 59:
|
||||
return LongTask(
|
||||
timestamp=self.read_uint(reader),
|
||||
duration=self.read_uint(reader),
|
||||
context=self.read_uint(reader),
|
||||
container_type=self.read_uint(reader),
|
||||
container_src=self.read_string(reader),
|
||||
container_id=self.read_string(reader),
|
||||
container_name=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 60:
|
||||
return SetNodeAttributeURLBased(
|
||||
id=self.read_uint(reader),
|
||||
|
|
@ -650,16 +616,6 @@ cdef class MessageCodec:
|
|||
base_url=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 62:
|
||||
return IssueEventDeprecated(
|
||||
message_id=self.read_uint(reader),
|
||||
timestamp=self.read_uint(reader),
|
||||
type=self.read_string(reader),
|
||||
context_string=self.read_string(reader),
|
||||
context=self.read_string(reader),
|
||||
payload=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 63:
|
||||
return TechnicalInfo(
|
||||
type=self.read_string(reader),
|
||||
|
|
@ -677,14 +633,6 @@ cdef class MessageCodec:
|
|||
url=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 67:
|
||||
return CSSInsertRuleURLBased(
|
||||
id=self.read_uint(reader),
|
||||
rule=self.read_string(reader),
|
||||
index=self.read_uint(reader),
|
||||
base_url=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 68:
|
||||
return MouseClick(
|
||||
id=self.read_uint(reader),
|
||||
|
|
@ -769,13 +717,6 @@ cdef class MessageCodec:
|
|||
state=self.read_string(reader)
|
||||
)
|
||||
|
||||
if message_id == 80:
|
||||
return BatchMeta(
|
||||
page_no=self.read_uint(reader),
|
||||
first_index=self.read_uint(reader),
|
||||
timestamp=self.read_int(reader)
|
||||
)
|
||||
|
||||
if message_id == 81:
|
||||
return BatchMetadata(
|
||||
version=self.read_uint(reader),
|
||||
|
|
|
|||
|
|
@ -937,7 +937,11 @@ CREATE TABLE public.sessions_notes
    session_id bigint NOT NULL REFERENCES public.sessions (session_id) ON DELETE CASCADE,
    project_id integer NOT NULL REFERENCES public.projects (project_id) ON DELETE CASCADE,
    timestamp integer NOT NULL DEFAULT -1,
    is_public boolean NOT NULL DEFAULT FALSE
    is_public boolean NOT NULL DEFAULT FALSE,
    thumbnail text NULL,
    updated_at timestamp without time zone NULL DEFAULT NULL,
    start_at integer NULL,
    end_at integer NULL
);
|
||||
|
||||
CREATE TABLE public.errors_tags
|
||||
|
|
@ -1327,4 +1331,4 @@ CREATE TABLE IF NOT EXISTS public.session_integrations
|
|||
PRIMARY KEY (session_id, project_id, provider)
|
||||
);
|
||||
|
||||
COMMIT;
|
||||
COMMIT;
|
||||
|
|
|
|||
6
frontend/.gitignore
vendored
6
frontend/.gitignore
vendored
|
|
@ -5,7 +5,9 @@ public/
|
|||
drafts
|
||||
app/components/ui/SVG.js
|
||||
*.DS_Store
|
||||
.env
|
||||
.env*
|
||||
!.env.sample
|
||||
logs
|
||||
*css.d.ts
|
||||
*.cache
|
||||
.yarn/*
|
||||
|
|
@ -19,4 +21,4 @@ cypress.env.json
|
|||
**/__diff_output__/*
|
||||
*.diff.png
|
||||
cypress/videos/
|
||||
coverage
|
||||
coverage
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
|
|
@ -4,4 +4,4 @@ enableGlobalCache: true
|
|||
|
||||
nodeLinker: pnpm
|
||||
|
||||
yarnPath: .yarn/releases/yarn-4.6.0.cjs
|
||||
yarnPath: .yarn/releases/yarn-4.7.0.cjs
|
||||
|
|
|
|||
|
|
@@ -2,13 +2,14 @@ import withSiteIdUpdater from 'HOCs/withSiteIdUpdater';
import React, { Suspense, lazy } from 'react';
import { Redirect, Route, Switch } from 'react-router-dom';
import { observer } from 'mobx-react-lite';
import { useStore } from './mstore';
import { GLOBAL_HAS_NO_RECORDINGS } from 'App/constants/storageKeys';
import { OB_DEFAULT_TAB } from 'App/routes';
import { Loader } from 'UI';
import { useStore } from './mstore';

import APIClient from './api_client';
import * as routes from './routes';
import { debounce } from '@/utils';

const components: any = {
SessionPure: lazy(() => import('Components/Session/Session')),

@@ -31,7 +32,7 @@ const components: any = {
SpotsListPure: lazy(() => import('Components/Spots/SpotsList')),
SpotPure: lazy(() => import('Components/Spots/SpotPlayer')),
ScopeSetup: lazy(() => import('Components/ScopeForm')),
HighlightsPure: lazy(() => import('Components/Highlights/HighlightsList')),
HighlightsPure: lazy(() => import('Components/Highlights/HighlightsList'))
};

const enhancedComponents: any = {

@@ -51,7 +52,7 @@ const enhancedComponents: any = {
SpotsList: withSiteIdUpdater(components.SpotsListPure),
Spot: components.SpotPure,
ScopeSetup: components.ScopeSetup,
Highlights: withSiteIdUpdater(components.HighlightsPure),
Highlights: withSiteIdUpdater(components.HighlightsPure)
};

const { withSiteId } = routes;

@@ -97,10 +98,11 @@ const SPOT_PATH = routes.spot();
const SCOPE_SETUP = routes.scopeSetup();

const HIGHLIGHTS_PATH = routes.highlights();
let debounceSearch: any = () => {};

function PrivateRoutes() {
const { projectsStore, userStore, integrationsStore } = useStore();
const { onboarding } = userStore;
const { projectsStore, userStore, integrationsStore, searchStore } = useStore();
const onboarding = userStore.onboarding;
const scope = userStore.scopeState;
const { tenantId } = userStore.account;
const sites = projectsStore.list;

@@ -120,6 +122,16 @@ function PrivateRoutes() {
void integrationsStore.integrations.fetchIntegrations(siteId);
}
}, [siteId]);

React.useEffect(() => {
debounceSearch = debounce(() => searchStore.fetchSessions(), 500);
}, []);

React.useEffect(() => {
if (!searchStore.urlParsed) return;
debounceSearch();
}, [searchStore.instance.filters, searchStore.instance.eventsOrder]);

return (
<Suspense fallback={<Loader loading className="flex-1" />}>
<Switch key="content">

@@ -156,13 +168,13 @@ function PrivateRoutes() {
case '/integrations/slack':
client.post('integrations/slack/add', {
code: location.search.split('=')[1],
state: tenantId,
state: tenantId
});
break;
case '/integrations/msteams':
client.post('integrations/msteams/add', {
code: location.search.split('=')[1],
state: tenantId,
state: tenantId
});
break;
}

@@ -187,7 +199,7 @@ function PrivateRoutes() {
withSiteId(DASHBOARD_PATH, siteIdList),
withSiteId(DASHBOARD_SELECT_PATH, siteIdList),
withSiteId(DASHBOARD_METRIC_CREATE_PATH, siteIdList),
withSiteId(DASHBOARD_METRIC_DETAILS_PATH, siteIdList),
withSiteId(DASHBOARD_METRIC_DETAILS_PATH, siteIdList)
]}
component={enhancedComponents.Dashboard}
/>

@@ -248,7 +260,7 @@ function PrivateRoutes() {
withSiteId(FFLAG_READ_PATH, siteIdList),
withSiteId(FFLAG_CREATE_PATH, siteIdList),
withSiteId(NOTES_PATH, siteIdList),
withSiteId(BOOKMARKS_PATH, siteIdList),
withSiteId(BOOKMARKS_PATH, siteIdList)
]}
component={enhancedComponents.SessionsOverview}
/>

@@ -267,7 +279,7 @@ function PrivateRoutes() {
{Object.entries(routes.redirects).map(([fr, to]) => (
<Redirect key={fr} exact strict from={fr} to={to} />
))}
<Route path="*">
<Route path={'*'}>
<Redirect to={withSiteId(routes.sessions(), siteId)} />
</Route>
</Switch>
@@ -103,8 +103,8 @@ export default class APIClient {
// Always fetch the latest JWT from the store
const jwt = this.getJwt();
const headers = new Headers({
Accept: 'application/json',
'Content-Type': 'application/json',
'Accept': 'application/json',
'Content-Type': 'application/json'
});

if (reqHeaders) {

@@ -121,7 +121,7 @@ export default class APIClient {
const init: RequestInit = {
method,
headers,
body: params ? JSON.stringify(params) : undefined,
body: params ? JSON.stringify(params) : undefined
};

if (method === 'GET') {

@@ -185,10 +185,20 @@ export default class APIClient {
delete init.body;
}

const noChalice =
path.includes('v1/integrations') ||
(path.includes('/spot') && !path.includes('/login'));
let edp = window.env.API_EDP || `${window.location.origin}/api`;
if ((
path.includes('login')
|| path.includes('refresh')
|| path.includes('logout')
|| path.includes('reset')
) && window.env.NODE_ENV !== 'development'
) {
init.credentials = 'include';
} else {
delete init.credentials;
}

const noChalice = path.includes('v1/integrations') || path.includes('/spot') && !path.includes('/login');
let edp = window.env.API_EDP || window.location.origin + '/api';
if (noChalice && !edp.includes('api.openreplay.com')) {
edp = edp.replace('/api', '');
}

@@ -217,7 +227,8 @@ export default class APIClient {
try {
const errorData = await response.json();
errorMsg = errorData.errors?.[0] || errorMsg;
} catch {}
} catch {
}
throw new Error(errorMsg);
}
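The APIClient hunk above changes how the request endpoint and cookie handling are chosen per path. A minimal standalone sketch of that rule, with function and parameter names assumed for illustration (the real client reads window.env and window.location directly):

// Hypothetical sketch of the routing rule from the APIClient hunk above.
export interface RequestPlan {
  endpoint: string;
  credentials?: RequestCredentials;
}

export function planRequest(
  path: string,
  envApiEdp: string | undefined,
  origin: string,
  isDev: boolean,
): RequestPlan {
  // Cookies are attached only to auth routes, and only outside local development.
  const isAuthPath = ['login', 'refresh', 'logout', 'reset'].some((p) =>
    path.includes(p),
  );
  const credentials: RequestCredentials | undefined =
    isAuthPath && !isDev ? 'include' : undefined;

  // Spot and v1/integrations traffic skips the /api (Chalice) prefix,
  // unless the endpoint is the hosted api.openreplay.com.
  const noChalice =
    path.includes('v1/integrations') ||
    (path.includes('/spot') && !path.includes('/login'));
  let endpoint = envApiEdp || `${origin}/api`;
  if (noChalice && !endpoint.includes('api.openreplay.com')) {
    endpoint = endpoint.replace('/api', '');
  }
  return { endpoint, credentials };
}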
@@ -26,26 +26,15 @@ function AssistSearchActions() {
const showRecords = () => {
showModal(<Recordings />, { right: true, width: 960 });
};

const originStr = window.env.ORIGIN || window.location.origin;
const isSaas = /app\.openreplay\.com/.test(originStr);
return (
<div className="flex items-center w-full gap-2">
{isEnterprise && !modules.includes(MODULES.OFFLINE_RECORDINGS) ? (
<Button type="text" onClick={showRecords}>
{t('Training Videos')}
</Button>
) : null}
{isEnterprise && userStore.account?.admin && (
<Button
type="text"
onClick={showStats}
disabled={
modules.includes(MODULES.ASSIST_STATS) ||
modules.includes(MODULES.ASSIST)
}
>
{t('Co-Browsing Reports')}
</Button>
)}
<Tooltip title={t('Clear Search Filters')}>
<h3 className="text-2xl capitalize mr-2">
<span>{t('Co-Browse')}</span>
</h3>
<Tooltip title='Clear Search Filters'>
<Button
type="text"
disabled={!hasFilters && !hasEvents}

@@ -55,6 +44,15 @@ function AssistSearchActions() {
{t('Clear')}
</Button>
</Tooltip>
{!isSaas && isEnterprise && !modules.includes(MODULES.OFFLINE_RECORDINGS)
? <Button size={'small'} onClick={showRecords}>Training Videos</Button> : null
}
{isEnterprise && userStore.account?.admin && (
<Button size={'small'} onClick={showStats}
disabled={modules.includes(MODULES.ASSIST_STATS) || modules.includes(MODULES.ASSIST)}>
{t('Co-Browsing Reports')}
</Button>
)}
</div>
);
}
@@ -1,8 +1,16 @@
.disabled {
opacity: 0.5;
pointer-events: none;
}

.disabled > button {
opacity: 0.5;
}

/* Keep the icon color intact */
.disabled > button svg {
opacity: 1;
}

.divider {
width: 1px;
height: 49px;
@@ -1,5 +1,6 @@
import React, { useState, useEffect } from 'react';
import { Button } from 'antd';
import {Headset} from 'lucide-react';
import cn from 'classnames';
import {
CallingState,

@@ -241,11 +242,9 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
role="button"
>
<Button
icon={
<Icon name={annotating ? 'pencil-stop' : 'pencil'} size={16} />
}
type="text"
style={{ height: '28px' }}
icon={<Icon name={annotating ? 'pencil-stop' : 'pencil'} size={16} />}
type={'text'}
size='small'
className={annotating ? 'text-red' : 'text-main'}
>
{t('Annotate')}

@@ -257,10 +256,9 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {

{/* @ts-ignore wtf? */}
<ScreenRecorder />
<div className={stl.divider} />

{/* @ts-ignore */}
<Tooltip title="Go live to initiate remote control" disabled={livePlay}>
<Tooltip title="Call user to initiate remote control" disabled={livePlay}>
<div
className={cn('cursor-pointer p-2 flex items-center', {
[stl.disabled]:

@@ -270,21 +268,15 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
role="button"
>
<Button
icon={
<Icon
name={remoteActive ? 'window-x' : 'remote-control'}
size={16}
/>
}
type="text"
className={remoteActive ? 'text-red' : 'text-main'}
style={{ height: '28px' }}
type={'text'}
className={remoteActive ? 'text-red' : 'text-teal'}
icon={<Icon name={remoteActive ? 'window-x' : 'remote-control'} size={16} color={remoteActive ? 'red' : 'main'} />}
size='small'
>
{t('Remote Control')}
</Button>
</div>
</Tooltip>
<div className={stl.divider} />

<Tooltip
title={

@@ -302,12 +294,10 @@ function AssistActions({ userId, isCallActive, agentIds }: Props) {
role="button"
>
<Button
icon={<Icon name="headset" size={16} />}
type="text"
className={
onCall ? 'text-red' : isPrestart ? 'text-green' : 'text-main'
}
style={{ height: '28px' }}
icon={<Headset size={16} />}
type={'text'}
className={onCall ? 'text-red' : isPrestart ? 'text-green' : 'text-main'}
size='small'
>
{onCall ? t('End') : isPrestart ? t('Join Call') : t('Call')}
</Button>
@@ -41,27 +41,10 @@ const EChartsSankey: React.FC<Props> = (props) => {
const { data, height = 240, onChartClick, isUngrouped } = props;
const chartRef = React.useRef<HTMLDivElement>(null);

if (data.nodes.length === 0 || data.links.length === 0) {
return (
<NoContent
style={{ minHeight: height }}
title={
<div className="flex items-center relative">
<InfoCircleOutlined className="hidden md:inline-block mr-1" />
{t(
'Set a start or end point to visualize the journey. If set, try adjusting filters.',
)}
</div>
}
show
/>
);
}

const [finalNodeCount, setFinalNodeCount] = React.useState(data.nodes.length);

React.useEffect(() => {
if (!chartRef.current) return;
if (!chartRef.current || data.nodes.length === 0 || data.links.length === 0) return;

const finalNodes = data.nodes;
const finalLinks = data.links;

@@ -449,6 +432,21 @@ const EChartsSankey: React.FC<Props> = (props) => {
};
}, [data, height, onChartClick]);

if (data.nodes.length === 0 || data.links.length === 0) {
return (
<NoContent
style={{ minHeight: height }}
title={
<div className="flex items-center relative">
<InfoCircleOutlined className="hidden md:inline-block mr-1" />
Set a start or end point to visualize the journey. If set, try
adjusting filters.
</div>
}
show={true}
/>
);
}
let containerStyle: React.CSSProperties;
if (isUngrouped) {
const dynamicMinHeight = finalNodeCount * 15;
@@ -39,15 +39,8 @@ function CustomFields() {
return (
<div className="flex flex-col gap-6">
<Typography.Text>
{t(
'Attach key-value pairs to session replays for enhanced filtering, searching, and identifying relevant user sessions.',
)}
<a
href="https://docs.openreplay.com/installation/metadata"
className="link ml-1"
target="_blank"
rel="noreferrer"
>
{t('Attach key-value pairs to session replays for enhanced filtering, searching, and identifying relevant user sessions.')}
<a href="https://docs.openreplay.com/en/session-replay/metadata" className="link ml-1" target="_blank">
{t('Learn more')}
</a>
</Typography.Text>
@@ -154,7 +154,7 @@ function Sites() {
actionButton={
<TextLink
icon="book"
href="https://docs.openreplay.com/installation"
href="https://docs.openreplay.com/deployment/setup-or"
label="Installation Docs"
/>
}
@@ -1,10 +1,10 @@
import React, { useEffect } from 'react';
import { Pagination, NoContent, Icon } from 'UI';
import ErrorListItem from 'App/components/Dashboard/components/Errors/ErrorListItem';
import { withRouter, RouteComponentProps } from 'react-router-dom';
import { useModal } from 'App/components/Modal';
import ErrorDetailsModal from 'App/components/Dashboard/components/Errors/ErrorDetailsModal';
import { useTranslation } from 'react-i18next';
import React, { useEffect } from "react";
import { Pagination, NoContent, Icon } from "UI";
import ErrorListItem from "App/components/Dashboard/components/Errors/ErrorListItem";
import { withRouter, RouteComponentProps } from "react-router-dom";
import { useModal } from "App/components/Modal";
import ErrorDetailsModal from "App/components/Dashboard/components/Errors/ErrorDetailsModal";
import { useTranslation } from "react-i18next";

interface Props {
metric: any;

@@ -14,10 +14,9 @@ interface Props {
location: any;
}
function CustomMetricTableErrors(props: RouteComponentProps & Props) {
const { t } = useTranslation();
const { metric, data } = props;
const errorId = new URLSearchParams(props.location.search).get('errorId');
const { showModal, hideModal } = useModal();
const { metric, data } = props;
const errorId = new URLSearchParams(props.location.search).get("errorId");
const { showModal, hideModal } = useModal();

const onErrorClick = (e: any, error: any) => {
e.stopPropagation();
@@ -198,9 +198,7 @@ function DashboardList() {
{t('Create and organize your insights')}
</Typography.Text>
<div className="mb-2 text-lg text-gray-500 leading-normal">
{t(
'Build dashboards to track key metrics and monitor performance in one place.',
)}
{t('Build dashboards to track key metrics and monitor performance in one place.')}
</div>
<div className="my-4 mb-10">
<CreateDashboardButton />

@@ -232,6 +230,7 @@ function DashboardList() {
<Table
dataSource={list}
columns={tableConfig}
showSorterTooltip={false}
pagination={{
showTotal: (total, range) =>
`${t('Showing')} ${range[0]}-${range[1]} ${t('of')} ${total} ${t('items')}`,
@@ -45,13 +45,9 @@ const ListView: React.FC<Props> = ({
selectedList,
toggleSelection,
disableSelection = false,
inLibrary = false,
inLibrary = false
}) => {
const { t } = useTranslation();
const [sorter, setSorter] = useState<{
field: string;
order: 'ascend' | 'descend';
}>({
const [sorter, setSorter] = useState<{ field: string; order: 'ascend' | 'descend' }>({
field: 'lastModified',
order: 'descend',
});

@@ -292,6 +288,7 @@ const ListView: React.FC<Props> = ({
columns={columns}
dataSource={paginatedData}
rowKey="metricId"
showSorterTooltip={false}
onChange={handleTableChange}
onRow={
inLibrary
@@ -29,7 +29,12 @@ function LongLoader({ onClick }: { onClick: () => void }) {
<div>
{t('Use sample data to speed up query and get a faster response.')}
</div>
<Button onClick={onClick}>{t('Use Sample Data')}</Button>
{/*<div>*/}
{/* Use sample data to speed up query and get a faster response.*/}
{/*</div>*/}
{/*<Button onClick={onClick}>*/}
{/* Use Sample Data*/}
{/*</Button>*/}
</div>
);
}
@@ -45,10 +45,8 @@ function WidgetDateRange({
};

React.useEffect(() => {
if (presetComparison) {
const option = DATE_RANGE_COMPARISON_OPTIONS(t).find(
(option: any) => option.value === presetComparison[0],
);
if (presetComparison && presetComparison.length) {
const option = DATE_RANGE_COMPARISON_OPTIONS(t).find((option: any) => option.value === presetComparison[0]);
if (option) {
// @ts-ignore
const newPeriod = new Period({
@@ -216,42 +216,37 @@ const PathAnalysisFilter = observer(({ metric, writeOption }: any) => {
};
return (
<div className="rounded-lg bg-white border">
<div className="flex flex-col justify-start gap-2 flex-wrap">
<Form.Item className="mb-0 hover:bg-bg-blue/30 px-4 pb-1 pt-2">
<div className="flex flex-wrap gap-2 items-center justify-start">
<span className="font-medium">{t('Journeys With')} </span>
<div className="flex gap-2 items-center">
<Select
className="w-36 rounded-lg !h-[26px]"
name="startType"
options={[
{ value: 'start', label: t('Start Point') },
{ value: 'end', label: t('End Point') },
]}
defaultValue={metric.startType || 'start'}
onChange={onPointChange}
placeholder={t('Select Start Type')}
size="small"
/>
<div className='flex flex-col justify-start gap-2 flex-wrap'>
<Form.Item className='mb-0 hover:bg-bg-blue/30 px-4 pb-1 pt-2'>
<div className="flex flex-wrap gap-2 items-center justify-start">
<span className="font-medium">{t('Journeys With')}</span>
<div className="flex gap-2 items-center">
<Select
className="w-36 rounded-lg"
name="startType"
options={[
{ value: 'start', label: 'Start Point' },
{ value: 'end', label: 'End Point' },
]}
defaultValue={metric.startType || 'start'}
onChange={onPointChange}
placeholder="Select Start Type"
/>

<span className="">{t('showing')}</span>

<Select
mode="multiple"
className="rounded-lg h-[26px] w-max min-w-44 max-w-58"
allowClear
name="metricValue"
options={metricValueOptions}
value={metric.metricValue || []}
onChange={(value) =>
writeOption({ name: 'metricValue', value })
}
placeholder={t('Select Metrics')}
size="small"
maxTagCount="responsive"
showSearch={false}
/>
</div>
<Select
mode="multiple"
className="rounded-lg w-max min-w-44 max-w-58"
allowClear
name="metricValue"
options={metricValueOptions}
value={metric.metricValue || []}
onChange={(value) => writeOption({ name: 'metricValue', value })}
placeholder={t('Select Metrics')}
maxTagCount={'responsive'}
showSearch={false}
/>
</div>
</Form.Item>
<Form.Item className="mb-0 hover:bg-bg-blue/30 px-4 pb-2 pt-1">
@@ -151,7 +151,7 @@ function WidgetSessions(props: Props) {
seriesJson[0].filter.filters[0].value = widget.data.nodes[0].name;
} else if (
seriesJson[0].filter.filters[0].value[0] === '' &&
!widget.data.nodes
!widget.data.nodes?.length
) {
// no point requesting if we don't have starting point picked by api
return;
@@ -9,6 +9,7 @@ import SubserviceHealth from 'Components/Header/HealthStatus/SubserviceHealth/Su
import Footer from './Footer';
import { IServiceStats } from '../HealthStatus';
import { useTranslation } from 'react-i18next';
import { RefreshCcw } from 'lucide-react';

function HealthModal({
getHealth,

@@ -79,9 +80,9 @@ function HealthModal({
<Button
disabled={isLoading}
onClick={getHealth}
icon="arrow-repeat"
type="text"
className="text-main"
icon={<RefreshCcw size={18} />}
type={'text'}
className={'text-main'}
>
{t('Recheck')}
</Button>

@@ -95,18 +96,18 @@ function HealthModal({
{isLoading
? null
: Object.keys(healthResponse.healthMap).map((service) => (
<React.Fragment key={service}>
<Category
onClick={() => setSelectedService(service)}
healthOk={
healthResponse.healthMap[service].healthOk
}
name={healthResponse.healthMap[service].name}
isSelectable
isSelected={selectedService === service}
/>
</React.Fragment>
))}
<React.Fragment key={service}>
<Category
onClick={() => setSelectedService(service)}
healthOk={
healthResponse.healthMap[service].healthOk
}
name={healthResponse.healthMap[service].name}
isSelectable
isSelected={selectedService === service}
/>
</React.Fragment>
))}
</div>
<div
className="bg-gray-lightest border-l w-fit border-figmaColors-divider overflow-y-scroll relative"
@@ -45,7 +45,7 @@ function IdentifyUsersTab(props: Props) {
</div>

<a
href={`https://docs.openreplay.com/en/installation/identify-user${platform.value === 'web' ? '/#with-npm' : '/#with-ios-app'}`}
href={`https://docs.openreplay.com/en/session-replay/identify-user${platform.value === "web" ? "/#with-npm" : "/#with-ios-app"}`}
target="_blank"
rel="noreferrer"
>
@@ -43,18 +43,10 @@ function InstallOpenReplayTab(props: Props) {
<ProjectFormButton />
</div>
</div>
<a
href="https://docs.openreplay.com/en/using-or/"
target="_blank"
rel="noreferrer"
>
<Button
size="small"
type="text"
className="ml-2 flex items-center gap-2"
>
<Icon name="question-circle" />
<div className="text-main">{t('See Documentation')}</div>
<a href={"https://docs.openreplay.com/en/sdk/using-or/"} target="_blank">
<Button size={"small"} type={"text"} className="ml-2 flex items-center gap-2">
<Icon name={"question-circle"} />
<div className={"text-main"}>{t('See Documentation')}</div>
</Button>
</a>
</h1>
@@ -22,7 +22,7 @@ function ManageUsersTab(props: Props) {
</div>

<a
href="https://docs.openreplay.com/en/tutorials/adding-users/"
href="https://docs.openreplay.com/en/deployment/invite-team-members"
target="_blank"
rel="noreferrer"
>
@@ -90,10 +90,7 @@ function InstallDocs({ site }) {
{t('Otherwise, if your web app is')}
<strong>{t('Server-Side-Rendered (SSR)')}</strong>
{t('(i.e. NextJS, NuxtJS),')}
<a
className="text-main"
href="https://docs.openreplay.com/en/using-or/next/"
>
<a className={'text-main'} href={'https://docs.openreplay.com/en/sdk/using-or/next/'}>
{t('consider async imports')}
</a>
{t('or cjs version of the library:')}
@@ -13,12 +13,11 @@ function Metadata() {
if (metaLength === 0) {
return (
<span className="text-sm color-gray-medium">
{t('Check')}
{t('Check')}{' '}
<a
href="https://docs.openreplay.com/installation/metadata"
href="https://docs.openreplay.com/en/session-replay/metadata"
target="_blank"
className="link"
rel="noreferrer"
>
{t('how to use Metadata')}
</a>{' '}
@@ -90,7 +90,8 @@ function Player(props: IProps) {

if (!playerContext.player) return null;

const maxWidth = activeTab ? 'calc(100vw - 270px)' : '100vw';
const activeTabWidth = activeTab === 'EXPORT' ? 360 : 270
const maxWidth = activeTab ? `calc(100vw - ${activeTabWidth}px)` : '100vw';

const handleResize = (e: React.MouseEvent<HTMLDivElement>) => {
e.preventDefault();
@@ -1,4 +1,8 @@
import React from 'react';
import EventsBlock from '../Session_/EventsBlock';
import HighlightPanel from "../Session_/Highlight/HighlightPanel";
import PageInsightsPanel from '../Session_/PageInsightsPanel/PageInsightsPanel';
import UnitStepsModal from "../Session_/UnitStepsModal";
import TagWatch from 'Components/Session/Player/TagWatch';
import cn from 'classnames';
import EventsBlock from '../Session_/EventsBlock';

@@ -38,7 +42,13 @@ function RightBlock({
<div className={cn('bg-white border-l', stl.panel)}>
<HighlightPanel onClose={() => setActiveTab('')} />
</div>
);
)
case 'EXPORT':
return (
<div className={cn('bg-white border-l', stl.extraPanel)}>
<UnitStepsModal onClose={() => setActiveTab('EVENTS')} />
</div>
)
default:
return null;
}
@@ -4,3 +4,10 @@
height: calc(100vh - 50px);
align-self: flex-end;
}

.extraPanel {
width: 360px;
min-width: 360px;
height: calc(100vh - 50px);
align-self: flex-end;
}
@@ -24,18 +24,6 @@ function EventSearch(props) {
onChange={onChange}
prefix={<SearchOutlined />}
/>

<Tooltip title="Close Panel" placement="bottom">
<Button
className="ml-2"
type="text"
onClick={() => {
setActiveTab('');
toggleEvents();
}}
icon={<CloseOutlined />}
/>
</Tooltip>
</div>
</div>
</div>
@@ -4,20 +4,29 @@ import cn from 'classnames';
import { observer } from 'mobx-react-lite';
import React from 'react';
import { VList, VListHandle } from 'virtua';

import { Button } from 'antd'
import { PlayerContext } from 'App/components/Session/playerContext';
import { useStore } from 'App/mstore';
import { Icon } from 'UI';

import { Search } from 'lucide-react'
import EventGroupWrapper from './EventGroupWrapper';
import EventSearch from './EventSearch/EventSearch';
import styles from './eventsBlock.module.css';
import { useTranslation } from 'react-i18next';
import { CloseOutlined } from ".store/@ant-design-icons-virtual-42686020c5/package";
import { Tooltip } from ".store/antd-virtual-9dbfadb7f6/package";
import { getDefaultFramework, frameworkIcons } from "../UnitStepsModal";

interface IProps {
setActiveTab: (tab?: string) => void;
}

const MODES = {
SELECT: 'select',
SEARCH: 'search',
EXPORT: 'export',
}

function EventsBlock(props: IProps) {
const { t } = useTranslation();
const { notesStore, uxtestingStore, uiPlayerStore, sessionStore } =

@@ -224,18 +233,47 @@ function EventsBlock(props: IProps) {
</div>
</div>
) : null}
<div className={cn(styles.hAndProgress, 'mt-0')}>
<EventSearch
onChange={write}
setActiveTab={setActiveTab}
value={query}
eventsText={
usedEvents.length
? `${usedEvents.length} ${t('Events')}`
: `0 ${t('Events')}`
}
/>
</div>
{mode === MODES.SELECT ? (
<div className={'flex items-center gap-2'}>
<Button
onClick={() => setActiveTab('EXPORT')}
type={'default'}
shape={'circle'}
>
<Icon name={frameworkIcons[defaultFramework]} size={18} />
</Button>
<Button
className={'flex items-center gap-2'}
onClick={() => setMode(MODES.SEARCH)}
>
<Search size={14} />
<div>{t('Search')} {usedEvents.length} {t('events')}</div>
</Button>
<Tooltip title={t('Close Panel')} placement='bottom' >
<Button
className="ml-auto"
type='text'
onClick={() => {
setActiveTab('');
}}
icon={<CloseOutlined />}
/>
</Tooltip>
</div>
) : null}
{mode === MODES.SEARCH ?
<div className={'flex items-center gap-2'}>
<EventSearch
onChange={write}
setActiveTab={setActiveTab}
value={query}
eventsText={
usedEvents.length ? `${usedEvents.length} ${t('Events')}` : `0 ${t('Events')}`
}
/>
<Button type={'text'} onClick={() => setMode(MODES.SELECT)}>{t('Cancel')}</Button>
</div>
: null}
</div>
<div
className={cn('flex-1 pb-4', styles.eventsList)}

@@ -255,7 +293,9 @@ function EventsBlock(props: IProps) {
className={styles.eventsList}
ref={scroller}
>
{usedEvents.map((_, i) => renderGroup({ index: i }))}
{usedEvents.map((_, i) => {
return renderGroup({ index: i });
})}
</VList>
</div>
</>
@@ -13,12 +13,11 @@ function Metadata() {
if (metaLength === 0) {
return (
<span className="text-sm color-gray-medium">
{t('Check')}
{t('Check')}
<a
href="https://docs.openreplay.com/installation/metadata"
href="https://docs.openreplay.com/en/session-replay/metadata"
target="_blank"
className="link"
rel="noreferrer"
>
{t('how to use Metadata')}
</a>{' '}
@@ -164,8 +164,7 @@ function ExceptionsCont() {
<a
className="color-teal underline"
target="_blank"
href="https://docs.openreplay.com/installation/upload-sourcemaps"
rel="noreferrer"
href="https://docs.openreplay.com/deployment/upload-sourcemaps"
>
{t('Upload Source Maps')}{' '}
</a>
@@ -12,9 +12,12 @@ import {
import { WebEventsList, MobEventsList } from './EventsList';
import NotesList from './NotesList';
import SkipIntervalsList from './SkipIntervalsList';
import TooltipContainer from './components/TooltipContainer';
import CustomDragLayer, { OnDragCallback } from './components/CustomDragLayer';
import stl from './timeline.module.css';
import TimelineTracker from 'Components/Session_/Player/Controls/TimelineTracker';
import {
ZoomDragLayer,
HighlightDragLayer,
ExportEventsSelection
} from "Components/Session_/Player/Controls/components/ZoomDragLayer";

function Timeline({ isMobile }: { isMobile: boolean }) {
const { player, store } = useContext(PlayerContext);

@@ -27,6 +30,7 @@ function Timeline({ isMobile }: { isMobile: boolean }) {
const { timezone } = sessionStore.current;
const issues = sessionStore.current.issues ?? [];
const timelineZoomEnabled = uiPlayerStore.timelineZoom.enabled;
const exportEventsEnabled = uiPlayerStore.exportEventsSelection.enabled;
const highlightEnabled = uiPlayerStore.highlightSelection.enabled;
const { playing, skipToIssue, ready, endTime, devtoolsLoading, domLoading } =
store.get();

@@ -145,6 +149,7 @@ function Timeline({ isMobile }: { isMobile: boolean }) {
>
{timelineZoomEnabled ? <ZoomDragLayer scale={scale} /> : null}
{highlightEnabled ? <HighlightDragLayer scale={scale} /> : null}
{exportEventsEnabled ? <ExportEventsSelection scale={scale} /> : null}
<div
className={stl.progress}
onClick={ready ? jumpToTime : undefined}
@@ -79,6 +79,29 @@ export const ZoomDragLayer = observer(({ scale }: Props) => {
);
});

export const ExportEventsSelection = observer(({ scale }: Props) => {
const { uiPlayerStore } = useStore();
const toggleExportEventsSelection = uiPlayerStore.toggleExportEventsSelection;
const timelineZoomStartTs = uiPlayerStore.exportEventsSelection.startTs;
const timelineZoomEndTs = uiPlayerStore.exportEventsSelection.endTs;

const onDrag = (start: number, end: number) => {
toggleExportEventsSelection({
enabled: true,
range: [start, end],
});
};

return (
<DraggableMarkers
scale={scale}
onDragEnd={onDrag}
defaultStartPos={timelineZoomStartTs}
defaultEndPos={timelineZoomEndTs}
/>
);
});

function DraggableMarkers({
scale,
onDragEnd,
@@ -2,6 +2,7 @@ import React from 'react';
import { screenRecorder } from 'App/utils/screenRecorder';
import { Tooltip, Button } from 'antd';
import { Icon } from 'UI';
import { Disc } from 'lucide-react';
import { SessionRecordingStatus } from 'Player';
import { recordingsService } from 'App/services';
import { toast } from 'react-toastify';

@@ -133,11 +134,7 @@ function ScreenRecorder() {
<Tooltip
title={isEnterprise ? supportedMessage : ENTERPRISE_REQUEIRED(t)}
>
<Button
icon={<Icon name="record-circle" size={16} />}
disabled
type="text"
>
<Button icon={<Disc size={16} />} disabled type="text">
{t('Record Activity')}
</Button>
</Tooltip>
@@ -26,7 +26,6 @@ import IssueForm from 'Components/Session_/Issues/IssueForm';
import QueueControls from './QueueControls';
import HighlightButton from './Highlight/HighlightButton';
import ShareModal from '../shared/SharePopup/SharePopup';
import UnitStepsModal from './UnitStepsModal';
import { useTranslation } from 'react-i18next';

const disableDevtools = 'or_devtools_uxt_toggle';

@@ -128,16 +127,6 @@ function SubHeader(props) {
});
};

const exportEvents = () => {
const allEvents = sessionStore.current.events;
const { width } = store.get();
const { height } = store.get();
openModal(
<UnitStepsModal width={width} height={height} events={allEvents} />,
{ title: t('Export Events'), width: 640 },
);
};

return (
<>
<div

@@ -221,17 +210,7 @@ function SubHeader(props) {
),
onClick: showKbHelp,
},
{
key: '5',
label: (
<div className="flex items-center gap-2">
<Bot size={16} strokeWidth={1} />
<span>{t('Export Events')}</span>
</div>
),
onClick: exportEvents,
},
],
]
}}
>
<AntButton size="small">
@@ -1,52 +1,63 @@
import React from 'react';
import { TYPES, Input, Click, Location } from 'App/types/session/event';
import { CodeBlock, CopyButton } from 'UI';
import { Segmented } from 'antd';
import { TYPES } from 'App/types/session/event';
import { CodeBlock, Icon } from 'UI';
import { Select, Radio, Checkbox } from 'antd';
import { useStore } from 'App/mstore';
import { observer } from 'mobx-react-lite';
import { PlayerContext } from 'Components/Session/playerContext';
import { X } from 'lucide-react';
import { puppeteerEvents, cypressEvents, playWrightEvents } from './utils';
import { useTranslation } from 'react-i18next';

interface Props {
events: Input[] | Click[] | Location[];
width: number;
height: number;
onClose: () => void;
}

function UnitStepsModal({ events, width, height }: Props) {
const defaultFrameworkKey = '__$defaultFrameworkKey$__';
export const getDefaultFramework = () => {
const stored = localStorage.getItem(defaultFrameworkKey);
return stored ?? 'cypress';
}
export const frameworkIcons = {
cypress: 'cypress',
puppeteer: 'puppeteer',
playwright: 'pwright',
}
function UnitStepsModal({ onClose }: Props) {
const { t } = useTranslation();
const { sessionStore, uiPlayerStore } = useStore();
const { store, player } = React.useContext(PlayerContext);
const [eventStr, setEventStr] = React.useState('');
const [activeFramework, setActiveFramework] = React.useState('puppeteer');
const [mode, setMode] = React.useState('events');
const [activeFramework, setActiveFramework] = React.useState(getDefaultFramework);
const events = React.useMemo(() => {
if (!uiPlayerStore.exportEventsSelection.enabled) {
return sessionStore.current.events;
} else {
return sessionStore.current.events.filter((ev) => {
return (
ev.time >= uiPlayerStore.exportEventsSelection.startTs &&
ev.time <= uiPlayerStore.exportEventsSelection.endTs
);
});
}
}, [
sessionStore.current.events,
uiPlayerStore.exportEventsSelection.enabled,
uiPlayerStore.exportEventsSelection.startTs,
uiPlayerStore.exportEventsSelection.endTs,
]);
const { tabNames, currentTab } = store.get();

React.useEffect(() => {
player.pause();
return () => {
uiPlayerStore.toggleExportEventsSelection({ enabled: false });
};
}, []);

React.useEffect(() => {
const userEventTypes = [TYPES.LOCATION, TYPES.CLICK, TYPES.INPUT];
const puppeteerEvents = {
[TYPES.LOCATION]: (event: Location) => `await page.goto('${event.url}')`,
[TYPES.CLICK]: (event: Click) =>
`await page.locator('${
event.selector.length ? event.selector : event.label
}').click()`,
[TYPES.INPUT]: (event: Input) =>
`await page.locator('${event.label}').type('Test Input')`,
screen: () =>
`await page.setViewport({width: ${width}, height: ${height})`,
};
const cypressEvents = {
[TYPES.LOCATION]: (event: Location) => `cy.visit('${event.url}')`,
[TYPES.CLICK]: (event: Click) =>
`cy.get('${
event.selector.length ? event.selector : event.label
}').click()`,
[TYPES.INPUT]: (event: Input) =>
`cy.get('${event.label}').type('Test Input')`,
screen: () => `cy.viewport(${width}, ${height})`,
};
const playWrightEvents = {
[TYPES.LOCATION]: (event: Location) => `await page.goto('${event.url}')`,
[TYPES.CLICK]: (event: Click) =>
event.selector.length
? `await page.locator('${event.selector}').click()`
: `await page.getByText('${event.label}').click()`,
[TYPES.INPUT]: (event: Input) =>
`await page.getByLabel('${event.label}').fill('Test Input')`,
screen: () =>
`await page.setViewport({width: ${width}, height: ${height})`,
};

const collections = {
puppeteer: puppeteerEvents,

@@ -58,39 +69,148 @@ function UnitStepsModal({ events, width, height }: Props) {
const usedCollection = collections[activeFramework];

let finalScript = '';
if (mode === 'test') {
const pageName = tabNames[currentTab] ?? 'Test Name';
const firstUrl =
events.find((ev) => ev.type === TYPES.LOCATION)?.url ?? 'page';
finalScript += usedCollection.testIntro(pageName, firstUrl);
finalScript += '\n';
}
events.forEach((ev) => {
if (userEventTypes.includes(ev.type)) {
if (mode === 'test') {
finalScript += ' ';
}
finalScript += usedCollection[ev.type](ev);
finalScript += '\n';
}
});
if (mode === 'test') {
finalScript += usedCollection.testOutro();
}
setEventStr(finalScript);
}, [events, activeFramework]);
}, [events, activeFramework, mode]);

const enableZoom = () => {
const time = store.get().time;
const endTime = store.get().endTime;
const closestEvent = sessionStore.current.events.reduce((prev, curr) => {
return Math.abs(curr.time - time) < Math.abs(prev.time - time)
? curr
: prev;
});
const closestInd = sessionStore.current.events.indexOf(closestEvent);
if (closestEvent) {
const beforeCenter = closestInd > 4 ? closestInd - 4 : null;
const afterCenter =
closestInd < sessionStore.current.events.length - 4
? closestInd + 4
: null;

uiPlayerStore.toggleExportEventsSelection({
enabled: true,
range: [
beforeCenter ? sessionStore.current.events[beforeCenter].time : 0,
afterCenter ? sessionStore.current.events[afterCenter].time : endTime,
],
});
} else {
const distance = Math.max(endTime / 40, 2500);

uiPlayerStore.toggleExportEventsSelection({
enabled: true,
range: [
Math.max(time - distance, 0),
Math.min(time + distance, endTime),
],
});
}
};

const toggleZoom = (enabled?: boolean) => {
if (enabled) {
enableZoom();
} else {
uiPlayerStore.toggleExportEventsSelection({ enabled: false });
}
};

const changeFramework = (framework: string) => {
localStorage.setItem(defaultFrameworkKey, framework);
setActiveFramework(framework);
}

return (
<div className="bg-white h-full flex flex-col items-start gap-2">
<div className="flex items-center gap-4">
<Segmented
options={[
{ label: 'Puppeteer', value: 'puppeteer' },
{ label: 'Cypress', value: 'cypress' },
{ label: 'Playwright', value: 'playwright' },
]}
value={activeFramework}
onChange={(value) => setActiveFramework(value)}
/>
<CopyButton
size="middle"
variant="default"
content={eventStr}
className="capitalize font-medium text-neutral-400"
/>
<div
className={'bg-white h-screen w-full flex flex-col items-start gap-2 p-4'}
style={{ marginTop: -50 }}
>
<div className={'flex items-center justify-between w-full'}>
<div className={'font-semibold text-xl'}>{t('Copy Events')}</div>
<div className={'cursor-pointer'} onClick={onClose}>
<X size={18} />
</div>
</div>
<div className="w-full">
<CodeBlock code={eventStr} language="javascript" />
<Select
className={'w-full'}
options={[
{
label: (
<div className={'flex items-center gap-2'}>
<Icon name={'cypress'} size={18} />
<div>{t('Cypress')}</div>
</div>
),
value: 'cypress',
},
{
label: (
<div className={'flex items-center gap-2'}>
<Icon name={'puppeteer'} size={18} />
<div>{t('Puppeteer')}</div>
</div>
),
value: 'puppeteer',
},
{
label: (
<div className={'flex items-center gap-2'}>
<Icon name={'pwright'} size={18} />
<div>{t('Playwright')}</div>
</div>
),
value: 'playwright',
},
]}
value={activeFramework}
onChange={changeFramework}
/>
<Radio.Group
value={mode}
onChange={(e) => setMode(e.target.value)}
className={'w-full'}
>
<Radio value={'events'}>{t('Events Only')}</Radio>
<Radio value={'test'}>{t('Complete Test')}</Radio>
</Radio.Group>
<Checkbox
value={uiPlayerStore.exportEventsSelection.enabled}
onChange={(e) => toggleZoom(e.target.checked)}
>
{t('Select events on the timeline')}
</Checkbox>
<div className={'w-full'}>
<CodeBlock
width={340}
height={'calc(100vh - 146px)'}
extra={`${events.length} Events`}
copy
code={eventStr}
language={'javascript'}
/>
</div>
</div>
);
}

export default UnitStepsModal;
export default observer(UnitStepsModal);
40
frontend/app/components/Session_/UnitStepsModal/utils.ts
Normal file

@@ -0,0 +1,40 @@
import { Click, Input, Location, TYPES } from "App/types/session/event";

export const puppeteerEvents = {
[TYPES.LOCATION]: (event: Location) => `await page.goto('${event.url}')`,
[TYPES.CLICK]: (event: Click) =>
`await page.locator('${
event.selector.length ? event.selector : event.label
}').click()`,
[TYPES.INPUT]: (event: Input) =>
`await page.locator('${event.label}').type('Test Input')`,
screen: (width: number, height: number) =>
`await page.setViewport({width: ${width}, height: ${height})`,
testIntro: (pageTitle: string, firstUrl: string) => `describe('${pageTitle}', () => {\n it('Navigates through ${firstUrl}', async () => {`,
testOutro: () => ` })\n})`,
};
export const cypressEvents = {
[TYPES.LOCATION]: (event: Location) => `cy.visit('${event.url}')`,
[TYPES.CLICK]: (event: Click) =>
`cy.get('${
event.selector.length ? event.selector : event.label
}').click()`,
[TYPES.INPUT]: (event: Input) =>
`cy.get('${event.label}').type('Test Input')`,
screen: (width: number, height: number) => `cy.viewport(${width}, ${height})`,
testIntro: (pageTitle: string, firstUrl: string) => `describe('${pageTitle}', () => {\n it('Navigates through ${firstUrl}', () => {`,
testOutro: () => ` })\n})`,
};
export const playWrightEvents = {
[TYPES.LOCATION]: (event: Location) => `await page.goto('${event.url}')`,
[TYPES.CLICK]: (event: Click) =>
event.selector.length
? `await page.locator('${event.selector}').click()`
: `await page.getByText('${event.label}').click()`,
[TYPES.INPUT]: (event: Input) =>
`await page.getByLabel('${event.label}').fill('Test Input')`,
screen: (width: number, height: number) =>
`await page.setViewport({width: ${width}, height: ${height})`,
testIntro: (pageTitle: string, firstUrl: string) => `test.describe('${pageTitle}', () => {\n test('Navigates through ${firstUrl}', async () => {`,
testOutro: () => ` })\n})`,
};
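The new utils.ts generators above are consumed by UnitStepsModal to turn recorded session events into a Cypress, Puppeteer, or Playwright script. A minimal standalone sketch of that assembly step, with the RecordedEvent shape and string event types assumed for illustration (the app uses TYPES from App/types/session/event and the session store):

// Hypothetical sketch mirroring the loop in UnitStepsModal.
type RecordedEvent = {
  type: 'LOCATION' | 'CLICK' | 'INPUT';
  url?: string;
  label?: string;
  selector?: string;
};

// Shape of the generator maps above: one line builder per event type,
// plus testIntro/testOutro wrappers for the "Complete Test" mode.
type StepGenerators = Record<string, (...args: any[]) => string>;

export function buildScript(
  events: RecordedEvent[],
  gen: StepGenerators,
  completeTest: boolean,
  pageTitle = 'Test Name',
): string {
  const out: string[] = [];
  if (completeTest) {
    // Wrap the steps in a describe/it block named after the first visited URL.
    const firstUrl = events.find((e) => e.type === 'LOCATION')?.url ?? 'page';
    out.push(gen.testIntro(pageTitle, firstUrl));
  }
  for (const ev of events) {
    const step = gen[ev.type];
    // Skip event types the selected framework map does not handle.
    if (typeof step === 'function') {
      out.push((completeTest ? '  ' : '') + step(ev));
    }
  }
  if (completeTest) out.push(gen.testOutro());
  return out.join('\n');
}

In the component, the same loop runs over cypressEvents, puppeteerEvents, or playWrightEvents depending on the framework the user selects.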
Some files were not shown because too many files have changed in this diff