Merge pull request #977 from openreplay/api-v1.10.0

CI changes
This commit is contained in:
Kraiem Taha Yassine 2023-02-14 15:53:32 +01:00 committed by GitHub
commit a986b8b82f
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
30 changed files with 878 additions and 1631 deletions

.github/workflows/alerts-ee.yaml (new file, 141 lines added)

@@ -0,0 +1,141 @@
# This action will push the alerts changes to aws
on:
workflow_dispatch:
inputs:
skip_security_checks:
description: 'Skip Security checks if there is an unfixable vuln or error. Value: true/false'
required: false
default: 'false'
push:
branches:
- api-v1.10.0
paths:
- ee/api/**
- api/**
name: Build and Deploy Alerts EE
jobs:
deploy:
name: Deploy
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
# We need to diff with old commit
# to see which workers got changed.
fetch-depth: 2
- name: Docker login
run: |
docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
- uses: azure/k8s-set-context@v1
with:
method: kubeconfig
kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext
# Caching docker images
- uses: satackey/action-docker-layer-caching@v0.0.11
# Ignore the failure of a step and avoid terminating the job.
continue-on-error: true
- name: Building and Pushing api image
id: build-image
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
ENVIRONMENT: staging
run: |
skip_security_checks=${{ github.event.inputs.skip_security_checks }}
cd api
PUSH_IMAGE=0 bash -x ./build_alerts.sh ee
[[ "x$skip_security_checks" == "xtrue" ]] || {
curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
images=("alerts")
for image in ${images[*]};do
./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
done
err_code=$?
[[ $err_code -ne 0 ]] && {
exit $err_code
}
} && {
echo "Skipping Security Checks"
}
images=("alerts")
for image in ${images[*]};do
docker push $DOCKER_REPO/$image:$IMAGE_TAG
done
- name: Creating old image input
run: |
#
# Create yaml with existing image tags
#
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
echo > /tmp/image_override.yaml
for line in `cat /tmp/image_tag.txt`;
do
image_array=($(echo "$line" | tr ':' '\n'))
cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
image:
# We have to strip off the -ee, as helm will append it.
tag: `echo ${image_array[1]} | cut -d '-' -f 1`
EOF
done
- name: Deploy to kubernetes
run: |
cd scripts/helmcharts/
## Update secrets
sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.EE_PG_PASSWORD }}\"/g" vars.yaml
sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\"/g" vars.yaml
sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.EE_MINIO_SECRET_KEY }}\"/g" vars.yaml
sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.EE_JWT_SECRET }}\"/g" vars.yaml
sed -i "s/domainName: \"\"/domainName: \"${{ secrets.EE_DOMAIN_NAME }}\"/g" vars.yaml
sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.EE_LICENSE_KEY }}\"/g" vars.yaml
# Update changed image tag
sed -i "/alerts/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
cat /tmp/image_override.yaml
# Deploy command
mv openreplay/charts/{ingress-nginx,alerts,quickwit} /tmp
rm -rf openreplay/charts/*
mv /tmp/{ingress-nginx,alerts,quickwit} openreplay/charts/
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# We're not passing -ee flag, because helm will add that.
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
- name: Alert slack
if: ${{ failure() }}
uses: rtCamp/action-slack-notify@v2
env:
SLACK_CHANNEL: ee
SLACK_TITLE: "Failed ${{ github.workflow }}"
SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff'
SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'
# - name: Debug Job
# # if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
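For reference, a minimal local sketch of the "Creating old image input" and tag-update steps above, using a fabricated pod image list in place of the kubectl query (the real tags come from the cluster, and IMAGE_TAG comes from the workflow env):

#!/usr/bin/env bash
# Simulate the image-override generation with hypothetical running tags.
set -euo pipefail
printf '%s\n' "alerts:v1.9.0-ee" "chalice:v1.9.0-ee" > /tmp/image_tag.txt   # hypothetical values
echo > /tmp/image_override.yaml
for line in $(cat /tmp/image_tag.txt); do
  image_array=($(echo "$line" | tr ':' '\n'))
  cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
  image:
    # strip the -ee suffix; helm appends it again
    tag: $(echo "${image_array[1]}" | cut -d '-' -f 1)
EOF
done
# Same sed addressing as the workflow: the tag sits three lines below the "alerts" key.
IMAGE_TAG="api-v1.10.0_deadbeef"   # hypothetical <ref_name>_<sha> value (no -ee; helm appends it)
sed -i "/alerts/{n;n;n;s/.*/    tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
cat /tmp/image_override.yaml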

.github/workflows/alerts.yaml (new file, 137 lines added)

@@ -0,0 +1,137 @@
# This action will push the alerts changes to aws
on:
workflow_dispatch:
inputs:
skip_security_checks:
description: 'Skip Security checks if there is an unfixable vuln or error. Value: true/false'
required: false
default: 'false'
push:
branches:
- api-v1.10.0
paths:
- api/**
name: Build and Deploy Alerts
jobs:
deploy:
name: Deploy
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
# We need to diff with old commit
# to see which workers got changed.
fetch-depth: 2
- name: Docker login
run: |
docker login ${{ secrets.OSS_REGISTRY_URL }} -u ${{ secrets.OSS_DOCKER_USERNAME }} -p "${{ secrets.OSS_REGISTRY_TOKEN }}"
- uses: azure/k8s-set-context@v1
with:
method: kubeconfig
kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext
# Caching docker images
- uses: satackey/action-docker-layer-caching@v0.0.11
# Ignore the failure of a step and avoid terminating the job.
continue-on-error: true
- name: Building and Pushing Alerts image
id: build-image
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
run: |
skip_security_checks=${{ github.event.inputs.skip_security_checks }}
cd api
PUSH_IMAGE=0 bash -x ./build_alerts.sh
[[ "x$skip_security_checks" == "xtrue" ]] || {
curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
images=("alerts")
for image in ${images[*]};do
./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
done
err_code=$?
[[ $err_code -ne 0 ]] && {
exit $err_code
}
} && {
echo "Skipping Security Checks"
}
images=("alerts")
for image in ${images[*]};do
docker push $DOCKER_REPO/$image:$IMAGE_TAG
done
- name: Creating old image input
run: |
#
# Create yaml with existing image tags
#
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
echo > /tmp/image_override.yaml
for line in `cat /tmp/image_tag.txt`;
do
image_array=($(echo "$line" | tr ':' '\n'))
cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
image:
tag: ${image_array[1]}
EOF
done
- name: Deploy to kubernetes
run: |
cd scripts/helmcharts/
## Update secrets
sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.OSS_PG_PASSWORD }}\"/g" vars.yaml
sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\"/g" vars.yaml
sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\"/g" vars.yaml
sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.OSS_JWT_SECRET }}\"/g" vars.yaml
sed -i "s/domainName: \"\"/domainName: \"${{ secrets.OSS_DOMAIN_NAME }}\"/g" vars.yaml
# Update changed image tag
sed -i "/alerts/{n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
cat /tmp/image_override.yaml
# Deploy command
mv openreplay/charts/{ingress-nginx,alerts,quickwit} /tmp
rm -rf openreplay/charts/*
mv /tmp/{ingress-nginx,alerts,quickwit} openreplay/charts/
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f -
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
- name: Alert slack
if: ${{ failure() }}
uses: rtCamp/action-slack-notify@v2
env:
SLACK_CHANNEL: foss
SLACK_TITLE: "Failed ${{ github.workflow }}"
SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff'
SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'
# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}
# ENVIRONMENT: staging
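The skip_security_checks gate in the two workflows above is written with chained || / && blocks; below is a standalone sketch of the same gate in explicit if/else form, with hypothetical registry and tag values (trivy flags as used in the workflow, v0.34.0):

#!/usr/bin/env bash
# Explicit form of the security-check gate: scan unless the input is 'true', then push.
set -euo pipefail
skip_security_checks="${skip_security_checks:-false}"   # workflow_dispatch input, defaults to 'false'
DOCKER_REPO="${DOCKER_REPO:-registry.example.com}"      # hypothetical registry
IMAGE_TAG="${IMAGE_TAG:-api-v1.10.0_deadbeef}"          # hypothetical tag
images=("alerts")
if [[ "$skip_security_checks" == "true" ]]; then
  echo "Skipping Security Checks"
else
  for image in "${images[@]}"; do
    ./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library \
      --severity "HIGH,CRITICAL" --ignore-unfixed "$DOCKER_REPO/$image:$IMAGE_TAG"
  done
fi
for image in "${images[@]}"; do
  docker push "$DOCKER_REPO/$image:$IMAGE_TAG"
done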


@@ -8,7 +8,7 @@ on:
default: 'false'
push:
branches:
- dev
- api-v1.10.0
paths:
- ee/api/**
- api/**
@@ -44,7 +44,7 @@ jobs:
continue-on-error: true
- name: Building and Pusing api image
- name: Building and Pushing api image
id: build-image
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
@@ -56,7 +56,7 @@ jobs:
PUSH_IMAGE=0 bash -x ./build.sh ee
[[ "x$skip_security_checks" == "xtrue" ]] || {
curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
images=("chalice" "alerts")
images=("chalice")
for image in ${images[*]};do
./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
done
@@ -67,7 +67,7 @@ jobs:
} && {
echo "Skipping Security Checks"
}
images=("chalice" "alerts")
images=("chalice")
for image in ${images[*]};do
docker push $DOCKER_REPO/$image:$IMAGE_TAG
done
@@ -110,9 +110,9 @@ jobs:
cat /tmp/image_override.yaml
# Deploy command
kubectl config set-context --namespace=app --current
kubectl config get-contexts
k_version=$(kubectl version --short 2>/dev/null | awk '/Server/{print $NF}')
mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp
rm -rf openreplay/charts/*
mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
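The deploy step above now detects the cluster's Kubernetes server version and hands it to helm; here is a small sketch of rendering the charts the same way locally but only dry-running the apply (assumes access to the same cluster and the vars/override files already prepared):

#!/usr/bin/env bash
# Render the OpenReplay charts as the workflow does, validating with a client-side dry run.
set -euo pipefail
cd scripts/helmcharts/
kubectl config set-context --namespace=app --current
k_version=$(kubectl version --short 2>/dev/null | awk '/Server/{print $NF}')
helm template openreplay -n app openreplay \
  -f vars.yaml -f /tmp/image_override.yaml \
  --set ingress-nginx.enabled=false --set skipMigration=true \
  --no-hooks --kube-version="$k_version" \
  | kubectl apply --dry-run=client -f -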


@@ -43,7 +43,7 @@ jobs:
continue-on-error: true
- name: Building and Pusing api image
- name: Building and Pushing api image
id: build-image
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}
@@ -55,7 +55,7 @@ jobs:
PUSH_IMAGE=0 bash -x ./build.sh
[[ "x$skip_security_checks" == "xtrue" ]] || {
curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
images=("chalice" "alerts")
images=("chalice")
for image in ${images[*]};do
./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
done
@@ -66,7 +66,7 @@ jobs:
} && {
echo "Skipping Security Checks"
}
images=("chalice" "alerts")
images=("chalice")
for image in ${images[*]};do
docker push $DOCKER_REPO/$image:$IMAGE_TAG
done

.github/workflows/crons-ee.yaml (new file, 141 lines added)

@@ -0,0 +1,141 @@
# This action will push the crons changes to aws
on:
workflow_dispatch:
inputs:
skip_security_checks:
description: 'Skip Security checks if there is an unfixable vuln or error. Value: true/false'
required: false
default: 'false'
push:
branches:
- api-v1.10.0
paths:
- ee/api/**
- api/**
name: Build and Deploy Crons EE
jobs:
deploy:
name: Deploy
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
# We need to diff with old commit
# to see which workers got changed.
fetch-depth: 2
- name: Docker login
run: |
docker login ${{ secrets.EE_REGISTRY_URL }} -u ${{ secrets.EE_DOCKER_USERNAME }} -p "${{ secrets.EE_REGISTRY_TOKEN }}"
- uses: azure/k8s-set-context@v1
with:
method: kubeconfig
kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext
# Caching docker images
- uses: satackey/action-docker-layer-caching@v0.0.11
# Ignore the failure of a step and avoid terminating the job.
continue-on-error: true
- name: Building and Pushing api image
id: build-image
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}-ee
ENVIRONMENT: staging
run: |
skip_security_checks=${{ github.event.inputs.skip_security_checks }}
cd api
PUSH_IMAGE=0 bash -x ./build_crons.sh ee
[[ "x$skip_security_checks" == "xtrue" ]] || {
curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./
images=("crons")
for image in ${images[*]};do
./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG
done
err_code=$?
[[ $err_code -ne 0 ]] && {
exit $err_code
}
} && {
echo "Skipping Security Checks"
}
images=("crons")
for image in ${images[*]};do
docker push $DOCKER_REPO/$image:$IMAGE_TAG
done
- name: Creating old image input
run: |
#
# Create yaml with existing image tags
#
kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\
tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt
echo > /tmp/image_override.yaml
for line in `cat /tmp/image_tag.txt`;
do
image_array=($(echo "$line" | tr ':' '\n'))
cat <<EOF >> /tmp/image_override.yaml
${image_array[0]}:
image:
# We have to strip off the -ee, as helm will append it.
tag: `echo ${image_array[1]} | cut -d '-' -f 1`
EOF
done
- name: Deploy to kubernetes
run: |
cd scripts/helmcharts/
## Update secrets
sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml
sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.EE_PG_PASSWORD }}\"/g" vars.yaml
sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\"/g" vars.yaml
sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.EE_MINIO_SECRET_KEY }}\"/g" vars.yaml
sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.EE_JWT_SECRET }}\"/g" vars.yaml
sed -i "s/domainName: \"\"/domainName: \"${{ secrets.EE_DOMAIN_NAME }}\"/g" vars.yaml
sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.EE_LICENSE_KEY }}\"/g" vars.yaml
# Update changed image tag
sed -i "/crons/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml
cat /tmp/image_override.yaml
# Deploy command
mv openreplay/charts/{ingress-nginx,crons,quickwit} /tmp
rm -rf openreplay/charts/*
mv /tmp/{ingress-nginx,crons,quickwit} openreplay/charts/
helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -
env:
DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# We're not passing -ee flag, because helm will add that.
IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }}
ENVIRONMENT: staging
- name: Alert slack
if: ${{ failure() }}
uses: rtCamp/action-slack-notify@v2
env:
SLACK_CHANNEL: ee
SLACK_TITLE: "Failed ${{ github.workflow }}"
SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff'
SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }}
SLACK_USERNAME: "OR Bot"
SLACK_MESSAGE: 'Build failed :bomb:'
# - name: Debug Job
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# env:
# DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}
# IMAGE_TAG: ${{ github.sha }}-ee
# ENVIRONMENT: staging
#
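The comment in the override step above notes that the -ee suffix has to be stripped because helm re-appends it; a one-liner makes that concrete, with a hypothetical running tag:

# Hypothetical tag read from a running EE pod; helm appends -ee again at deploy time.
running_tag="v1.9.0-ee"
echo "${running_tag}" | cut -d '-' -f 1    # prints: v1.9.0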


@@ -38,7 +38,7 @@ jobs:
continue-on-error: true
- name: Building and Pusing sourcemaps-reader image
- name: Building and Pushing sourcemaps-reader image
id: build-image
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}


@@ -32,7 +32,7 @@ jobs:
kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext
- name: Building and Pusing api image
- name: Building and Pushing api image
id: build-image
env:
DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }}

api/.gitignore (3 changed lines)

@@ -176,5 +176,4 @@ SUBNETS.json
./chalicelib/.configs
README/*
.local
build_crons.sh
.local


@@ -59,11 +59,11 @@ function build_api(){
check_prereq
build_api $environment
echo buil_complete
IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO SIGN_IMAGE=$SIGN_IMAGE SIGN_KEY=$SIGN_KEY bash build_alerts.sh $1
[[ $environment == "ee" ]] && {
cp ../ee/api/build_crons.sh .
IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO SIGN_IMAGE=$SIGN_IMAGE SIGN_KEY=$SIGN_KEY bash build_crons.sh $1
exit_err $?
rm build_crons.sh
} || true
#IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO SIGN_IMAGE=$SIGN_IMAGE SIGN_KEY=$SIGN_KEY bash build_alerts.sh $1
#
#[[ $environment == "ee" ]] && {
# cp ../ee/api/build_crons.sh .
# IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO SIGN_IMAGE=$SIGN_IMAGE SIGN_KEY=$SIGN_KEY bash build_crons.sh $1
# exit_err $?
# rm build_crons.sh
#} || true
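With that chaining commented out of build.sh, the alerts and crons images are produced by their own workflows; a minimal sketch of invoking the standalone build scripts the same way (hypothetical registry and tag values):

# Build the alerts and crons images directly, mirroring the new workflows.
cd api
DOCKER_REPO="registry.example.com" IMAGE_TAG="api-v1.10.0_deadbeef" \
  PUSH_IMAGE=0 bash -x ./build_alerts.sh        # append "ee" for the enterprise variant
DOCKER_REPO="registry.example.com" IMAGE_TAG="api-v1.10.0_deadbeef" \
  PUSH_IMAGE=0 bash -x ./build_crons.sh ee      # crons are built only when passed "ee"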


@@ -43,4 +43,6 @@ function build_crons(){
}
check_prereq
build_crons $1
[[ $1 == "ee" ]] && {
build_crons $1
}


@@ -1,15 +1,15 @@
requests==2.28.2
urllib3==1.26.14
boto3==1.26.53
boto3==1.26.70
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.6.0
elasticsearch==8.6.1
jira==3.4.1
fastapi==0.89.1
fastapi==0.92.0
uvicorn[standard]==0.20.0
python-decouple==3.7
pydantic[email]==1.10.4
apscheduler==3.9.1.post1
apscheduler==3.10.0


@@ -1,15 +1,15 @@
requests==2.28.2
urllib3==1.26.14
boto3==1.26.53
boto3==1.26.70
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.6.0
elasticsearch==8.6.1
jira==3.4.1
fastapi==0.89.1
fastapi==0.92.0
uvicorn[standard]==0.20.0
python-decouple==3.7
pydantic[email]==1.10.4
apscheduler==3.9.1.post1
apscheduler==3.10.0
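A quick, optional sanity check for the bumped pins in the two requirements files above (the throwaway virtualenv path is arbitrary):

# Install the updated pins into a scratch virtualenv and confirm the resolved versions.
python3 -m venv /tmp/or-api-venv
/tmp/or-api-venv/bin/pip install --quiet -r requirements.txt
/tmp/or-api-venv/bin/pip show fastapi boto3 elasticsearch apscheduler | grep -E '^(Name|Version)'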


@@ -23,7 +23,7 @@ public_app, app, app_apikey = get_routers()
@public_app.post('/login', tags=["authentication"])
def login(data: schemas.UserLoginSchema = Body(...)):
async def login(data: schemas.UserLoginSchema = Body(...)):
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
@@ -56,27 +56,27 @@ def login(data: schemas.UserLoginSchema = Body(...)):
@app.get('/logout', tags=["login", "logout"])
def logout_user(response: Response, context: schemas.CurrentContext = Depends(OR_context)):
async def logout_user(response: Response, context: schemas.CurrentContext = Depends(OR_context)):
response.delete_cookie("jwt")
return {"data": "success"}
@app.post('/{projectId}/sessions/search', tags=["sessions"])
def sessions_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
async def sessions_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id)
return {'data': data}
@app.post('/{projectId}/sessions/search/ids', tags=["sessions"])
def session_ids_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
async def session_ids_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True)
return {'data': data}
@app.get('/{projectId}/events/search', tags=["events"])
def events_search(projectId: int, q: str,
async def events_search(projectId: int, q: str,
type: Union[schemas.FilterType, schemas.EventType,
schemas.PerformanceEventType, schemas.FetchFilterType,
schemas.GraphqlFilterType, str] = None,
@@ -108,7 +108,7 @@ def events_search(projectId: int, q: str,
@app.get('/{projectId}/integrations', tags=["integrations"])
def get_integrations_status(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_integrations_status(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = integrations_global.get_global_integrations_status(tenant_id=context.tenant_id,
user_id=context.user_id,
project_id=projectId)
@@ -116,7 +116,7 @@ def get_integrations_status(projectId: int, context: schemas.CurrentContext = De
@app.post('/{projectId}/integrations/{integration}/notify/{webhookId}/{source}/{sourceId}', tags=["integrations"])
def integration_notify(projectId: int, integration: str, webhookId: int, source: str, sourceId: str,
async def integration_notify(projectId: int, integration: str, webhookId: int, source: str, sourceId: str,
data: schemas.IntegrationNotificationSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
comment = None
@@ -140,144 +140,144 @@ def integration_notify(projectId: int, integration: str, webhookId: int, source:
@app.get('/integrations/sentry', tags=["integrations"])
def get_all_sentry(context: schemas.CurrentContext = Depends(OR_context)):
async def get_all_sentry(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_sentry.get_all(tenant_id=context.tenant_id)}
@app.get('/{projectId}/integrations/sentry', tags=["integrations"])
def get_sentry(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_sentry(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_sentry.get(project_id=projectId)}
@app.post('/{projectId}/integrations/sentry', tags=["integrations"])
def add_edit_sentry(projectId: int, data: schemas.SentrySchema = Body(...),
async def add_edit_sentry(projectId: int, data: schemas.SentrySchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_sentry.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
@app.delete('/{projectId}/integrations/sentry', tags=["integrations"])
def delete_sentry(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_sentry(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_sentry.delete(tenant_id=context.tenant_id, project_id=projectId)}
@app.get('/{projectId}/integrations/sentry/events/{eventId}', tags=["integrations"])
def proxy_sentry(projectId: int, eventId: str, context: schemas.CurrentContext = Depends(OR_context)):
async def proxy_sentry(projectId: int, eventId: str, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_sentry.proxy_get(tenant_id=context.tenant_id, project_id=projectId, event_id=eventId)}
@app.get('/integrations/datadog', tags=["integrations"])
def get_all_datadog(context: schemas.CurrentContext = Depends(OR_context)):
async def get_all_datadog(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_datadog.get_all(tenant_id=context.tenant_id)}
@app.get('/{projectId}/integrations/datadog', tags=["integrations"])
def get_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_datadog.get(project_id=projectId)}
@app.post('/{projectId}/integrations/datadog', tags=["integrations"])
def add_edit_datadog(projectId: int, data: schemas.DatadogSchema = Body(...),
async def add_edit_datadog(projectId: int, data: schemas.DatadogSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_datadog.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
@app.delete('/{projectId}/integrations/datadog', tags=["integrations"])
def delete_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_datadog.delete(tenant_id=context.tenant_id, project_id=projectId)}
@app.get('/integrations/stackdriver', tags=["integrations"])
def get_all_stackdriver(context: schemas.CurrentContext = Depends(OR_context)):
async def get_all_stackdriver(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_stackdriver.get_all(tenant_id=context.tenant_id)}
@app.get('/{projectId}/integrations/stackdriver', tags=["integrations"])
def get_stackdriver(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_stackdriver(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_stackdriver.get(project_id=projectId)}
@app.post('/{projectId}/integrations/stackdriver', tags=["integrations"])
def add_edit_stackdriver(projectId: int, data: schemas.StackdriverSchema = Body(...),
async def add_edit_stackdriver(projectId: int, data: schemas.StackdriverSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_stackdriver.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
@app.delete('/{projectId}/integrations/stackdriver', tags=["integrations"])
def delete_stackdriver(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_stackdriver(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_stackdriver.delete(tenant_id=context.tenant_id, project_id=projectId)}
@app.get('/integrations/newrelic', tags=["integrations"])
def get_all_newrelic(context: schemas.CurrentContext = Depends(OR_context)):
async def get_all_newrelic(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_newrelic.get_all(tenant_id=context.tenant_id)}
@app.get('/{projectId}/integrations/newrelic', tags=["integrations"])
def get_newrelic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_newrelic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_newrelic.get(project_id=projectId)}
@app.post('/{projectId}/integrations/newrelic', tags=["integrations"])
def add_edit_newrelic(projectId: int, data: schemas.NewrelicSchema = Body(...),
async def add_edit_newrelic(projectId: int, data: schemas.NewrelicSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_newrelic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
@app.delete('/{projectId}/integrations/newrelic', tags=["integrations"])
def delete_newrelic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_newrelic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_newrelic.delete(tenant_id=context.tenant_id, project_id=projectId)}
@app.get('/integrations/rollbar', tags=["integrations"])
def get_all_rollbar(context: schemas.CurrentContext = Depends(OR_context)):
async def get_all_rollbar(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_rollbar.get_all(tenant_id=context.tenant_id)}
@app.get('/{projectId}/integrations/rollbar', tags=["integrations"])
def get_rollbar(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_rollbar(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_rollbar.get(project_id=projectId)}
@app.post('/{projectId}/integrations/rollbar', tags=["integrations"])
def add_edit_rollbar(projectId: int, data: schemas.RollbarSchema = Body(...),
async def add_edit_rollbar(projectId: int, data: schemas.RollbarSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_rollbar.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
@app.delete('/{projectId}/integrations/rollbar', tags=["integrations"])
def delete_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_rollbar.delete(tenant_id=context.tenant_id, project_id=projectId)}
@app.post('/integrations/bugsnag/list_projects', tags=["integrations"])
def list_projects_bugsnag(data: schemas.BugsnagBasicSchema = Body(...),
async def list_projects_bugsnag(data: schemas.BugsnagBasicSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_bugsnag.list_projects(auth_token=data.authorizationToken)}
@app.get('/integrations/bugsnag', tags=["integrations"])
def get_all_bugsnag(context: schemas.CurrentContext = Depends(OR_context)):
async def get_all_bugsnag(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_bugsnag.get_all(tenant_id=context.tenant_id)}
@app.get('/{projectId}/integrations/bugsnag', tags=["integrations"])
def get_bugsnag(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_bugsnag(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_bugsnag.get(project_id=projectId)}
@app.post('/{projectId}/integrations/bugsnag', tags=["integrations"])
def add_edit_bugsnag(projectId: int, data: schemas.BugsnagSchema = Body(...),
async def add_edit_bugsnag(projectId: int, data: schemas.BugsnagSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_bugsnag.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
@app.delete('/{projectId}/integrations/bugsnag', tags=["integrations"])
def delete_bugsnag(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_bugsnag(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_bugsnag.delete(tenant_id=context.tenant_id, project_id=projectId)}
@app.post('/integrations/cloudwatch/list_groups', tags=["integrations"])
def list_groups_cloudwatch(data: schemas.CloudwatchBasicSchema = Body(...),
async def list_groups_cloudwatch(data: schemas.CloudwatchBasicSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_cloudwatch.list_log_groups(aws_access_key_id=data.awsAccessKeyId,
aws_secret_access_key=data.awsSecretAccessKey,
@@ -285,77 +285,77 @@ def list_groups_cloudwatch(data: schemas.CloudwatchBasicSchema = Body(...),
@app.get('/integrations/cloudwatch', tags=["integrations"])
def get_all_cloudwatch(context: schemas.CurrentContext = Depends(OR_context)):
async def get_all_cloudwatch(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_cloudwatch.get_all(tenant_id=context.tenant_id)}
@app.get('/{projectId}/integrations/cloudwatch', tags=["integrations"])
def get_cloudwatch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_cloudwatch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_cloudwatch.get(project_id=projectId)}
@app.post('/{projectId}/integrations/cloudwatch', tags=["integrations"])
def add_edit_cloudwatch(projectId: int, data: schemas.CloudwatchSchema = Body(...),
async def add_edit_cloudwatch(projectId: int, data: schemas.CloudwatchSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_cloudwatch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
@app.delete('/{projectId}/integrations/cloudwatch', tags=["integrations"])
def delete_cloudwatch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_cloudwatch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_cloudwatch.delete(tenant_id=context.tenant_id, project_id=projectId)}
@app.get('/integrations/elasticsearch', tags=["integrations"])
def get_all_elasticsearch(context: schemas.CurrentContext = Depends(OR_context)):
async def get_all_elasticsearch(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_elasticsearch.get_all(tenant_id=context.tenant_id)}
@app.get('/{projectId}/integrations/elasticsearch', tags=["integrations"])
def get_elasticsearch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_elasticsearch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_elasticsearch.get(project_id=projectId)}
@app.post('/integrations/elasticsearch/test', tags=["integrations"])
def test_elasticsearch_connection(data: schemas.ElasticsearchBasicSchema = Body(...),
async def test_elasticsearch_connection(data: schemas.ElasticsearchBasicSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_elasticsearch.ping(tenant_id=context.tenant_id, **data.dict())}
@app.post('/{projectId}/integrations/elasticsearch', tags=["integrations"])
def add_edit_elasticsearch(projectId: int, data: schemas.ElasticsearchSchema = Body(...),
async def add_edit_elasticsearch(projectId: int, data: schemas.ElasticsearchSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {
"data": log_tool_elasticsearch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
@app.delete('/{projectId}/integrations/elasticsearch', tags=["integrations"])
def delete_elasticsearch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_elasticsearch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_elasticsearch.delete(tenant_id=context.tenant_id, project_id=projectId)}
@app.get('/integrations/sumologic', tags=["integrations"])
def get_all_sumologic(context: schemas.CurrentContext = Depends(OR_context)):
async def get_all_sumologic(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_sumologic.get_all(tenant_id=context.tenant_id)}
@app.get('/{projectId}/integrations/sumologic', tags=["integrations"])
def get_sumologic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_sumologic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_sumologic.get(project_id=projectId)}
@app.post('/{projectId}/integrations/sumologic', tags=["integrations"])
def add_edit_sumologic(projectId: int, data: schemas.SumologicSchema = Body(...),
async def add_edit_sumologic(projectId: int, data: schemas.SumologicSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_sumologic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())}
@app.delete('/{projectId}/integrations/sumologic', tags=["integrations"])
def delete_sumologic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_sumologic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": log_tool_sumologic.delete(tenant_id=context.tenant_id, project_id=projectId)}
@app.get('/integrations/issues', tags=["integrations"])
def get_integration_status(context: schemas.CurrentContext = Depends(OR_context)):
async def get_integration_status(context: schemas.CurrentContext = Depends(OR_context)):
error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id,
user_id=context.user_id)
if error is not None and integration is None:
@@ -364,7 +364,7 @@ def get_integration_status(context: schemas.CurrentContext = Depends(OR_context)
@app.get('/integrations/jira', tags=["integrations"])
def get_integration_status_jira(context: schemas.CurrentContext = Depends(OR_context)):
async def get_integration_status_jira(context: schemas.CurrentContext = Depends(OR_context)):
error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id,
user_id=context.user_id,
tool=integration_jira_cloud.PROVIDER)
@@ -374,7 +374,7 @@ def get_integration_status_jira(context: schemas.CurrentContext = Depends(OR_con
@app.get('/integrations/github', tags=["integrations"])
def get_integration_status_github(context: schemas.CurrentContext = Depends(OR_context)):
async def get_integration_status_github(context: schemas.CurrentContext = Depends(OR_context)):
error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id,
user_id=context.user_id,
tool=integration_github.PROVIDER)
@@ -384,7 +384,7 @@ def get_integration_status_github(context: schemas.CurrentContext = Depends(OR_c
@app.post('/integrations/jira', tags=["integrations"])
def add_edit_jira_cloud(data: schemas.JiraSchema = Body(...),
async def add_edit_jira_cloud(data: schemas.JiraSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if not data.url.endswith('atlassian.net'):
return {"errors": ["url must be a valid JIRA URL (example.atlassian.net)"]}
@@ -397,7 +397,7 @@ def add_edit_jira_cloud(data: schemas.JiraSchema = Body(...),
@app.post('/integrations/github', tags=["integrations"])
def add_edit_github(data: schemas.GithubSchema = Body(...),
async def add_edit_github(data: schemas.GithubSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
error, integration = integrations_manager.get_integration(tool=integration_github.PROVIDER,
tenant_id=context.tenant_id,
@@ -408,7 +408,7 @@ def add_edit_github(data: schemas.GithubSchema = Body(...),
@app.delete('/integrations/issues', tags=["integrations"])
def delete_default_issue_tracking_tool(context: schemas.CurrentContext = Depends(OR_context)):
async def delete_default_issue_tracking_tool(context: schemas.CurrentContext = Depends(OR_context)):
error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id,
user_id=context.user_id)
if error is not None and integration is None:
@@ -417,7 +417,7 @@ def delete_default_issue_tracking_tool(context: schemas.CurrentContext = Depends
@app.delete('/integrations/jira', tags=["integrations"])
def delete_jira_cloud(context: schemas.CurrentContext = Depends(OR_context)):
async def delete_jira_cloud(context: schemas.CurrentContext = Depends(OR_context)):
error, integration = integrations_manager.get_integration(tool=integration_jira_cloud.PROVIDER,
tenant_id=context.tenant_id,
user_id=context.user_id,
@@ -428,7 +428,7 @@ def delete_jira_cloud(context: schemas.CurrentContext = Depends(OR_context)):
@app.delete('/integrations/github', tags=["integrations"])
def delete_github(context: schemas.CurrentContext = Depends(OR_context)):
async def delete_github(context: schemas.CurrentContext = Depends(OR_context)):
error, integration = integrations_manager.get_integration(tool=integration_github.PROVIDER,
tenant_id=context.tenant_id,
user_id=context.user_id,
@@ -439,7 +439,7 @@ def delete_github(context: schemas.CurrentContext = Depends(OR_context)):
@app.get('/integrations/issues/list_projects', tags=["integrations"])
def get_all_issue_tracking_projects(context: schemas.CurrentContext = Depends(OR_context)):
async def get_all_issue_tracking_projects(context: schemas.CurrentContext = Depends(OR_context)):
error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id,
user_id=context.user_id)
if error is not None:
@@ -451,7 +451,7 @@ def get_all_issue_tracking_projects(context: schemas.CurrentContext = Depends(OR
@app.get('/integrations/issues/{integrationProjectId}', tags=["integrations"])
def get_integration_metadata(integrationProjectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_integration_metadata(integrationProjectId: int, context: schemas.CurrentContext = Depends(OR_context)):
error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id,
user_id=context.user_id)
if error is not None:
@@ -463,7 +463,7 @@ def get_integration_metadata(integrationProjectId: int, context: schemas.Current
@app.get('/{projectId}/assignments', tags=["assignment"])
def get_all_assignments(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_all_assignments(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.get_all(project_id=projectId, user_id=context.user_id)
return {
'data': data
@@ -471,7 +471,7 @@ def get_all_assignments(projectId: int, context: schemas.CurrentContext = Depend
@app.post('/{projectId}/sessions/{sessionId}/assign/projects/{integrationProjectId}', tags=["assignment"])
def create_issue_assignment(projectId: int, sessionId: int, integrationProjectId,
async def create_issue_assignment(projectId: int, sessionId: int, integrationProjectId,
data: schemas.AssignmentSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.create_new_assignment(tenant_id=context.tenant_id, project_id=projectId,
@@ -488,12 +488,12 @@ def create_issue_assignment(projectId: int, sessionId: int, integrationProjectId
@app.get('/{projectId}/gdpr', tags=["projects", "gdpr"])
def get_gdpr(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_gdpr(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": projects.get_gdpr(project_id=projectId)}
@app.post('/{projectId}/gdpr', tags=["projects", "gdpr"])
def edit_gdpr(projectId: int, data: schemas.GdprSchema = Body(...),
async def edit_gdpr(projectId: int, data: schemas.GdprSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
result = projects.edit_gdpr(project_id=projectId, gdpr=data.dict())
if "errors" in result:
@@ -502,43 +502,43 @@ def edit_gdpr(projectId: int, data: schemas.GdprSchema = Body(...),
@public_app.post('/password/reset-link', tags=["reset password"])
def reset_password_handler(data: schemas.ForgetPasswordPayloadSchema = Body(...)):
async def reset_password_handler(data: schemas.ForgetPasswordPayloadSchema = Body(...)):
if len(data.email) < 5:
return {"errors": ["please provide a valid email address"]}
return reset_password.reset(data=data)
@app.get('/{projectId}/metadata', tags=["metadata"])
def get_metadata(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_metadata(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": metadata.get(project_id=projectId)}
@app.post('/{projectId}/metadata/list', tags=["metadata"])
def add_edit_delete_metadata(projectId: int, data: schemas.MetadataListSchema = Body(...),
async def add_edit_delete_metadata(projectId: int, data: schemas.MetadataListSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return metadata.add_edit_delete(tenant_id=context.tenant_id, project_id=projectId, new_metas=data.list)
@app.post('/{projectId}/metadata', tags=["metadata"])
def add_metadata(projectId: int, data: schemas.MetadataBasicSchema = Body(...),
async def add_metadata(projectId: int, data: schemas.MetadataBasicSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return metadata.add(tenant_id=context.tenant_id, project_id=projectId, new_name=data.key)
@app.post('/{projectId}/metadata/{index}', tags=["metadata"])
def edit_metadata(projectId: int, index: int, data: schemas.MetadataBasicSchema = Body(...),
async def edit_metadata(projectId: int, index: int, data: schemas.MetadataBasicSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return metadata.edit(tenant_id=context.tenant_id, project_id=projectId, index=index,
new_name=data.key)
@app.delete('/{projectId}/metadata/{index}', tags=["metadata"])
def delete_metadata(projectId: int, index: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_metadata(projectId: int, index: int, context: schemas.CurrentContext = Depends(OR_context)):
return metadata.delete(tenant_id=context.tenant_id, project_id=projectId, index=index)
@app.get('/{projectId}/metadata/search', tags=["metadata"])
def search_metadata(projectId: int, q: str, key: str, context: schemas.CurrentContext = Depends(OR_context)):
async def search_metadata(projectId: int, q: str, key: str, context: schemas.CurrentContext = Depends(OR_context)):
if len(q) == 0 and len(key) == 0:
return {"data": []}
if len(q) == 0:
@@ -549,72 +549,72 @@ def search_metadata(projectId: int, q: str, key: str, context: schemas.CurrentCo
@app.get('/{projectId}/integration/sources', tags=["integrations"])
def search_integrations(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def search_integrations(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return log_tools.search(project_id=projectId)
@app.get('/{projectId}/sample_rate', tags=["projects"])
def get_capture_status(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_capture_status(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": projects.get_capture_status(project_id=projectId)}
@app.post('/{projectId}/sample_rate', tags=["projects"])
def update_capture_status(projectId: int, data: schemas.SampleRateSchema = Body(...),
async def update_capture_status(projectId: int, data: schemas.SampleRateSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": projects.update_capture_status(project_id=projectId, changes=data.dict())}
@app.get('/announcements', tags=["announcements"])
def get_all_announcements(context: schemas.CurrentContext = Depends(OR_context)):
async def get_all_announcements(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": announcements.get_all(user_id=context.user_id)}
@app.get('/announcements/view', tags=["announcements"])
def get_all_announcements(context: schemas.CurrentContext = Depends(OR_context)):
async def get_all_announcements(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": announcements.view(user_id=context.user_id)}
@app.get('/show_banner', tags=["banner"])
def errors_merge(context: schemas.CurrentContext = Depends(OR_context)):
async def errors_merge(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": False}
@app.post('/{projectId}/alerts', tags=["alerts"])
def create_alert(projectId: int, data: schemas.AlertSchema = Body(...),
async def create_alert(projectId: int, data: schemas.AlertSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return alerts.create(project_id=projectId, data=data)
@app.get('/{projectId}/alerts', tags=["alerts"])
def get_all_alerts(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_all_alerts(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": alerts.get_all(project_id=projectId)}
@app.get('/{projectId}/alerts/triggers', tags=["alerts", "customMetrics"])
def get_alerts_triggers(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_alerts_triggers(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": alerts.get_predefined_values() \
+ custom_metrics.get_series_for_alert(project_id=projectId, user_id=context.user_id)}
@app.get('/{projectId}/alerts/{alertId}', tags=["alerts"])
def get_alert(projectId: int, alertId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_alert(projectId: int, alertId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": alerts.get(id=alertId)}
@app.post('/{projectId}/alerts/{alertId}', tags=["alerts"])
def update_alert(projectId: int, alertId: int, data: schemas.AlertSchema = Body(...),
async def update_alert(projectId: int, alertId: int, data: schemas.AlertSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return alerts.update(id=alertId, data=data)
@app.delete('/{projectId}/alerts/{alertId}', tags=["alerts"])
def delete_alert(projectId: int, alertId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_alert(projectId: int, alertId: int, context: schemas.CurrentContext = Depends(OR_context)):
return alerts.delete(project_id=projectId, alert_id=alertId)
@app_apikey.put('/{projectKey}/sourcemaps/', tags=["sourcemaps"])
@app_apikey.put('/{projectKey}/sourcemaps', tags=["sourcemaps"])
def sign_sourcemap_for_upload(projectKey: str, data: schemas.SourcemapUploadPayloadSchema = Body(...),
async def sign_sourcemap_for_upload(projectKey: str, data: schemas.SourcemapUploadPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
project_id = projects.get_internal_project_id(projectKey)
if project_id is None:
@@ -624,59 +624,59 @@ def sign_sourcemap_for_upload(projectKey: str, data: schemas.SourcemapUploadPayl
@app.get('/config/weekly_report', tags=["weekly report config"])
def get_weekly_report_config(context: schemas.CurrentContext = Depends(OR_context)):
async def get_weekly_report_config(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": weekly_report.get_config(user_id=context.user_id)}
@app.post('/config/weekly_report', tags=["weekly report config"])
def edit_weekly_report_config(data: schemas.WeeklyReportConfigSchema = Body(...),
async def edit_weekly_report_config(data: schemas.WeeklyReportConfigSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": weekly_report.edit_config(user_id=context.user_id, weekly_report=data.weekly_report)}
@app.get('/{projectId}/issue_types', tags=["issues"])
def issue_types(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def issue_types(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": issues.get_all_types()}
@app.get('/issue_types', tags=["issues"])
def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)):
async def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": issues.get_all_types()}
@app.get('/{projectId}/assist/sessions', tags=["assist"])
def get_sessions_live(projectId: int, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)):
async def get_sessions_live(projectId: int, userId: str = None, context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_sessions_ws_user_id(projectId, user_id=userId)
return {'data': data}
@app.post('/{projectId}/assist/sessions', tags=["assist"])
def sessions_live(projectId: int, data: schemas.LiveSessionsSearchPayloadSchema = Body(...),
async def sessions_live(projectId: int, data: schemas.LiveSessionsSearchPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_sessions_ws(projectId, body=data)
return {'data': data}
@app.post('/{projectId}/mobile/{sessionId}/urls', tags=['mobile'])
def mobile_signe(projectId: int, sessionId: int, data: schemas.MobileSignPayloadSchema = Body(...),
async def mobile_signe(projectId: int, sessionId: int, data: schemas.MobileSignPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": mobile.sign_keys(project_id=projectId, session_id=sessionId, keys=data.keys)}
@public_app.post('/signup', tags=['signup'])
@public_app.put('/signup', tags=['signup'])
def signup_handler(data: schemas.UserSignupSchema = Body(...)):
async def signup_handler(data: schemas.UserSignupSchema = Body(...)):
return signup.create_step1(data)
@app.post('/projects', tags=['projects'])
def create_project(data: schemas.CreateProjectSchema = Body(...),
async def create_project(data: schemas.CreateProjectSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return projects.create(tenant_id=context.tenant_id, user_id=context.user_id, data=data)
@app.get('/projects/{projectId}', tags=['projects'])
def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = projects.get_project(tenant_id=context.tenant_id, project_id=projectId, include_last_session=True,
include_gdpr=True)
if data is None:
@@ -685,18 +685,18 @@ def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_con
@app.put('/projects/{projectId}', tags=['projects'])
def edit_project(projectId: int, data: schemas.CreateProjectSchema = Body(...),
async def edit_project(projectId: int, data: schemas.CreateProjectSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return projects.edit(tenant_id=context.tenant_id, user_id=context.user_id, data=data, project_id=projectId)
@app.delete('/projects/{projectId}', tags=['projects'])
def delete_project(projectId, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_project(projectId, context: schemas.CurrentContext = Depends(OR_context)):
return projects.delete(tenant_id=context.tenant_id, user_id=context.user_id, project_id=projectId)
@app.get('/client/new_api_key', tags=['client'])
def generate_new_tenant_token(context: schemas.CurrentContext = Depends(OR_context)):
async def generate_new_tenant_token(context: schemas.CurrentContext = Depends(OR_context)):
return {
'data': tenants.generate_new_api_key(context.tenant_id)
}
@@ -704,28 +704,28 @@ def generate_new_tenant_token(context: schemas.CurrentContext = Depends(OR_conte
@app.post('/client', tags=['client'])
@app.put('/client', tags=['client'])
def edit_client(data: schemas.UpdateTenantSchema = Body(...),
async def edit_client(data: schemas.UpdateTenantSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return tenants.update(tenant_id=context.tenant_id, user_id=context.user_id, data=data)
@app.get('/notifications', tags=['notifications'])
def get_notifications(context: schemas.CurrentContext = Depends(OR_context)):
async def get_notifications(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": notifications.get_all(tenant_id=context.tenant_id, user_id=context.user_id)}
@app.get('/notifications/count', tags=['notifications'])
def get_notifications_count(context: schemas.CurrentContext = Depends(OR_context)):
async def get_notifications_count(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": notifications.get_all_count(tenant_id=context.tenant_id, user_id=context.user_id)}
@app.get('/notifications/{notificationId}/view', tags=['notifications'])
def view_notifications(notificationId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def view_notifications(notificationId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": notifications.view_notification(notification_ids=[notificationId], user_id=context.user_id)}
@app.post('/notifications/view', tags=['notifications'])
def batch_view_notifications(data: schemas.NotificationsViewSchema,
async def batch_view_notifications(data: schemas.NotificationsViewSchema,
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": notifications.view_notification(notification_ids=data.ids,
startTimestamp=data.startTimestamp,
@@ -735,83 +735,83 @@ def batch_view_notifications(data: schemas.NotificationsViewSchema,
@app.get('/boarding', tags=['boarding'])
def get_boarding_state(context: schemas.CurrentContext = Depends(OR_context)):
async def get_boarding_state(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": boarding.get_state(tenant_id=context.tenant_id)}
@app.get('/boarding/installing', tags=['boarding'])
def get_boarding_state_installing(context: schemas.CurrentContext = Depends(OR_context)):
async def get_boarding_state_installing(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": boarding.get_state_installing(tenant_id=context.tenant_id)}
@app.get('/boarding/identify-users', tags=["boarding"])
def get_boarding_state_identify_users(context: schemas.CurrentContext = Depends(OR_context)):
async def get_boarding_state_identify_users(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": boarding.get_state_identify_users(tenant_id=context.tenant_id)}
@app.get('/boarding/manage-users', tags=["boarding"])
def get_boarding_state_manage_users(context: schemas.CurrentContext = Depends(OR_context)):
async def get_boarding_state_manage_users(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": boarding.get_state_manage_users(tenant_id=context.tenant_id)}
@app.get('/boarding/integrations', tags=["boarding"])
def get_boarding_state_integrations(context: schemas.CurrentContext = Depends(OR_context)):
async def get_boarding_state_integrations(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": boarding.get_state_integrations(tenant_id=context.tenant_id)}
@app.get('/integrations/slack/channels', tags=["integrations"])
def get_slack_channels(context: schemas.CurrentContext = Depends(OR_context)):
async def get_slack_channels(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": webhook.get_by_type(tenant_id=context.tenant_id, webhook_type=schemas.WebhookType.slack)}
@app.get('/integrations/slack/{webhookId}', tags=["integrations"])
def get_slack_webhook(webhookId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_slack_webhook(webhookId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": Slack.get_integration(tenant_id=context.tenant_id, integration_id=webhookId)}
@app.delete('/integrations/slack/{webhookId}', tags=["integrations"])
def delete_slack_integration(webhookId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_slack_integration(webhookId: int, context: schemas.CurrentContext = Depends(OR_context)):
return webhook.delete(tenant_id=context.tenant_id, webhook_id=webhookId)
@app.put('/webhooks', tags=["webhooks"])
def add_edit_webhook(data: schemas.CreateEditWebhookSchema = Body(...),
async def add_edit_webhook(data: schemas.CreateEditWebhookSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": webhook.add_edit(tenant_id=context.tenant_id, data=data.dict(), replace_none=True)}
@app.get('/webhooks', tags=["webhooks"])
def get_webhooks(context: schemas.CurrentContext = Depends(OR_context)):
async def get_webhooks(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": webhook.get_by_tenant(tenant_id=context.tenant_id, replace_none=True)}
@app.delete('/webhooks/{webhookId}', tags=["webhooks"])
def delete_webhook(webhookId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_webhook(webhookId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": webhook.delete(tenant_id=context.tenant_id, webhook_id=webhookId)}
@app.get('/client/members', tags=["client"])
def get_members(context: schemas.CurrentContext = Depends(OR_context)):
async def get_members(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": users.get_members(tenant_id=context.tenant_id)}
@app.get('/client/members/{memberId}/reset', tags=["client"])
def reset_reinvite_member(memberId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def reset_reinvite_member(memberId: int, context: schemas.CurrentContext = Depends(OR_context)):
return users.reset_member(tenant_id=context.tenant_id, editor_id=context.user_id, user_id_to_update=memberId)
@app.delete('/client/members/{memberId}', tags=["client"])
def delete_member(memberId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_member(memberId: int, context: schemas.CurrentContext = Depends(OR_context)):
return users.delete_member(tenant_id=context.tenant_id, user_id=context.user_id, id_to_delete=memberId)
@app.get('/account/new_api_key', tags=["account"])
def generate_new_user_token(context: schemas.CurrentContext = Depends(OR_context)):
async def generate_new_user_token(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": users.generate_new_api_key(user_id=context.user_id)}
@app.post('/account/password', tags=["account"])
def change_client_password(data: schemas.EditUserPasswordSchema = Body(...),
async def change_client_password(data: schemas.EditUserPasswordSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return users.change_password(email=context.email, old_password=data.old_password,
new_password=data.new_password, tenant_id=context.tenant_id,
@ -819,34 +819,34 @@ def change_client_password(data: schemas.EditUserPasswordSchema = Body(...),
@app.post('/{projectId}/saved_search', tags=["savedSearch"])
def add_saved_search(projectId: int, data: schemas.SavedSearchSchema = Body(...),
async def add_saved_search(projectId: int, data: schemas.SavedSearchSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return saved_search.create(project_id=projectId, user_id=context.user_id, data=data)
@app.get('/{projectId}/saved_search', tags=["savedSearch"])
def get_saved_searches(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_saved_searches(projectId: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": saved_search.get_all(project_id=projectId, user_id=context.user_id, details=True)}
@app.get('/{projectId}/saved_search/{search_id}', tags=["savedSearch"])
def get_saved_search(projectId: int, search_id: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_saved_search(projectId: int, search_id: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": saved_search.get(project_id=projectId, search_id=search_id, user_id=context.user_id)}
@app.post('/{projectId}/saved_search/{search_id}', tags=["savedSearch"])
def update_saved_search(projectId: int, search_id: int, data: schemas.SavedSearchSchema = Body(...),
async def update_saved_search(projectId: int, search_id: int, data: schemas.SavedSearchSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": saved_search.update(user_id=context.user_id, search_id=search_id, data=data, project_id=projectId)}
@app.delete('/{projectId}/saved_search/{search_id}', tags=["savedSearch"])
def delete_saved_search(projectId: int, search_id: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_saved_search(projectId: int, search_id: int, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": saved_search.delete(project_id=projectId, user_id=context.user_id, search_id=search_id)}
@app.get('/limits', tags=['accounts'])
def get_limits(context: schemas.CurrentContext = Depends(OR_context)):
async def get_limits(context: schemas.CurrentContext = Depends(OR_context)):
return {
'data': {
"teamMember": -1,
@ -856,12 +856,12 @@ def get_limits(context: schemas.CurrentContext = Depends(OR_context)):
@app.get('/integrations/msteams/channels', tags=["integrations"])
def get_msteams_channels(context: schemas.CurrentContext = Depends(OR_context)):
async def get_msteams_channels(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": webhook.get_by_type(tenant_id=context.tenant_id, webhook_type=schemas.WebhookType.msteams)}
@app.post('/integrations/msteams', tags=['integrations'])
def add_msteams_integration(data: schemas.AddCollaborationSchema,
async def add_msteams_integration(data: schemas.AddCollaborationSchema,
context: schemas.CurrentContext = Depends(OR_context)):
n = MSTeams.add(tenant_id=context.tenant_id, data=data)
if n is None:
@ -873,7 +873,7 @@ def add_msteams_integration(data: schemas.AddCollaborationSchema,
@app.post('/integrations/msteams/{webhookId}', tags=['integrations'])
def edit_msteams_integration(webhookId: int, data: schemas.EditCollaborationSchema = Body(...),
async def edit_msteams_integration(webhookId: int, data: schemas.EditCollaborationSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if len(data.url) > 0:
old = MSTeams.get_integration(tenant_id=context.tenant_id, integration_id=webhookId)
@ -890,12 +890,12 @@ def edit_msteams_integration(webhookId: int, data: schemas.EditCollaborationSche
@app.delete('/integrations/msteams/{webhookId}', tags=["integrations"])
def delete_msteams_integration(webhookId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_msteams_integration(webhookId: int, context: schemas.CurrentContext = Depends(OR_context)):
return webhook.delete(tenant_id=context.tenant_id, webhook_id=webhookId)
@public_app.get('/general_stats', tags=["private"], include_in_schema=False)
def get_general_stats():
async def get_general_stats():
return {"data": {"sessions:": sessions.count_all()}}
@ -903,6 +903,6 @@ def get_general_stats():
@public_app.post('/', tags=["health"])
@public_app.put('/', tags=["health"])
@public_app.delete('/', tags=["health"])
def health_check():
async def health_check():
return {"data": {"stage": f"live {config('version_number', default='')}",
"internalCrons": config("LOCAL_CRONS", default=False, cast=bool)}}


@ -20,7 +20,7 @@ public_app, app, app_apikey = get_routers()
@public_app.get('/signup', tags=['signup'])
def get_all_signup():
async def get_all_signup():
return {"data": {"tenants": tenants.tenants_exists(),
"sso": None,
"ssoProvider": None,
@ -28,7 +28,7 @@ def get_all_signup():
@app.get('/account', tags=['accounts'])
def get_account(context: schemas.CurrentContext = Depends(OR_context)):
async def get_account(context: schemas.CurrentContext = Depends(OR_context)):
r = users.get(tenant_id=context.tenant_id, user_id=context.user_id)
t = tenants.get_by_tenant_id(context.tenant_id)
if t is not None:
@ -46,14 +46,14 @@ def get_account(context: schemas.CurrentContext = Depends(OR_context)):
@app.post('/account', tags=["account"])
def edit_account(data: schemas.EditUserSchema = Body(...),
async def edit_account(data: schemas.EditUserSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data,
editor_id=context.user_id)
@app.get('/projects/limit', tags=['projects'])
def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)):
async def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": {
"current": projects.count_by_tenant(tenant_id=context.tenant_id),
"remaining": -1
@ -62,7 +62,7 @@ def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)):
@app.post('/integrations/slack', tags=['integrations'])
@app.put('/integrations/slack', tags=['integrations'])
def add_slack_integration(data: schemas.AddCollaborationSchema, context: schemas.CurrentContext = Depends(OR_context)):
async def add_slack_integration(data: schemas.AddCollaborationSchema, context: schemas.CurrentContext = Depends(OR_context)):
n = Slack.add(tenant_id=context.tenant_id, data=data)
if n is None:
return {
@ -72,7 +72,7 @@ def add_slack_integration(data: schemas.AddCollaborationSchema, context: schemas
@app.post('/integrations/slack/{integrationId}', tags=['integrations'])
def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSchema = Body(...),
async def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if len(data.url) > 0:
old = Slack.get_integration(tenant_id=context.tenant_id, integration_id=integrationId)
@ -89,14 +89,14 @@ def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSc
@app.post('/client/members', tags=["client"])
def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...),
async def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(),
background_tasks=background_tasks)
@public_app.get('/users/invitation', tags=['users'])
def process_invitation_link(token: str):
async def process_invitation_link(token: str):
if token is None or len(token) < 64:
return {"errors": ["please provide a valid invitation"]}
user = users.get_by_invitation_token(token)
@ -113,7 +113,7 @@ def process_invitation_link(token: str):
@public_app.post('/password/reset', tags=["users"])
def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)):
async def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)):
if data is None or len(data.invitation) < 64 or len(data.passphrase) < 8:
return {"errors": ["please provide a valid invitation & pass"]}
user = users.get_by_invitation_token(token=data.invitation, pass_token=data.passphrase)
@ -126,14 +126,14 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema =
@app.put('/client/members/{memberId}', tags=["client"])
def edit_member(memberId: int, data: schemas.EditMemberSchema,
async def edit_member(memberId: int, data: schemas.EditMemberSchema,
context: schemas.CurrentContext = Depends(OR_context)):
return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data,
user_id_to_update=memberId)
@app.get('/metadata/session_search', tags=["metadata"])
def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None,
async def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None,
context: schemas.CurrentContext = Depends(OR_context)):
if key is None or value is None or len(value) == 0 and len(key) == 0:
return {"errors": ["please provide a key&value for search"]}
@ -147,13 +147,13 @@ def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] =
@app.get('/projects', tags=['projects'])
def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
async def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True,
stack_integrations=True)}
@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"])
def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
async def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)):
if isinstance(sessionId, str):
return {"errors": ["session not found"]}
@ -170,7 +170,7 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba
@app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"])
def get_error_trace(projectId: int, sessionId: int, errorId: str,
async def get_error_trace(projectId: int, sessionId: int, errorId: str,
context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_trace(project_id=projectId, error_id=errorId)
if "errors" in data:
@ -181,19 +181,19 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str,
@app.post('/{projectId}/errors/search', tags=['errors'])
def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...),
async def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": errors.search(data, projectId, user_id=context.user_id)}
@app.get('/{projectId}/errors/stats', tags=['errors'])
def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int,
async def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int,
context: schemas.CurrentContext = Depends(OR_context)):
return errors.stats(projectId, user_id=context.user_id, startTimestamp=startTimestamp, endTimestamp=endTimestamp)
@app.get('/{projectId}/errors/{errorId}', tags=['errors'])
def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24,
async def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24,
density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId,
**{"density24": density24, "density30": density30})
@ -204,7 +204,7 @@ def errors_get_details(projectId: int, errorId: str, background_tasks: Backgroun
@app.get('/{projectId}/errors/{errorId}/stats', tags=['errors'])
def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7),
async def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7),
endDate: int = TimeUTC.now(), density: int = 7,
context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_details_chart(project_id=projectId, user_id=context.user_id, error_id=errorId,
@ -213,7 +213,7 @@ def errors_get_details_right_column(projectId: int, errorId: str, startDate: int
@app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors'])
def errors_get_details_sourcemaps(projectId: int, errorId: str,
async def errors_get_details_sourcemaps(projectId: int, errorId: str,
context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_trace(project_id=projectId, error_id=errorId)
if "errors" in data:
@ -224,7 +224,7 @@ def errors_get_details_sourcemaps(projectId: int, errorId: str,
@app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"])
def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7),
async def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7),
endDate: int = TimeUTC.now(), context: schemas.CurrentContext = Depends(OR_context)):
if action == "favorite":
return errors_favorite.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId)
@ -241,7 +241,7 @@ def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDa
@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"])
def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks,
async def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)):
data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId)
if data is None:
@ -256,7 +256,7 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun
@app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"])
def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
async def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)):
not_found = {"errors": ["Replay file not found"]}
if isinstance(sessionId, str):
@ -276,7 +276,7 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
@app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"])
def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
async def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)):
not_found = {"errors": ["Devtools file not found"]}
if isinstance(sessionId, str):
@ -296,20 +296,20 @@ def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"])
def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...),
async def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": heatmaps.get_by_url(project_id=projectId, data=data)}
@app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"])
def add_remove_favorite_session2(projectId: int, sessionId: int,
async def add_remove_favorite_session2(projectId: int, sessionId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return {
"data": sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId)}
@app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"])
def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)):
async def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId,
tenant_id=context.tenant_id,
user_id=context.user_id)
@ -321,7 +321,7 @@ def assign_session(projectId: int, sessionId, context: schemas.CurrentContext =
@app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"])
def assign_session(projectId: int, sessionId: int, issueId: str,
async def assign_session(projectId: int, sessionId: int, issueId: str,
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId,
tenant_id=context.tenant_id, user_id=context.user_id)
@ -333,7 +333,7 @@ def assign_session(projectId: int, sessionId: int, issueId: str,
@app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"])
def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...),
async def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, assignment_id=issueId,
@ -346,7 +346,7 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schem
@app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"])
def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...),
async def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if not sessions.session_exists(project_id=projectId, session_id=sessionId):
return {"errors": ["Session not found"]}
@ -360,7 +360,7 @@ def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema
@app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"])
def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, user_id=context.user_id)
if "errors" in data:
@ -371,7 +371,7 @@ def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentCo
@app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...),
async def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, data=data)
@ -383,28 +383,28 @@ def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema
@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"])
def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.delete(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId)
return data
@app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"])
def share_note_to_slack(projectId: int, noteId: int, webhookId: int,
async def share_note_to_slack(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
@app.get('/{projectId}/notes/{noteId}/msteams/{webhookId}', tags=["sessions", "notes"])
def share_note_to_msteams(projectId: int, noteId: int, webhookId: int,
async def share_note_to_msteams(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return sessions_notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
@app.post('/{projectId}/notes', tags=["sessions", "notes"])
def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
async def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
user_id=context.user_id, data=data)
@ -414,6 +414,6 @@ def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
@app.post('/{projectId}/click_maps/search', tags=["click maps"])
def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...),
async def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)}


@ -729,7 +729,7 @@ def authenticate(email, password, for_change_password=False):
if for_change_password:
return True
r = helper.dict_to_camel_case(r)
if config("enforce_SSO", cast=bool, default=False) and not r["superAdmin"] and helper.is_saml2_available():
if config("enforce_SSO", cast=bool, default=False) and helper.is_saml2_available():
return {"errors": ["must sign-in with SSO, enforced by admin"]}
jwt_iat = change_jwt_iat(r['userId'])
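
Reading the changed pair above as removed line then added line (the same convention as the async conversions), the enforce_SSO check in authenticate() drops the not r["superAdmin"] clause, so super admins are no longer exempt when SSO is enforced. A hedged sketch of the gate before and after, isolated from the rest of the function; config() is python-decouple as used elsewhere in the API, and the helper signatures are illustrative:

from decouple import config


# Before: super admins could still sign in with a password while SSO was enforced.
def sso_gate_before(user: dict, saml2_available: bool) -> bool:
    return (config("enforce_SSO", cast=bool, default=False)
            and not user["superAdmin"]
            and saml2_available)


# After: with SSO enforced and SAML2 available, every password sign-in is rejected.
def sso_gate_after(user: dict, saml2_available: bool) -> bool:
    return config("enforce_SSO", cast=bool, default=False) and saml2_available
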


@ -1,18 +1,18 @@
requests==2.28.2
urllib3==1.26.14
boto3==1.26.53
boto3==1.26.70
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.6.0
elasticsearch==8.6.1
jira==3.4.1
fastapi==0.89.1
fastapi==0.92.0
uvicorn[standard]==0.20.0
python-decouple==3.7
pydantic[email]==1.10.4
apscheduler==3.9.1.post1
apscheduler==3.10.0
clickhouse-driver==0.2.5
python-multipart==0.0.5


@ -1,13 +1,13 @@
requests==2.28.2
urllib3==1.26.14
boto3==1.26.53
boto3==1.26.70
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.6.0
elasticsearch==8.6.1
jira==3.4.1
apscheduler==3.9.1.post1
apscheduler==3.10.0
clickhouse-driver==0.2.5


@ -1,19 +1,19 @@
requests==2.28.2
urllib3==1.26.14
boto3==1.26.53
boto3==1.26.70
pyjwt==2.6.0
psycopg2-binary==2.9.5
elasticsearch==8.6.0
elasticsearch==8.6.1
jira==3.4.1
fastapi==0.89.1
fastapi==0.92.0
uvicorn[standard]==0.20.0
python-decouple==3.7
pydantic[email]==1.10.4
apscheduler==3.9.1.post1
apscheduler==3.10.0
clickhouse-driver==0.2.5
python3-saml==1.15.0
python-multipart==0.0.5
python-multipart==0.0.5


@ -24,7 +24,7 @@ public_app, app, app_apikey = get_routers()
@public_app.get('/signup', tags=['signup'])
def get_all_signup():
async def get_all_signup():
return {"data": {"tenants": tenants.tenants_exists(),
"sso": SAML2_helper.is_saml2_available(),
"ssoProvider": SAML2_helper.get_saml2_provider(),
@ -32,7 +32,7 @@ def get_all_signup():
@app.get('/account', tags=['accounts'])
def get_account(context: schemas.CurrentContext = Depends(OR_context)):
async def get_account(context: schemas.CurrentContext = Depends(OR_context)):
r = users.get(tenant_id=context.tenant_id, user_id=context.user_id)
t = tenants.get_by_tenant_id(context.tenant_id)
if t is not None:
@ -51,14 +51,14 @@ def get_account(context: schemas.CurrentContext = Depends(OR_context)):
@app.post('/account', tags=["account"])
def edit_account(data: schemas_ee.EditUserSchema = Body(...),
async def edit_account(data: schemas_ee.EditUserSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data,
editor_id=context.user_id)
@app.get('/projects/limit', tags=['projects'])
def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)):
async def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": {
"current": projects.count_by_tenant(tenant_id=context.tenant_id),
"remaining": -1
@ -67,7 +67,7 @@ def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)):
@app.post('/integrations/slack', tags=['integrations'])
@app.put('/integrations/slack', tags=['integrations'])
def add_slack_client(data: schemas.AddCollaborationSchema, context: schemas.CurrentContext = Depends(OR_context)):
async def add_slack_client(data: schemas.AddCollaborationSchema, context: schemas.CurrentContext = Depends(OR_context)):
n = Slack.add(tenant_id=context.tenant_id, data=data)
if n is None:
return {
@ -77,7 +77,7 @@ def add_slack_client(data: schemas.AddCollaborationSchema, context: schemas.Curr
@app.post('/integrations/slack/{integrationId}', tags=['integrations'])
def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSchema = Body(...),
async def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if len(data.url) > 0:
old = Slack.get_integration(tenant_id=context.tenant_id, integration_id=integrationId)
@ -94,16 +94,16 @@ def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSc
@app.post('/client/members', tags=["client"])
def add_member(background_tasks: BackgroundTasks, data: schemas_ee.CreateMemberSchema = Body(...),
async def add_member(background_tasks: BackgroundTasks, data: schemas_ee.CreateMemberSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(),
background_tasks=background_tasks)
@public_app.get('/users/invitation', tags=['users'])
def process_invitation_link(token: str, request: Request):
async def process_invitation_link(token: str, request: Request):
if config("enforce_SSO", cast=bool, default=False) and helper.is_saml2_available():
return saml.start_sso(request=request)
return await saml.start_sso(request=request)
if token is None or len(token) < 64:
return {"errors": ["please provide a valid invitation"]}
user = users.get_by_invitation_token(token)
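
One knock-on effect of the conversion shows up here: with process_invitation_link now declared async def, the SSO redirect is returned via await saml.start_sso(...) rather than the bare call, which suggests start_sso is a coroutine function. A minimal sketch of why the await matters inside an async handler (the helper below is a stand-in, not the project's saml module):

import asyncio


async def start_sso(request: dict) -> dict:
    # Stand-in for an async SSO redirect builder.
    await asyncio.sleep(0)
    return {"redirect": request["url"]}


async def process_invitation(request: dict) -> dict:
    # Awaiting yields the resolved response; returning the bare call from an
    # async handler would hand back an unawaited coroutine object instead.
    return await start_sso(request)


if __name__ == "__main__":
    print(asyncio.run(process_invitation({"url": "/sso/saml2"})))
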
@ -120,7 +120,7 @@ def process_invitation_link(token: str, request: Request):
@public_app.post('/password/reset', tags=["users"])
def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)):
async def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)):
if data is None or len(data.invitation) < 64 or len(data.passphrase) < 8:
return {"errors": ["please provide a valid invitation & pass"]}
user = users.get_by_invitation_token(token=data.invitation, pass_token=data.passphrase)
@ -133,14 +133,14 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema =
@app.put('/client/members/{memberId}', tags=["client"])
def edit_member(memberId: int, data: schemas_ee.EditMemberSchema,
async def edit_member(memberId: int, data: schemas_ee.EditMemberSchema,
context: schemas.CurrentContext = Depends(OR_context)):
return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data,
user_id_to_update=memberId)
@app.get('/metadata/session_search', tags=["metadata"])
def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None,
async def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None,
context: schemas.CurrentContext = Depends(OR_context)):
if key is None or value is None or len(value) == 0 and len(key) == 0:
return {"errors": ["please provide a key&value for search"]}
@ -158,13 +158,13 @@ def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] =
@app.get('/projects', tags=['projects'])
def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
async def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True,
stack_integrations=True, user_id=context.user_id)}
@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"], dependencies=[OR_scope(Permissions.session_replay)])
def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
async def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks,
context: schemas.CurrentContext = Depends(OR_context)):
if isinstance(sessionId, str):
return {"errors": ["session not found"]}
@ -182,7 +182,7 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba
@app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"],
dependencies=[OR_scope(Permissions.dev_tools)])
def get_error_trace(projectId: int, sessionId: int, errorId: str,
async def get_error_trace(projectId: int, sessionId: int, errorId: str,
context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_trace(project_id=projectId, error_id=errorId)
if "errors" in data:
@ -193,19 +193,19 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str,
@app.post('/{projectId}/errors/search', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)])
def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...),
async def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": errors.search(data, projectId, user_id=context.user_id)}
@app.get('/{projectId}/errors/stats', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)])
def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int,
async def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int,
context: schemas.CurrentContext = Depends(OR_context)):
return errors.stats(projectId, user_id=context.user_id, startTimestamp=startTimestamp, endTimestamp=endTimestamp)
@app.get('/{projectId}/errors/{errorId}', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)])
def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24,
async def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24,
density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId,
**{"density24": density24, "density30": density30})
@ -216,7 +216,7 @@ def errors_get_details(projectId: int, errorId: str, background_tasks: Backgroun
@app.get('/{projectId}/errors/{errorId}/stats', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)])
def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7),
async def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7),
endDate: int = TimeUTC.now(), density: int = 7,
context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_details_chart(project_id=projectId, user_id=context.user_id, error_id=errorId,
@ -225,7 +225,7 @@ def errors_get_details_right_column(projectId: int, errorId: str, startDate: int
@app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)])
def errors_get_details_sourcemaps(projectId: int, errorId: str,
async def errors_get_details_sourcemaps(projectId: int, errorId: str,
context: schemas.CurrentContext = Depends(OR_context)):
data = errors.get_trace(project_id=projectId, error_id=errorId)
if "errors" in data:
@ -236,7 +236,7 @@ def errors_get_details_sourcemaps(projectId: int, errorId: str,
@app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"], dependencies=[OR_scope(Permissions.dev_tools)])
def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7),
async def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7),
endDate: int = TimeUTC.now(), context: schemas.CurrentContext = Depends(OR_context)):
if action == "favorite":
return errors_favorite.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId)
@ -253,7 +253,7 @@ def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDa
@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"], dependencies=[OR_scope(Permissions.assist_live)])
def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks,
async def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks,
context: schemas_ee.CurrentContext = Depends(OR_context)):
data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId)
if data is None:
@ -269,7 +269,7 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun
@app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"],
dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay)])
def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
async def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)):
not_found = {"errors": ["Replay file not found"]}
if isinstance(sessionId, str):
@ -290,7 +290,7 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str],
@app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"],
dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools)])
def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
async def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
context: schemas.CurrentContext = Depends(OR_context)):
not_found = {"errors": ["Devtools file not found"]}
if isinstance(sessionId, str):
@ -310,14 +310,14 @@ def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str],
@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"], dependencies=[OR_scope(Permissions.session_replay)])
def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...),
async def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": heatmaps.get_by_url(project_id=projectId, data=data)}
@app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"],
dependencies=[OR_scope(Permissions.session_replay)])
def add_remove_favorite_session2(projectId: int, sessionId: int,
async def add_remove_favorite_session2(projectId: int, sessionId: int,
context: schemas_ee.CurrentContext = Depends(OR_context)):
return {
"data": sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId)}
@ -325,7 +325,7 @@ def add_remove_favorite_session2(projectId: int, sessionId: int,
@app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"],
dependencies=[OR_scope(Permissions.session_replay)])
def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)):
async def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId,
tenant_id=context.tenant_id,
user_id=context.user_id)
@ -338,7 +338,7 @@ def assign_session(projectId: int, sessionId, context: schemas.CurrentContext =
@app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"],
dependencies=[OR_scope(Permissions.session_replay)])
def assign_session(projectId: int, sessionId: int, issueId: str,
async def assign_session(projectId: int, sessionId: int, issueId: str,
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId,
tenant_id=context.tenant_id, user_id=context.user_id)
@ -351,7 +351,7 @@ def assign_session(projectId: int, sessionId: int, issueId: str,
@app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"],
dependencies=[OR_scope(Permissions.session_replay)])
def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...),
async def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, assignment_id=issueId,
@ -365,7 +365,7 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schem
@app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"],
dependencies=[OR_scope(Permissions.session_replay)])
def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...),
async def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if not sessions.session_exists(project_id=projectId, session_id=sessionId):
return {"errors": ["Session not found"]}
@ -380,7 +380,7 @@ def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema
@app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"],
dependencies=[OR_scope(Permissions.session_replay)])
def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId,
session_id=sessionId, user_id=context.user_id)
if "errors" in data:
@ -392,7 +392,7 @@ def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentCo
@app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"],
dependencies=[OR_scope(Permissions.session_replay)])
def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...),
async def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, data=data)
@ -405,7 +405,7 @@ def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema
@app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"],
dependencies=[OR_scope(Permissions.session_replay)])
def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)):
async def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.delete(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId)
return data
@ -413,21 +413,21 @@ def delete_note(projectId: int, noteId: int, context: schemas.CurrentContext = D
@app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"],
dependencies=[OR_scope(Permissions.session_replay)])
def share_note_to_slack(projectId: int, noteId: int, webhookId: int,
async def share_note_to_slack(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
@app.get('/{projectId}/notes/{noteId}/msteams/{webhookId}', tags=["sessions", "notes"])
def share_note_to_msteams(projectId: int, noteId: int, webhookId: int,
async def share_note_to_msteams(projectId: int, noteId: int, webhookId: int,
context: schemas.CurrentContext = Depends(OR_context)):
return sessions_notes.share_to_msteams(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
note_id=noteId, webhook_id=webhookId)
@app.post('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)])
def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
async def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
user_id=context.user_id, data=data)
@ -437,6 +437,6 @@ def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...),
@app.post('/{projectId}/click_maps/search', tags=["click maps"], dependencies=[OR_scope(Permissions.session_replay)])
def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...),
async def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return {"data": click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)}


@ -14,7 +14,7 @@ public_app, app, app_apikey = get_routers()
@app.get('/client/roles', tags=["client", "roles"])
def get_roles(context: schemas_ee.CurrentContext = Depends(OR_context)):
async def get_roles(context: schemas_ee.CurrentContext = Depends(OR_context)):
return {
'data': roles.get_roles(tenant_id=context.tenant_id)
}
@ -22,7 +22,7 @@ def get_roles(context: schemas_ee.CurrentContext = Depends(OR_context)):
@app.post('/client/roles', tags=["client", "roles"])
@app.put('/client/roles', tags=["client", "roles"])
def add_role(data: schemas_ee.RolePayloadSchema = Body(...), context: schemas_ee.CurrentContext = Depends(OR_context)):
async def add_role(data: schemas_ee.RolePayloadSchema = Body(...), context: schemas_ee.CurrentContext = Depends(OR_context)):
data = roles.create(tenant_id=context.tenant_id, user_id=context.user_id, data=data)
if "errors" in data:
return data
@ -34,7 +34,7 @@ def add_role(data: schemas_ee.RolePayloadSchema = Body(...), context: schemas_ee
@app.post('/client/roles/{roleId}', tags=["client", "roles"])
@app.put('/client/roles/{roleId}', tags=["client", "roles"])
def edit_role(roleId: int, data: schemas_ee.RolePayloadSchema = Body(...),
async def edit_role(roleId: int, data: schemas_ee.RolePayloadSchema = Body(...),
context: schemas_ee.CurrentContext = Depends(OR_context)):
data = roles.update(tenant_id=context.tenant_id, user_id=context.user_id, role_id=roleId, data=data)
if "errors" in data:
@ -46,7 +46,7 @@ def edit_role(roleId: int, data: schemas_ee.RolePayloadSchema = Body(...),
@app.delete('/client/roles/{roleId}', tags=["client", "roles"])
def delete_role(roleId: int, context: schemas_ee.CurrentContext = Depends(OR_context)):
async def delete_role(roleId: int, context: schemas_ee.CurrentContext = Depends(OR_context)):
data = roles.delete(tenant_id=context.tenant_id, user_id=context.user_id, role_id=roleId)
if "errors" in data:
return data
@ -56,12 +56,12 @@ def delete_role(roleId: int, context: schemas_ee.CurrentContext = Depends(OR_con
@app.get('/assist/credentials', tags=["assist"])
def get_assist_credentials():
async def get_assist_credentials():
return {"data": assist_helper.get_full_config()}
@app.post('/trails', tags=["traces", "trails"])
def get_trails(data: schemas_ee.TrailSearchPayloadSchema = Body(...),
async def get_trails(data: schemas_ee.TrailSearchPayloadSchema = Body(...),
context: schemas_ee.CurrentContext = Depends(OR_context)):
return {
'data': traces.get_all(tenant_id=context.tenant_id, data=data)
@ -69,12 +69,12 @@ def get_trails(data: schemas_ee.TrailSearchPayloadSchema = Body(...),
@app.post('/trails/actions', tags=["traces", "trails"])
def get_available_trail_actions(context: schemas_ee.CurrentContext = Depends(OR_context)):
async def get_available_trail_actions(context: schemas_ee.CurrentContext = Depends(OR_context)):
return {'data': traces.get_available_actions(tenant_id=context.tenant_id)}
@app.put('/{projectId}/assist/save', tags=["assist"])
def sign_record_for_upload(projectId: int, data: schemas_ee.AssistRecordPayloadSchema = Body(...),
async def sign_record_for_upload(projectId: int, data: schemas_ee.AssistRecordPayloadSchema = Body(...),
context: schemas_ee.CurrentContext = Depends(OR_context)):
if not sessions.session_exists(project_id=projectId, session_id=data.session_id):
return {"errors": ["Session not found"]}
@ -82,7 +82,7 @@ def sign_record_for_upload(projectId: int, data: schemas_ee.AssistRecordPayloadS
@app.put('/{projectId}/assist/save/done', tags=["assist"])
def save_record_after_upload(projectId: int, data: schemas_ee.AssistRecordSavePayloadSchema = Body(...),
async def save_record_after_upload(projectId: int, data: schemas_ee.AssistRecordSavePayloadSchema = Body(...),
context: schemas_ee.CurrentContext = Depends(OR_context)):
if not sessions.session_exists(project_id=projectId, session_id=data.session_id):
return {"errors": ["Session not found"]}
@ -90,18 +90,18 @@ def save_record_after_upload(projectId: int, data: schemas_ee.AssistRecordSavePa
@app.post('/{projectId}/assist/records', tags=["assist"])
def search_records(projectId: int, data: schemas_ee.AssistRecordSearchPayloadSchema = Body(...),
async def search_records(projectId: int, data: schemas_ee.AssistRecordSearchPayloadSchema = Body(...),
context: schemas_ee.CurrentContext = Depends(OR_context)):
return {"data": assist_records.search_records(project_id=projectId, data=data, context=context)}
@app.get('/{projectId}/assist/records/{recordId}', tags=["assist"])
def get_record(projectId: int, recordId: int, context: schemas_ee.CurrentContext = Depends(OR_context)):
async def get_record(projectId: int, recordId: int, context: schemas_ee.CurrentContext = Depends(OR_context)):
return {"data": assist_records.get_record(project_id=projectId, record_id=recordId, context=context)}
@app.post('/{projectId}/assist/records/{recordId}', tags=["assist"])
def update_record(projectId: int, recordId: int, data: schemas_ee.AssistRecordUpdatePayloadSchema = Body(...),
async def update_record(projectId: int, recordId: int, data: schemas_ee.AssistRecordUpdatePayloadSchema = Body(...),
context: schemas_ee.CurrentContext = Depends(OR_context)):
result = assist_records.update_record(project_id=projectId, record_id=recordId, data=data, context=context)
if "errors" in result:
@ -110,7 +110,7 @@ def update_record(projectId: int, recordId: int, data: schemas_ee.AssistRecordUp
@app.delete('/{projectId}/assist/records/{recordId}', tags=["assist"])
def delete_record(projectId: int, recordId: int, context: schemas_ee.CurrentContext = Depends(OR_context)):
async def delete_record(projectId: int, recordId: int, context: schemas_ee.CurrentContext = Depends(OR_context)):
result = assist_records.delete_record(project_id=projectId, record_id=recordId, context=context)
if "errors" in result:
return result
@ -118,7 +118,7 @@ def delete_record(projectId: int, recordId: int, context: schemas_ee.CurrentCont
@app.post('/{projectId}/signals', tags=['signals'])
def send_interactions(projectId: int, data: schemas_ee.SignalsSchema = Body(...),
async def send_interactions(projectId: int, data: schemas_ee.SignalsSchema = Body(...),
context: schemas_ee.CurrentContext = Depends(OR_context)):
data = signals.handle_frontend_signals_queued(project_id=projectId, user_id=context.user_id, data=data)
@ -129,6 +129,6 @@ def send_interactions(projectId: int, data: schemas_ee.SignalsSchema = Body(...)
@app.post('/{projectId}/dashboard/insights', tags=["insights"])
@app.post('/{projectId}/dashboard/insights', tags=["insights"])
def sessions_search(projectId: int, data: schemas_ee.GetInsightsSchema = Body(...),
async def sessions_search(projectId: int, data: schemas_ee.GetInsightsSchema = Body(...),
context: schemas_ee.CurrentContext = Depends(OR_context)):
return {'data': sessions_insights.fetch_selected(data=data, project_id=projectId)}

File diff suppressed because it is too large


@ -19,12 +19,12 @@
"homepage": "https://github.com/openreplay/openreplay#readme",
"dependencies": {
"@maxmind/geoip2-node": "^3.5.0",
"@socket.io/redis-adapter": "^8.0.1",
"@socket.io/redis-adapter": "^8.1.0",
"express": "^4.18.2",
"jsonwebtoken": "^9.0.0",
"redis": "^4.5.1",
"socket.io": "^4.5.4",
"ua-parser-js": "^1.0.32",
"redis": "^4.6.4",
"socket.io": "^4.6.0",
"ua-parser-js": "^1.0.33",
"uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.19.0"
}
}

295
peers/package-lock.json generated

@ -10,7 +10,82 @@
"license": "Elastic License 2.0 (ELv2)",
"dependencies": {
"express": "^4.18.2",
"peer": "^v1.0.0-rc.4"
"peer": "^v1.0.0-rc.9"
}
},
"node_modules/@types/body-parser": {
"version": "1.19.2",
"resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz",
"integrity": "sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==",
"dependencies": {
"@types/connect": "*",
"@types/node": "*"
}
},
"node_modules/@types/connect": {
"version": "3.4.35",
"resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz",
"integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/express": {
"version": "4.17.17",
"resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.17.tgz",
"integrity": "sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q==",
"dependencies": {
"@types/body-parser": "*",
"@types/express-serve-static-core": "^4.17.33",
"@types/qs": "*",
"@types/serve-static": "*"
}
},
"node_modules/@types/express-serve-static-core": {
"version": "4.17.33",
"resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.33.tgz",
"integrity": "sha512-TPBqmR/HRYI3eC2E5hmiivIzv+bidAfXofM+sbonAGvyDhySGw9/PQZFt2BLOrjUUR++4eJVpx6KnLQK1Fk9tA==",
"dependencies": {
"@types/node": "*",
"@types/qs": "*",
"@types/range-parser": "*"
}
},
"node_modules/@types/mime": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/@types/mime/-/mime-3.0.1.tgz",
"integrity": "sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA=="
},
"node_modules/@types/node": {
"version": "18.13.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.13.0.tgz",
"integrity": "sha512-gC3TazRzGoOnoKAhUx+Q0t8S9Tzs74z7m0ipwGpSqQrleP14hKxP4/JUeEQcD3W1/aIpnWl8pHowI7WokuZpXg=="
},
"node_modules/@types/qs": {
"version": "6.9.7",
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz",
"integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw=="
},
"node_modules/@types/range-parser": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz",
"integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw=="
},
"node_modules/@types/serve-static": {
"version": "1.15.0",
"resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.0.tgz",
"integrity": "sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg==",
"dependencies": {
"@types/mime": "*",
"@types/node": "*"
}
},
"node_modules/@types/ws": {
"version": "8.5.4",
"resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.4.tgz",
"integrity": "sha512-zdQDHKUgcX/zBc4GrwsE/7dVdAD8JR4EuiAXiiUhhfyIJXXb2+PrGshFyeXWQPMmmZ2XxgaqclgpIC7eTXc1mg==",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/accepts": {
@ -95,22 +170,17 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/camelcase": {
"version": "5.3.1",
"resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz",
"integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==",
"engines": {
"node": ">=6"
}
},
"node_modules/cliui": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz",
"integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==",
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
"integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
"dependencies": {
"string-width": "^4.2.0",
"strip-ansi": "^6.0.0",
"wrap-ansi": "^6.2.0"
"strip-ansi": "^6.0.1",
"wrap-ansi": "^7.0.0"
},
"engines": {
"node": ">=12"
}
},
"node_modules/color-convert": {
@ -141,9 +211,9 @@
}
},
"node_modules/content-type": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz",
"integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==",
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
"engines": {
"node": ">= 0.6"
}
@@ -181,14 +251,6 @@
"ms": "2.0.0"
}
},
"node_modules/decamelize": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
"integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/depd": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz",
@@ -224,6 +286,14 @@
"node": ">= 0.8"
}
},
"node_modules/escalade": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
"integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==",
"engines": {
"node": ">=6"
}
},
"node_modules/escape-html": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
@@ -295,18 +365,6 @@
"node": ">= 0.8"
}
},
"node_modules/find-up": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
"integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
"dependencies": {
"locate-path": "^5.0.0",
"path-exists": "^4.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/forwarded": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz",
@@ -337,9 +395,9 @@
}
},
"node_modules/get-intrinsic": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz",
"integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==",
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz",
"integrity": "sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==",
"dependencies": {
"function-bind": "^1.1.1",
"has": "^1.0.3",
@@ -418,17 +476,6 @@
"node": ">=8"
}
},
"node_modules/locate-path": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
"integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
"dependencies": {
"p-locate": "^4.1.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/media-typer": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
@@ -502,9 +549,9 @@
}
},
"node_modules/object-inspect": {
"version": "1.12.2",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz",
"integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==",
"version": "1.12.3",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz",
"integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==",
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
@@ -520,39 +567,6 @@
"node": ">= 0.8"
}
},
"node_modules/p-limit": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
"integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
"dependencies": {
"p-try": "^2.0.0"
},
"engines": {
"node": ">=6"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/p-locate": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
"integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
"dependencies": {
"p-limit": "^2.2.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/p-try": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz",
"integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==",
"engines": {
"node": ">=6"
}
},
"node_modules/parseurl": {
"version": "1.3.3",
"resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
@@ -561,34 +575,32 @@
"node": ">= 0.8"
}
},
"node_modules/path-exists": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
"integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
"engines": {
"node": ">=8"
}
},
"node_modules/path-to-regexp": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
"integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ=="
},
"node_modules/peer": {
"version": "1.0.0-rc.4",
"resolved": "https://registry.npmjs.org/peer/-/peer-1.0.0-rc.4.tgz",
"integrity": "sha512-xaNIDm3yWR5m8cuijK7jEFAMOWqNJDGSVJ0+Y3qKW5XTNYsNWEdqtg/Btq9eznGxTTeqQZGNw/SxwyrCVdmmDg==",
"version": "1.0.0-rc.9",
"resolved": "https://registry.npmjs.org/peer/-/peer-1.0.0-rc.9.tgz",
"integrity": "sha512-wjt3fWMKxM/lH/1uD5Qs9qinQ1x/aa9br1eZEQuJ2wuBBQrjAcCT85MUuY9PYcyoW5ymyABsDKC3H/q9KmZ3PA==",
"dependencies": {
"@types/express": "^4.17.3",
"@types/ws": "^7.2.3 || ^8.0.0",
"cors": "^2.8.5",
"express": "^4.17.1",
"ws": "^7.2.3",
"yargs": "^15.3.1"
"ws": "^7.2.3 || ^8.0.0",
"yargs": "^17.6.2"
},
"bin": {
"peerjs": "bin/peerjs"
"peerjs": "dist/bin/peerjs.js"
},
"engines": {
"node": ">=14"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/peer"
}
},
"node_modules/proxy-addr": {
@@ -647,11 +659,6 @@
"node": ">=0.10.0"
}
},
"node_modules/require-main-filename": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz",
"integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg=="
},
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
@@ -718,11 +725,6 @@
"node": ">= 0.8.0"
}
},
"node_modules/set-blocking": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
"integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw=="
},
"node_modules/setprototypeof": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
@@ -817,34 +819,32 @@
"node": ">= 0.8"
}
},
"node_modules/which-module": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz",
"integrity": "sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q=="
},
"node_modules/wrap-ansi": {
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz",
"integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==",
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
"dependencies": {
"ansi-styles": "^4.0.0",
"string-width": "^4.1.0",
"strip-ansi": "^6.0.0"
},
"engines": {
"node": ">=8"
"node": ">=10"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/ws": {
"version": "7.5.9",
"resolved": "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz",
"integrity": "sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==",
"version": "8.12.1",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.12.1.tgz",
"integrity": "sha512-1qo+M9Ba+xNhPB+YTWUlK6M17brTut5EXbcBaMRN5pH5dFrXz7lzz1ChFSUq3bOUl8yEvSenhHmYUNJxFzdJew==",
"engines": {
"node": ">=8.3.0"
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": "^5.0.2"
"utf-8-validate": ">=5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
@@ -856,41 +856,36 @@
}
},
"node_modules/y18n": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz",
"integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ=="
"version": "5.0.8",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
"integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
"engines": {
"node": ">=10"
}
},
"node_modules/yargs": {
"version": "15.4.1",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz",
"integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==",
"version": "17.6.2",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.6.2.tgz",
"integrity": "sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw==",
"dependencies": {
"cliui": "^6.0.0",
"decamelize": "^1.2.0",
"find-up": "^4.1.0",
"get-caller-file": "^2.0.1",
"cliui": "^8.0.1",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"require-directory": "^2.1.1",
"require-main-filename": "^2.0.0",
"set-blocking": "^2.0.0",
"string-width": "^4.2.0",
"which-module": "^2.0.0",
"y18n": "^4.0.0",
"yargs-parser": "^18.1.2"
"string-width": "^4.2.3",
"y18n": "^5.0.5",
"yargs-parser": "^21.1.1"
},
"engines": {
"node": ">=8"
"node": ">=12"
}
},
"node_modules/yargs-parser": {
"version": "18.1.3",
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz",
"integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==",
"dependencies": {
"camelcase": "^5.0.0",
"decamelize": "^1.2.0"
},
"version": "21.1.1",
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz",
"integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==",
"engines": {
"node": ">=6"
"node": ">=12"
}
}
}

View file

@@ -19,6 +19,6 @@
"homepage": "https://github.com/openreplay/openreplay#readme",
"dependencies": {
"express": "^4.18.2",
"peer": "^v1.0.0-rc.4"
"peer": "^v1.0.0-rc.9"
}
}

View file

@@ -33,4 +33,6 @@ echo $DOCKER_REPO
bash build.sh $@
cd ../api
bash build.sh $@
bash build_alerts.sh $@
bash build_crons.sh $@
}

View file

@@ -23,6 +23,8 @@ echo $DOCKER_REPO
tmux select-layout tiled
tmux split-window "cd ../../sourcemap-reader && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build.sh $@"
tmux split-window "cd ../../api && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build.sh $@"
tmux split-window "cd ../../api && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build_alerts.sh $@"
tmux split-window "cd ../../api && IMAGE_TAG=$IMAGE_TAG DOCKER_REPO=$DOCKER_REPO PUSH_IMAGE=1 bash build_crons.sh $@"
tmux select-layout tiled
}

View file

@@ -9,7 +9,7 @@
"version": "1.0.0",
"license": "Elastic License 2.0 (ELv2)",
"dependencies": {
"aws-sdk": "^2.1284.0",
"aws-sdk": "^2.1314.0",
"express": "^4.18.2",
"source-map": "^0.7.4"
}
@@ -43,9 +43,9 @@
}
},
"node_modules/aws-sdk": {
"version": "2.1284.0",
"resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1284.0.tgz",
"integrity": "sha512-B9NllAw1kMMPUHpSs4OcUm0xK1el0tNU2qmIVHtRPRbOFT8cQfxy4HF8s2m0ddvMF1ma4tdzB5uNUNcu3c81ag==",
"version": "2.1314.0",
"resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1314.0.tgz",
"integrity": "sha512-2jsfvgtOQ6kRflaicn50ndME4YoIaBhlus/dZCExtWNXeu8ePh+eAtflsYs6aqIiRPKhCBLaqClzahWm7hC0XA==",
"dependencies": {
"buffer": "4.9.2",
"events": "1.1.1",
@@ -146,9 +146,9 @@
}
},
"node_modules/content-type": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz",
"integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==",
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
"engines": {
"node": ">= 0.6"
}
@@ -313,9 +313,9 @@
"integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="
},
"node_modules/get-intrinsic": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz",
"integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==",
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz",
"integrity": "sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==",
"dependencies": {
"function-bind": "^1.1.1",
"has": "^1.0.3",
@@ -552,9 +552,9 @@
}
},
"node_modules/object-inspect": {
"version": "1.12.2",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz",
"integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==",
"version": "1.12.3",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz",
"integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==",
"funding": {
"url": "https://github.com/sponsors/ljharb"
}

View file

@@ -18,7 +18,7 @@
},
"homepage": "https://github.com/openreplay/openreplay#readme",
"dependencies": {
"aws-sdk": "^2.1298.0",
"aws-sdk": "^2.1314.0",
"express": "^4.18.2",
"source-map": "^0.7.4"
}

View file

@@ -12,8 +12,8 @@
"@maxmind/geoip2-node": "^3.5.0",
"express": "^4.18.2",
"jsonwebtoken": "^9.0.0",
"socket.io": "^4.5.4",
"ua-parser-js": "^1.0.32"
"socket.io": "^4.6.0",
"ua-parser-js": "^1.0.33"
}
},
"node_modules/@maxmind/geoip2-node": {
@@ -45,9 +45,9 @@
}
},
"node_modules/@types/node": {
"version": "18.11.18",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.18.tgz",
"integrity": "sha512-DHQpWGjyQKSHj3ebjFI/wRKcqQcdR+MoFBygntYOZytCqNfkd2ZC4ARDJ2DQqhjH5p85Nnd3jhUJIXrszFX/JA=="
"version": "18.13.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.13.0.tgz",
"integrity": "sha512-gC3TazRzGoOnoKAhUx+Q0t8S9Tzs74z7m0ipwGpSqQrleP14hKxP4/JUeEQcD3W1/aIpnWl8pHowI7WokuZpXg=="
},
"node_modules/accepts": {
"version": "1.3.8",
@@ -170,9 +170,9 @@
}
},
"node_modules/content-type": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz",
"integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==",
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
"engines": {
"node": ">= 0.6"
}
@@ -254,9 +254,9 @@
}
},
"node_modules/engine.io": {
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.2.1.tgz",
"integrity": "sha512-ECceEFcAaNRybd3lsGQKas3ZlMVjN3cyWwMP25D2i0zWfyiytVbTpRPa34qrr+FHddtpBVOmq4H/DCv1O0lZRA==",
"version": "6.4.0",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.4.0.tgz",
"integrity": "sha512-OgxY1c/RuCSeO/rTr8DIFXx76IzUUft86R7/P7MMbbkuzeqJoTNw2lmeD91IyGz41QYleIIjWeMJGgug043sfQ==",
"dependencies": {
"@types/cookie": "^0.4.1",
"@types/cors": "^2.8.12",
@@ -267,7 +267,7 @@
"cors": "~2.8.5",
"debug": "~4.3.1",
"engine.io-parser": "~5.0.3",
"ws": "~8.2.3"
"ws": "~8.11.0"
},
"engines": {
"node": ">=10.0.0"
@@ -862,15 +862,15 @@
}
},
"node_modules/socket.io": {
"version": "4.5.4",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.5.4.tgz",
"integrity": "sha512-m3GC94iK9MfIEeIBfbhJs5BqFibMtkRk8ZpKwG2QwxV0m/eEhPIV4ara6XCF1LWNAus7z58RodiZlAH71U3EhQ==",
"version": "4.6.0",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.6.0.tgz",
"integrity": "sha512-b65bp6INPk/BMMrIgVvX12x3Q+NqlGqSlTuvKQWt0BUJ3Hyy3JangBl7fEoWZTXbOKlCqNPbQ6MbWgok/km28w==",
"dependencies": {
"accepts": "~1.3.4",
"base64id": "~2.0.0",
"debug": "~4.3.2",
"engine.io": "~6.2.1",
"socket.io-adapter": "~2.4.0",
"engine.io": "~6.4.0",
"socket.io-adapter": "~2.5.2",
"socket.io-parser": "~4.2.1"
},
"engines": {
@@ -878,9 +878,12 @@
}
},
"node_modules/socket.io-adapter": {
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.4.0.tgz",
"integrity": "sha512-W4N+o69rkMEGVuk2D/cvca3uYsvGlMwsySWV447y99gUPghxq42BxqLNMndb+a1mm/5/7NeXVQS7RLa2XyXvYg=="
"version": "2.5.2",
"resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-2.5.2.tgz",
"integrity": "sha512-87C3LO/NOMc+eMcpcxUBebGjkpMDkNBS9tf7KJqcDsmL936EChtVva71Dw2q4tQcuVC+hAUy4an2NO/sYXmwRA==",
"dependencies": {
"ws": "~8.11.0"
}
},
"node_modules/socket.io-parser": {
"version": "4.2.2",
@@ -984,9 +987,9 @@
}
},
"node_modules/ua-parser-js": {
"version": "1.0.32",
"resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.32.tgz",
"integrity": "sha512-dXVsz3M4j+5tTiovFVyVqssXBu5HM47//YSOeZ9fQkdDKkfzv2v3PP1jmH6FUyPW+yCSn7aBVK1fGGKNhowdDA==",
"version": "1.0.33",
"resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.33.tgz",
"integrity": "sha512-RqshF7TPTE0XLYAqmjlu5cLLuGdKrNu9O1KLA/qp39QtbZwuzwv1dT46DZSopoUMsYgXpB3Cv8a03FI8b74oFQ==",
"funding": [
{
"type": "opencollective",
@@ -1039,9 +1042,9 @@
}
},
"node_modules/ws": {
"version": "8.2.3",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz",
"integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==",
"version": "8.11.0",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.11.0.tgz",
"integrity": "sha512-HPG3wQd9sNQoT9xHyNCXoDUa+Xw/VevmY9FoHyQ+g+rrMn4j6FB4np7Z0OhdTgjx6MgQLK7jwSy1YecU1+4Asg==",
"engines": {
"node": ">=10.0.0"
},

View file

@@ -21,7 +21,7 @@
"@maxmind/geoip2-node": "^3.5.0",
"express": "^4.18.2",
"jsonwebtoken": "^9.0.0",
"socket.io": "^4.5.4",
"ua-parser-js": "^1.0.32"
"socket.io": "^4.6.0",
"ua-parser-js": "^1.0.33"
}
}