diff --git a/.github/workflows/assist-ee.yaml b/.github/workflows/assist-ee.yaml index e3f03ef5f..44fcb5650 100644 --- a/.github/workflows/assist-ee.yaml +++ b/.github/workflows/assist-ee.yaml @@ -6,10 +6,10 @@ on: - dev - api-* paths: - - "ee/utilities/**" - - "utilities/**" - - "!utilities/.gitignore" - - "!utilities/*-dev.sh" + - "ee/assist/**" + - "assist/**" + - "!assist/.gitignore" + - "!assist/*-dev.sh" name: Build and Deploy Assist EE @@ -44,7 +44,7 @@ jobs: ENVIRONMENT: staging run: | skip_security_checks=${{ github.event.inputs.skip_security_checks }} - cd utilities + cd assist PUSH_IMAGE=0 bash -x ./build.sh ee [[ "x$skip_security_checks" == "xtrue" ]] || { curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./ @@ -101,9 +101,9 @@ jobs: cat /tmp/image_override.yaml # Deploy command - mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp + mv openreplay/charts/{ingress-nginx,assist,quickwit} /tmp rm -rf openreplay/charts/* - mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/ + mv /tmp/{ingress-nginx,assist,quickwit} openreplay/charts/ helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f - env: DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }} diff --git a/.github/workflows/assist.yaml b/.github/workflows/assist.yaml index 03ee1df5f..37582d7d0 100644 --- a/.github/workflows/assist.yaml +++ b/.github/workflows/assist.yaml @@ -6,9 +6,9 @@ on: - dev - api-* paths: - - "utilities/**" - - "!utilities/.gitignore" - - "!utilities/*-dev.sh" + - "assist/**" + - "!assist/.gitignore" + - "!assist/*-dev.sh" name: Build and Deploy Assist @@ -43,7 +43,7 @@ jobs: ENVIRONMENT: staging run: | skip_security_checks=${{ github.event.inputs.skip_security_checks }} - cd utilities + cd assist PUSH_IMAGE=0 bash -x ./build.sh [[ "x$skip_security_checks" == "xtrue" ]] || { curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./ @@ -100,9 +100,9 @@ jobs: cat /tmp/image_override.yaml # Deploy command - mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp + mv openreplay/charts/{ingress-nginx,assist,quickwit} /tmp rm -rf openreplay/charts/* - mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/ + mv /tmp/{ingress-nginx,assist,quickwit} openreplay/charts/ helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f - env: DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} diff --git a/.github/workflows/peers-ee.yaml b/.github/workflows/peers-ee.yaml index dcd003e93..564c5cf6d 100644 --- a/.github/workflows/peers-ee.yaml +++ b/.github/workflows/peers-ee.yaml @@ -1,6 +1,11 @@ # This action will push the peers changes to aws on: workflow_dispatch: + inputs: + skip_security_checks: + description: 'Skip security checks if there is an unfixable vuln or error. Value: true/false' + required: false + default: 'false' push: branches: - dev @@ -11,7 +16,7 @@ on: - "!peers/.gitignore" - "!peers/*-dev.sh" -name: Build and Deploy Peers +name: Build and Deploy Peers EE jobs: deploy: @@ -36,30 +41,98 @@ jobs: kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret.
id: setcontext - - name: Building and Pushing api image + # Caching docker images + - uses: satackey/action-docker-layer-caching@v0.0.11 + # Ignore the failure of a step and avoid terminating the job. + continue-on-error: true + + + - name: Building and Pushing peers image id: build-image env: DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }} IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} ENVIRONMENT: staging run: | + skip_security_checks=${{ github.event.inputs.skip_security_checks }} cd peers - PUSH_IMAGE=1 bash build.sh ee + PUSH_IMAGE=0 bash -x ./build.sh ee + [[ "x$skip_security_checks" == "xtrue" ]] || { + curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./ + images=("peers") + for image in ${images[*]};do + ./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG + done + err_code=$? + [[ $err_code -ne 0 ]] && { + exit $err_code + } + } && { + echo "Skipping Security Checks" + } + images=("peers") + for image in ${images[*]};do + docker push $DOCKER_REPO/$image:$IMAGE_TAG + done + - name: Creating old image input + run: | + # + # Create yaml with existing image tags + # + kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\ + tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt + + echo > /tmp/image_override.yaml + + for line in `cat /tmp/image_tag.txt`; + do + image_array=($(echo "$line" | tr ':' '\n')) + cat <<EOF >> /tmp/image_override.yaml + ${image_array[0]}: + image: + # We have to strip off the -ee, as helm will append it. + tag: `echo ${image_array[1]} | cut -d '-' -f 1` + EOF + done + - name: Deploy to kubernetes run: | cd scripts/helmcharts/ - sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.EE_REGISTRY_URL }}\"#g" vars.yaml - sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\" #g" vars.yaml - sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.EE_MINIO_SECRET_KEY }}\" #g" vars.yaml - sed -i "s#domain_name.*#domain_name: \"ee.openreplay.com\" #g" vars.yaml - sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml - sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml - bash kube-install.sh --app peers + + ## Update secrets + sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml + sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.EE_PG_PASSWORD }}\"/g" vars.yaml + sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\"/g" vars.yaml + sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.EE_MINIO_SECRET_KEY }}\"/g" vars.yaml + sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.EE_JWT_SECRET }}\"/g" vars.yaml + sed -i "s/domainName: \"\"/domainName: \"${{ secrets.EE_DOMAIN_NAME }}\"/g" vars.yaml + sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.EE_LICENSE_KEY }}\"/g" vars.yaml + + # Update changed image tag + sed -i "/peers/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml + + cat /tmp/image_override.yaml + # Deploy command + mv openreplay/charts/{ingress-nginx,peers,quickwit} /tmp + rm -rf openreplay/charts/* + mv /tmp/{ingress-nginx,peers,quickwit} openreplay/charts/ + helm template openreplay -n app openreplay -f vars.yaml -f
/tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f - env: DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }} IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} ENVIRONMENT: staging + - name: Alert slack + if: ${{ failure() }} + uses: rtCamp/action-slack-notify@v2 + env: + SLACK_CHANNEL: ee + SLACK_TITLE: "Failed ${{ github.workflow }}" + SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff' + SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }} + SLACK_USERNAME: "OR Bot" + SLACK_MESSAGE: 'Build failed :bomb:' + # - name: Debug Job # if: ${{ failure() }} # uses: mxschmitt/action-tmate@v3 diff --git a/.github/workflows/peers.yaml b/.github/workflows/peers.yaml index 2de0ae3ed..ef564ec65 100644 --- a/.github/workflows/peers.yaml +++ b/.github/workflows/peers.yaml @@ -1,6 +1,11 @@ # This action will push the peers changes to aws on: workflow_dispatch: + inputs: + skip_security_checks: + description: 'Skip security checks if there is an unfixable vuln or error. Value: true/false' + required: false + default: 'false' push: branches: - dev @@ -35,30 +40,96 @@ jobs: kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret. id: setcontext - - name: Building and Pushing api image + # Caching docker images + - uses: satackey/action-docker-layer-caching@v0.0.11 + # Ignore the failure of a step and avoid terminating the job. + continue-on-error: true + + + - name: Building and Pushing peers image id: build-image env: DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} ENVIRONMENT: staging run: | + skip_security_checks=${{ github.event.inputs.skip_security_checks }} cd peers - PUSH_IMAGE=1 bash build.sh + PUSH_IMAGE=0 bash -x ./build.sh + [[ "x$skip_security_checks" == "xtrue" ]] || { + curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./ + images=("peers") + for image in ${images[*]};do + ./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG + done + err_code=$?
+ [[ $err_code -ne 0 ]] && { + exit $err_code + } + } && { + echo "Skipping Security Checks" + } + images=("peers") + for image in ${images[*]};do + docker push $DOCKER_REPO/$image:$IMAGE_TAG + done + - name: Creating old image input + run: | + # + # Create yaml with existing image tags + # + kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\ + tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt + + echo > /tmp/image_override.yaml + + for line in `cat /tmp/image_tag.txt`; + do + image_array=($(echo "$line" | tr ':' '\n')) + cat <<EOF >> /tmp/image_override.yaml + ${image_array[0]}: + image: + tag: ${image_array[1]} + EOF + done + - name: Deploy to kubernetes run: | cd scripts/helmcharts/ + + ## Update secrets sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml - sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\" #g" vars.yaml - sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\" #g" vars.yaml - sed -i "s#domain_name.*#domain_name: \"foss.openreplay.com\" #g" vars.yaml - sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml - sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml - bash kube-install.sh --app peers + sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.OSS_PG_PASSWORD }}\"/g" vars.yaml + sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\"/g" vars.yaml + sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\"/g" vars.yaml + sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.OSS_JWT_SECRET }}\"/g" vars.yaml + sed -i "s/domainName: \"\"/domainName: \"${{ secrets.OSS_DOMAIN_NAME }}\"/g" vars.yaml + + # Update changed image tag + sed -i "/peers/{n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml + + cat /tmp/image_override.yaml + # Deploy command + mv openreplay/charts/{ingress-nginx,peers,quickwit} /tmp + rm -rf openreplay/charts/* + mv /tmp/{ingress-nginx,peers,quickwit} openreplay/charts/ + helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f - env: DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} ENVIRONMENT: staging + - name: Alert slack + if: ${{ failure() }} + uses: rtCamp/action-slack-notify@v2 + env: + SLACK_CHANNEL: foss + SLACK_TITLE: "Failed ${{ github.workflow }}" + SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff' + SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }} + SLACK_USERNAME: "OR Bot" + SLACK_MESSAGE: 'Build failed :bomb:' + # - name: Debug Job # if: ${{ failure() }} # uses: mxschmitt/action-tmate@v3 @@ -66,4 +137,4 @@ jobs: # DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} # IMAGE_TAG: ${{ github.sha }} # ENVIRONMENT: staging - # + diff --git a/.github/workflows/sourcemaps-reader.yaml b/.github/workflows/sourcemaps-reader.yaml index f0059da40..5b7c11d01 100644 --- a/.github/workflows/sourcemaps-reader.yaml +++ b/.github/workflows/sourcemaps-reader.yaml @@ -1,4 +1,4 @@ -# This action will push the chalice changes to aws +# This action will push the sourcemapreader changes to aws on: workflow_dispatch: push: @@ -83,13 +83,13 @@ jobs: sed -i "s/domainName: \"\"/domainName: \"${{ secrets.OSS_DOMAIN_NAME
}}\"/g" vars.yaml # Update changed image tag - sed -i "/chalice/{n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml + sed -i "/sourcemapreader/{n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml cat /tmp/image_override.yaml # Deploy command - mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp + mv openreplay/charts/{ingress-nginx,sourcemapreader,quickwit} /tmp rm -rf openreplay/charts/* - mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/ + mv /tmp/{ingress-nginx,sourcemapreader,quickwit} openreplay/charts/ helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f - env: DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} diff --git a/api/app.py b/api/app.py index 43c3b7945..50cd7342f 100644 --- a/api/app.py +++ b/api/app.py @@ -1,4 +1,5 @@ import logging +from contextlib import asynccontextmanager from apscheduler.schedulers.asyncio import AsyncIOScheduler from decouple import config @@ -12,9 +13,42 @@ from chalicelib.utils import pg_client from routers import core, core_dynamic from routers.crons import core_crons from routers.crons import core_dynamic_crons -from routers.subs import insights, metrics, v1_api +from routers.subs import insights, metrics, v1_api, health -app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default="")) +loglevel = config("LOGLEVEL", default=logging.INFO) +print(f">Loglevel set to: {loglevel}") +logging.basicConfig(level=loglevel) + + +@asynccontextmanager +async def lifespan(app: FastAPI): + # Startup + logging.info(">>>>> starting up <<<<<") + ap_logger = logging.getLogger('apscheduler') + ap_logger.setLevel(loglevel) + + app.schedule = AsyncIOScheduler() + await pg_client.init() + app.schedule.start() + + for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs: + app.schedule.add_job(id=job["func"].__name__, **job) + + ap_logger.info(">Scheduled jobs:") + for job in app.schedule.get_jobs(): + ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) + + # App listening + yield + + # Shutdown + logging.info(">>>>> shutting down <<<<<") + app.schedule.shutdown(wait=False) + await pg_client.terminate() + + +app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""), + lifespan=lifespan) app.add_middleware(GZipMiddleware, minimum_size=1000) @@ -51,39 +85,13 @@ app.include_router(core_dynamic.app_apikey) app.include_router(metrics.app) app.include_router(insights.app) app.include_router(v1_api.app_apikey) +app.include_router(health.public_app) +app.include_router(health.app) +app.include_router(health.app_apikey) -loglevel = config("LOGLEVEL", default=logging.INFO) -print(f">Loglevel set to: {loglevel}") -logging.basicConfig(level=loglevel) -ap_logger = logging.getLogger('apscheduler') -ap_logger.setLevel(loglevel) -app.schedule = AsyncIOScheduler() - - -@app.on_event("startup") -async def startup(): - logging.info(">>>>> starting up <<<<<") - await pg_client.init() - app.schedule.start() - - for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs: - app.schedule.add_job(id=job["func"].__name__, **job) - - ap_logger.info(">Scheduled jobs:") - for job in app.schedule.get_jobs(): - ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) - - -@app.on_event("shutdown") -async def shutdown(): - 
logging.info(">>>>> shutting down <<<<<") - app.schedule.shutdown(wait=False) - await pg_client.terminate() - - -@app.get('/private/shutdown', tags=["private"]) -async def stop_server(): - logging.info("Requested shutdown") - await shutdown() - import os, signal - os.kill(1, signal.SIGTERM) +# @app.get('/private/shutdown', tags=["private"]) +# async def stop_server(): +# logging.info("Requested shutdown") +# await shutdown() +# import os, signal +# os.kill(1, signal.SIGTERM) diff --git a/api/app_alerts.py b/api/app_alerts.py index 111bad2a1..02147ef23 100644 --- a/api/app_alerts.py +++ b/api/app_alerts.py @@ -1,33 +1,17 @@ import logging +from contextlib import asynccontextmanager from apscheduler.schedulers.asyncio import AsyncIOScheduler from decouple import config from fastapi import FastAPI -from chalicelib.utils import pg_client from chalicelib.core import alerts_processor - -app = FastAPI(root_path="/alerts", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default="")) -logging.info("============= ALERTS =============") +from chalicelib.utils import pg_client -@app.get("/") -async def root(): - return {"status": "Running"} - - -app.schedule = AsyncIOScheduler() - -loglevel = config("LOGLEVEL", default=logging.INFO) -print(f">Loglevel set to: {loglevel}") -logging.basicConfig(level=loglevel) -ap_logger = logging.getLogger('apscheduler') -ap_logger.setLevel(loglevel) -app.schedule = AsyncIOScheduler() - - -@app.on_event("startup") -async def startup(): +@asynccontextmanager +async def lifespan(app: FastAPI): + # Startup logging.info(">>>>> starting up <<<<<") await pg_client.init() app.schedule.start() @@ -39,24 +23,44 @@ async def startup(): for job in app.schedule.get_jobs(): ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) + # App listening + yield -@app.on_event("shutdown") -async def shutdown(): + # Shutdown logging.info(">>>>> shutting down <<<<<") app.schedule.shutdown(wait=False) await pg_client.terminate() -@app.get('/private/shutdown', tags=["private"]) -async def stop_server(): - logging.info("Requested shutdown") - await shutdown() - import os, signal - os.kill(1, signal.SIGTERM) +app = FastAPI(root_path="/alerts", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""), + lifespan=lifespan) +logging.info("============= ALERTS =============") +@app.get("/") +async def root(): + return {"status": "Running"} + + +@app.get("/health") +async def get_health_status(): + return {"data": { + "health": True, + "details": {"version": config("version_number", default="unknown")} + }} + + +app.schedule = AsyncIOScheduler() + +loglevel = config("LOGLEVEL", default=logging.INFO) +print(f">Loglevel set to: {loglevel}") +logging.basicConfig(level=loglevel) +ap_logger = logging.getLogger('apscheduler') +ap_logger.setLevel(loglevel) +app.schedule = AsyncIOScheduler() + if config("LOCAL_DEV", default=False, cast=bool): - @app.get('/private/trigger', tags=["private"]) + @app.get('/trigger', tags=["private"]) async def trigger_main_cron(): logging.info("Triggering main cron") alerts_processor.process() diff --git a/api/chalicelib/core/alerts.py b/api/chalicelib/core/alerts.py index 3c8b00c54..dfa86ed75 100644 --- a/api/chalicelib/core/alerts.py +++ b/api/chalicelib/core/alerts.py @@ -116,7 +116,7 @@ def process_notifications(data): BATCH_SIZE = 200 for t in full.keys(): for i in range(0, len(full[t]), BATCH_SIZE): - notifications_list = full[t][i:i + BATCH_SIZE] + notifications_list = 
full[t][i:min(i + BATCH_SIZE, len(full[t]))] if notifications_list is None or len(notifications_list) == 0: break diff --git a/api/chalicelib/core/health.py b/api/chalicelib/core/health.py new file mode 100644 index 000000000..a9a54977c --- /dev/null +++ b/api/chalicelib/core/health.py @@ -0,0 +1,172 @@ +from urllib.parse import urlparse + +import redis +import requests +from decouple import config + +from chalicelib.utils import pg_client + +if config("LOCAL_DEV", cast=bool, default=False): + HEALTH_ENDPOINTS = { + "alerts": "http://127.0.0.1:8888/metrics", + "assets": "http://127.0.0.1:8888/metrics", + "assist": "http://127.0.0.1:8888/metrics", + "chalice": "http://127.0.0.1:8888/metrics", + "db": "http://127.0.0.1:8888/metrics", + "ender": "http://127.0.0.1:8888/metrics", + "heuristics": "http://127.0.0.1:8888/metrics", + "http": "http://127.0.0.1:8888/metrics", + "ingress-nginx": "http://127.0.0.1:8888/metrics", + "integrations": "http://127.0.0.1:8888/metrics", + "peers": "http://127.0.0.1:8888/metrics", + "quickwit": "http://127.0.0.1:8888/metrics", + "sink": "http://127.0.0.1:8888/metrics", + "sourcemapreader": "http://127.0.0.1:8888/metrics", + "storage": "http://127.0.0.1:8888/metrics", + "utilities": "http://127.0.0.1:8888/metrics" + } + +else: + HEALTH_ENDPOINTS = { + "alerts": "http://alerts-openreplay.app.svc.cluster.local:8888/health", + "assets": "http://assets-openreplay.app.svc.cluster.local:8888/metrics", + "assist": "http://assist-openreplay.app.svc.cluster.local:8888/health", + "chalice": "http://chalice-openreplay.app.svc.cluster.local:8888/metrics", + "db": "http://db-openreplay.app.svc.cluster.local:8888/metrics", + "ender": "http://ender-openreplay.app.svc.cluster.local:8888/metrics", + "heuristics": "http://heuristics-openreplay.app.svc.cluster.local:8888/metrics", + "http": "http://http-openreplay.app.svc.cluster.local:8888/metrics", + "ingress-nginx": "http://ingress-nginx-openreplay.app.svc.cluster.local:8888/metrics", + "integrations": "http://integrations-openreplay.app.svc.cluster.local:8888/metrics", + "peers": "http://peers-openreplay.app.svc.cluster.local:8888/health", + "sink": "http://sink-openreplay.app.svc.cluster.local:8888/metrics", + "sourcemapreader": "http://sourcemapreader-openreplay.app.svc.cluster.local:8888/health", + "storage": "http://storage-openreplay.app.svc.cluster.local:8888/metrics", + } + + +def __check_database_pg(): + with pg_client.PostgresClient() as cur: + cur.execute("SHOW server_version;") + server_version = cur.fetchone() + cur.execute("SELECT openreplay_version() AS version;") + schema_version = cur.fetchone() + return { + "health": True, + "details": { + "version": server_version["server_version"], + "schema": schema_version["version"] + } + } + + +def __not_supported(): + return {"errors": ["not supported"]} + + +def __always_healthy(): + return { + "health": True, + "details": {} + } + + +def __always_healthy_with_version(): + return { + "health": True, + "details": {"version": config("version_number", default="unknown")} + } + + +def __check_be_service(service_name): + def fn(): + fail_response = { + "health": False, + "details": { + "errors": ["server health-check failed"] + } + } + try: + results = requests.get(HEALTH_ENDPOINTS.get(service_name), timeout=2) + if results.status_code != 200: + print(f"!! issue with the {service_name}-health code:{results.status_code}") + print(results.text) + fail_response["details"]["errors"].append(results.text) + return fail_response + except requests.exceptions.Timeout: + print(f"!!
Timeout getting {service_name}-health") + fail_response["details"]["errors"].append("timeout") + return fail_response + except Exception as e: + print("!! Issue getting storage-health response") + print(str(e)) + try: + print(results.text) + fail_response["details"]["errors"].append(results.text) + except: + print("couldn't get response") + fail_response["details"]["errors"].append(str(e)) + return fail_response + return { + "health": True, + "details": {} + } + + return fn + + +def __check_redis(): + fail_response = { + "health": False, + "details": {"errors": ["server health-check failed"]} + } + if config("REDIS_STRING", default=None) is None: + fail_response["details"]["errors"].append("REDIS_STRING not defined in env-vars") + return fail_response + + try: + u = urlparse(config("REDIS_STRING")) + r = redis.Redis(host=u.hostname, port=u.port, socket_timeout=2) + r.ping() + except Exception as e: + print("!! Issue getting redis-health response") + print(str(e)) + fail_response["details"]["errors"].append(str(e)) + return fail_response + + return { + "health": True, + "details": {"version": r.execute_command('INFO')['redis_version']} + } + + +def get_health(): + health_map = { + "databases": { + "postgres": __check_database_pg + }, + "ingestionPipeline": { + "redis": __check_redis + }, + "backendServices": { + "alerts": __check_be_service("alerts"), + "assets": __check_be_service("assets"), + "assist": __check_be_service("assist"), + "chalice": __always_healthy_with_version, + "db": __check_be_service("db"), + "ender": __check_be_service("ender"), + "frontend": __always_healthy, + "heuristics": __check_be_service("heuristics"), + "http": __check_be_service("http"), + "ingress-nginx": __always_healthy, + "integrations": __check_be_service("integrations"), + "peers": __check_be_service("peers"), + "sink": __check_be_service("sink"), + "sourcemapreader": __check_be_service("sourcemapreader"), + "storage": __check_be_service("storage") + } + } + for parent_key in health_map.keys(): + for element_key in health_map[parent_key]: + health_map[parent_key][element_key] = health_map[parent_key][element_key]() + return health_map diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index c95bed903..8f98aac83 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -1,10 +1,7 @@ from typing import List import schemas -from chalicelib.core import events, metadata, events_ios, \ - sessions_mobs, issues, projects, resources, assist, performance_event, sessions_favorite, \ - sessions_devtool, sessions_notes -from chalicelib.utils import errors_helper +from chalicelib.core import events, metadata, projects, performance_event, sessions_favorite from chalicelib.utils import pg_client, helper, metrics_helper from chalicelib.utils import sql_helper as sh @@ -33,89 +30,6 @@ COALESCE((SELECT TRUE AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """ -def __group_metadata(session, project_metadata): - meta = {} - for m in project_metadata.keys(): - if project_metadata[m] is not None and session.get(m) is not None: - meta[project_metadata[m]] = session[m] - session.pop(m) - return meta - - -def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, - group_metadata=False, live=True): - with pg_client.PostgresClient() as cur: - extra_query = [] - if include_fav_viewed: - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_favorite_sessions AS fs - WHERE s.session_id = fs.session_id - AND 
fs.user_id = %(userId)s), FALSE) AS favorite""") - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS viewed""") - query = cur.mogrify( - f"""\ - SELECT - s.*, - s.session_id::text AS session_id, - (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key - {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} - {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} - FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} - WHERE s.project_id = %(project_id)s - AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": context.user_id} - ) - # print("===============") - # print(query) - cur.execute(query=query) - - data = cur.fetchone() - if data is not None: - data = helper.dict_to_camel_case(data) - if full_data: - if data["platform"] == 'ios': - data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) - for e in data['events']: - if e["type"].endswith("_IOS"): - e["type"] = e["type"][:-len("_IOS")] - data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) - data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, - session_id=session_id) - data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) - else: - data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, - group_clickrage=True) - all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) - data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] - # to keep only the first stack - # limit the number of errors to reduce the response-body size - data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors - if e['source'] == "js_exception"][:500] - data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, - session_id=session_id) - data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) - data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) - data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id) - data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, - start_ts=data["startTs"], duration=data["duration"]) - - data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, - session_id=session_id, user_id=context.user_id) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) - data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) - data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, - project_key=data["projectKey"]) - data["inDB"] = True - return data - elif live: - return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) - else: - return None - - # This function executes the query and return result def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False): diff --git a/api/chalicelib/core/sessions_favorite.py b/api/chalicelib/core/sessions_favorite.py index 
00228b31f..d3bf5e9b4 100644 --- a/api/chalicelib/core/sessions_favorite.py +++ b/api/chalicelib/core/sessions_favorite.py @@ -1,5 +1,4 @@ import schemas -from chalicelib.core import sessions from chalicelib.utils import pg_client @@ -8,11 +7,14 @@ def add_favorite_session(context: schemas.CurrentContext, project_id, session_id cur.execute( cur.mogrify(f"""\ INSERT INTO public.user_favorite_sessions(user_id, session_id) - VALUES (%(userId)s,%(session_id)s);""", + VALUES (%(userId)s,%(session_id)s) + RETURNING session_id;""", {"userId": context.user_id, "session_id": session_id}) ) - return sessions.get_by_id2_pg(context=context, project_id=project_id, session_id=session_id, - full_data=False, include_fav_viewed=True) + row = cur.fetchone() + if row: + return {"data": {"sessionId": session_id}} + return {"errors": ["something went wrong"]} def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id): @@ -21,11 +23,14 @@ def remove_favorite_session(context: schemas.CurrentContext, project_id, session cur.mogrify(f"""\ DELETE FROM public.user_favorite_sessions WHERE user_id = %(userId)s - AND session_id = %(session_id)s;""", + AND session_id = %(session_id)s + RETURNING session_id;""", {"userId": context.user_id, "session_id": session_id}) ) - return sessions.get_by_id2_pg(context=context, project_id=project_id, session_id=session_id, - full_data=False, include_fav_viewed=True) + row = cur.fetchone() + if row: + return {"data": {"sessionId": session_id}} + return {"errors": ["something went wrong"]} def favorite_session(context: schemas.CurrentContext, project_id, session_id): diff --git a/api/chalicelib/core/sessions_replay.py b/api/chalicelib/core/sessions_replay.py new file mode 100644 index 000000000..94e3cc504 --- /dev/null +++ b/api/chalicelib/core/sessions_replay.py @@ -0,0 +1,186 @@ +import schemas +from chalicelib.core import events, metadata, events_ios, \ + sessions_mobs, issues, resources, assist, sessions_devtool, sessions_notes +from chalicelib.utils import errors_helper +from chalicelib.utils import pg_client, helper + + +def __group_metadata(session, project_metadata): + meta = {} + for m in project_metadata.keys(): + if project_metadata[m] is not None and session.get(m) is not None: + meta[project_metadata[m]] = session[m] + session.pop(m) + return meta + + +# for backward compatibility +def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, + group_metadata=False, live=True): + with pg_client.PostgresClient() as cur: + extra_query = [] + if include_fav_viewed: + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_favorite_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS favorite""") + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_viewed_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS viewed""") + query = cur.mogrify( + f"""\ + SELECT + s.*, + s.session_id::text AS session_id, + (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key + {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} + {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} + FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + 
{"project_id": project_id, "session_id": session_id, "userId": context.user_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + data = cur.fetchone() + if data is not None: + data = helper.dict_to_camel_case(data) + if full_data: + if data["platform"] == 'ios': + data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) + for e in data['events']: + if e["type"].endswith("_IOS"): + e["type"] = e["type"][:-len("_IOS")] + data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) + data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, + session_id=session_id) + data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) + else: + data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, + group_clickrage=True) + all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) + data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] + # to keep only the first stack + # limit the number of errors to reduce the response-body size + data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors + if e['source'] == "js_exception"][:500] + data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, + session_id=session_id) + data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + start_ts=data["startTs"], duration=data["duration"]) + + data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, + session_id=session_id, user_id=context.user_id) + data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) + data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, + project_key=data["projectKey"]) + data["inDB"] = True + return data + elif live: + return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) + else: + return None + + +def get_replay(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, + group_metadata=False, live=True): + with pg_client.PostgresClient() as cur: + extra_query = [] + if include_fav_viewed: + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_favorite_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS favorite""") + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_viewed_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS viewed""") + query = cur.mogrify( + f"""\ + SELECT + s.*, + s.session_id::text AS session_id, + (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key + {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} + {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} + FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} + WHERE s.project_id = %(project_id)s + AND s.session_id = 
%(session_id)s;""", + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + data = cur.fetchone() + if data is not None: + data = helper.dict_to_camel_case(data) + if full_data: + if data["platform"] == 'ios': + data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) + else: + data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id) + + data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, + project_key=data["projectKey"]) + data["inDB"] = True + return data + elif live: + return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) + else: + return None + + +def get_events(project_id, session_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify( + f"""SELECT session_id, platform, start_ts, duration + FROM public.sessions AS s + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + s_data = cur.fetchone() + if s_data is not None: + s_data = helper.dict_to_camel_case(s_data) + data = {} + if s_data["platform"] == 'ios': + data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) + for e in data['events']: + if e["type"].endswith("_IOS"): + e["type"] = e["type"][:-len("_IOS")] + data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) + data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, + session_id=session_id) + else: + data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, + group_clickrage=True) + all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) + data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] + # to keep only the first stack + # limit the number of errors to reduce the response-body size + data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors + if e['source'] == "js_exception"][:500] + data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, + session_id=session_id) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + start_ts=s_data["startTs"], duration=s_data["duration"]) + + data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) + return data + else: + return None diff --git a/api/chalicelib/core/tenants.py b/api/chalicelib/core/tenants.py index 5479178d8..4d95ae491 100644 --- a/api/chalicelib/core/tenants.py +++ b/api/chalicelib/core/tenants.py @@ -68,7 +68,7 @@ def update(tenant_id, user_id, data: schemas.UpdateTenantSchema): return edit_client(tenant_id=tenant_id, changes=changes) -def tenants_exists(): - with pg_client.PostgresClient() as cur: +def tenants_exists(use_pool=True): + with pg_client.PostgresClient(use_pool=use_pool) as cur: cur.execute(f"SELECT EXISTS(SELECT 1 FROM public.tenants)") return cur.fetchone()["exists"] diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index 4cfd8b0e3..64ca1719f 100644 
--- a/api/chalicelib/utils/pg_client.py +++ b/api/chalicelib/utils/pg_client.py @@ -87,9 +87,10 @@ class PostgresClient: long_query = False unlimited_query = False - def __init__(self, long_query=False, unlimited_query=False): + def __init__(self, long_query=False, unlimited_query=False, use_pool=True): self.long_query = long_query self.unlimited_query = unlimited_query + self.use_pool = use_pool if unlimited_query: long_config = dict(_PG_CONFIG) long_config["application_name"] += "-UNLIMITED" @@ -100,7 +101,7 @@ class PostgresClient: long_config["options"] = f"-c statement_timeout=" \ f"{config('pg_long_timeout', cast=int, default=5 * 60) * 1000}" self.connection = psycopg2.connect(**long_config) - elif not config('PG_POOL', cast=bool, default=True): + elif not use_pool or not config('PG_POOL', cast=bool, default=True): single_config = dict(_PG_CONFIG) single_config["application_name"] += "-NOPOOL" single_config["options"] = f"-c statement_timeout={config('PG_TIMEOUT', cast=int, default=30) * 1000}" @@ -120,11 +121,12 @@ class PostgresClient: try: self.connection.commit() self.cursor.close() - if self.long_query or self.unlimited_query: + if not self.use_pool or self.long_query or self.unlimited_query: self.connection.close() except Exception as error: logging.error("Error while committing/closing PG-connection", error) if str(error) == "connection already closed" \ + and self.use_pool \ and not self.long_query \ and not self.unlimited_query \ and config('PG_POOL', cast=bool, default=True): @@ -134,6 +136,7 @@ class PostgresClient: raise error finally: if config('PG_POOL', cast=bool, default=True) \ + and self.use_pool \ and not self.long_query \ and not self.unlimited_query: postgreSQL_pool.putconn(self.connection) diff --git a/api/entrypoint.sh b/api/entrypoint.sh index e140268ef..401046526 100755 --- a/api/entrypoint.sh +++ b/api/entrypoint.sh @@ -1,3 +1,3 @@ #!/bin/sh -uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload --proxy-headers +uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --proxy-headers diff --git a/api/entrypoint_alerts.sh b/api/entrypoint_alerts.sh index dedfa102b..9ac93dd60 100755 --- a/api/entrypoint_alerts.sh +++ b/api/entrypoint_alerts.sh @@ -1,3 +1,3 @@ #!/bin/sh export ASSIST_KEY=ignore -uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload +uvicorn app:app --host 0.0.0.0 --port 8888 diff --git a/api/env.default b/api/env.default index 78acd001c..12feccf1f 100644 --- a/api/env.default +++ b/api/env.default @@ -52,4 +52,4 @@ PRESIGNED_URL_EXPIRATION=3600 ASSIST_JWT_EXPIRATION=144000 ASSIST_JWT_SECRET= PYTHONUNBUFFERED=1 -THUMBNAILS_BUCKET=thumbnails \ No newline at end of file +REDIS_STRING=redis://redis-master.db.svc.cluster.local:6379 \ No newline at end of file diff --git a/api/requirements-alerts.txt b/api/requirements-alerts.txt index b208d28c2..edb644c87 100644 --- a/api/requirements-alerts.txt +++ b/api/requirements-alerts.txt @@ -8,7 +8,7 @@ jira==3.4.1 -fastapi==0.92.0 +fastapi==0.94.1 uvicorn[standard]==0.20.0 python-decouple==3.7 pydantic[email]==1.10.4 diff --git a/api/requirements.txt b/api/requirements.txt index 0a058a94f..490a147df 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -8,8 +8,10 @@ jira==3.4.1 -fastapi==0.92.0 +fastapi==0.95.0 uvicorn[standard]==0.20.0 python-decouple==3.7 pydantic[email]==1.10.4 apscheduler==3.10.0 + +redis==4.5.1 \ No newline at end of file diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 3389074bf..c6f71e88f 100644 --- a/api/routers/core_dynamic.py +++ 
b/api/routers/core_dynamic.py @@ -6,7 +6,7 @@ from starlette.responses import RedirectResponse, FileResponse import schemas from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, sessions_assignments, heatmaps, \ - sessions_favorite, assist, sessions_notes, click_maps + sessions_favorite, assist, sessions_notes, click_maps, sessions_replay from chalicelib.core import sessions_viewed from chalicelib.core import tenants, users, projects, license from chalicelib.core import webhook @@ -145,13 +145,14 @@ async def get_projects(context: schemas.CurrentContext = Depends(OR_context)): stack_integrations=True)} -@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"]) +# for backward compatibility +@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions", "replay"]) async def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): return {"errors": ["session not found"]} - data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, - include_fav_viewed=True, group_metadata=True, context=context) + data = sessions_replay.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, + include_fav_viewed=True, group_metadata=True, context=context) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): @@ -162,6 +163,37 @@ async def get_session(projectId: int, sessionId: Union[int, str], background_tas } +@app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"]) +async def get_session_replay(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, + context: schemas.CurrentContext = Depends(OR_context)): + if isinstance(sessionId, str): + return {"errors": ["session not found"]} + data = sessions_replay.get_replay(project_id=projectId, session_id=sessionId, full_data=True, + include_fav_viewed=True, group_metadata=True, context=context) + if data is None: + return {"errors": ["session not found"]} + if data.get("inDB"): + background_tasks.add_task(sessions_viewed.view_session, project_id=projectId, user_id=context.user_id, + session_id=sessionId) + return { + 'data': data + } + + +@app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"]) +async def get_session_events(projectId: int, sessionId: Union[int, str], + context: schemas.CurrentContext = Depends(OR_context)): + if isinstance(sessionId, str): + return {"errors": ["session not found"]} + data = sessions_replay.get_events(project_id=projectId, session_id=sessionId) + if data is None: + return {"errors": ["session not found"]} + + return { + 'data': data + } + + @app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"]) async def get_error_trace(projectId: int, sessionId: int, errorId: str, context: schemas.CurrentContext = Depends(OR_context)): @@ -239,8 +271,8 @@ async def get_live_session(projectId: int, sessionId: str, background_tasks: Bac context: schemas.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: - data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId, - full_data=True, include_fav_viewed=True, group_metadata=True, live=False) + data = sessions_replay.get_replay(context=context, project_id=projectId, session_id=sessionId, + full_data=True, include_fav_viewed=True,
group_metadata=True, live=False) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): diff --git a/api/routers/subs/health.py b/api/routers/subs/health.py new file mode 100644 index 000000000..5e3c10f07 --- /dev/null +++ b/api/routers/subs/health.py @@ -0,0 +1,14 @@ +from chalicelib.core import health, tenants +from routers.base import get_routers + +public_app, app, app_apikey = get_routers() + +health_router = public_app + +if tenants.tenants_exists(use_pool=False): + health_router = app + + +@health_router.get('/health', tags=["health-check"]) +def get_global_health_status(): + return {"data": health.get_health()} diff --git a/api/run-alerts-dev.sh b/api/run-alerts-dev.sh index 54db30171..309356133 100755 --- a/api/run-alerts-dev.sh +++ b/api/run-alerts-dev.sh @@ -1,3 +1,3 @@ #!/bin/zsh -uvicorn app_alerts:app --reload \ No newline at end of file +uvicorn app_alerts:app --reload --port 8888 \ No newline at end of file diff --git a/utilities/.dockerignore b/assist/.dockerignore similarity index 100% rename from utilities/.dockerignore rename to assist/.dockerignore diff --git a/utilities/.gitignore b/assist/.gitignore similarity index 100% rename from utilities/.gitignore rename to assist/.gitignore diff --git a/utilities/Dockerfile b/assist/Dockerfile similarity index 97% rename from utilities/Dockerfile rename to assist/Dockerfile index edbaae03c..84b54c906 100644 --- a/utilities/Dockerfile +++ b/assist/Dockerfile @@ -18,4 +18,4 @@ USER 1001 ADD --chown=1001 https://static.openreplay.com/geoip/GeoLite2-Country.mmdb $MAXMINDDB_FILE ENTRYPOINT ["/sbin/tini", "--"] -CMD npm start +CMD npm start \ No newline at end of file diff --git a/utilities/build.sh b/assist/build.sh similarity index 92% rename from utilities/build.sh rename to assist/build.sh index 861d37596..84c6f882c 100644 --- a/utilities/build.sh +++ b/assist/build.sh @@ -35,20 +35,20 @@ update_helm_release() { } function build_api(){ - destination="_utilities" + destination="_assist" [[ $1 == "ee" ]] && { - destination="_utilities_ee" + destination="_assist_ee" } - cp -R ../utilities ../${destination} + cp -R ../assist ../${destination} cd ../${destination} # Copy enterprise code [[ $1 == "ee" ]] && { - cp -rf ../ee/utilities/* ./ + cp -rf ../ee/assist/* ./ } docker build -f ./Dockerfile --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/assist:${image_tag} . 
- cd ../utilities + cd ../assist rm -rf ../${destination} [[ $PUSH_IMAGE -eq 1 ]] && { docker push ${DOCKER_REPO:-'local'}/assist:${image_tag} diff --git a/utilities/package-lock.json b/assist/package-lock.json similarity index 99% rename from utilities/package-lock.json rename to assist/package-lock.json index aba9e43fe..683472320 100644 --- a/utilities/package-lock.json +++ b/assist/package-lock.json @@ -45,9 +45,9 @@ } }, "node_modules/@types/node": { - "version": "18.14.1", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.1.tgz", - "integrity": "sha512-QH+37Qds3E0eDlReeboBxfHbX9omAcBCXEzswCu6jySP642jiM3cYSIkU/REqwhCUqXdonHFuBfJDiAJxMNhaQ==" + "version": "18.14.6", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.6.tgz", + "integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA==" }, "node_modules/accepts": { "version": "1.3.8", @@ -987,9 +987,9 @@ } }, "node_modules/ua-parser-js": { - "version": "1.0.33", - "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.33.tgz", - "integrity": "sha512-RqshF7TPTE0XLYAqmjlu5cLLuGdKrNu9O1KLA/qp39QtbZwuzwv1dT46DZSopoUMsYgXpB3Cv8a03FI8b74oFQ==", + "version": "1.0.34", + "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.34.tgz", + "integrity": "sha512-K9mwJm/DaB6mRLZfw6q8IMXipcrmuT6yfhYmwhAkuh+81sChuYstYA+znlgaflUPaYUa3odxKPKGw6Vw/lANew==", "funding": [ { "type": "opencollective", diff --git a/utilities/package.json b/assist/package.json similarity index 97% rename from utilities/package.json rename to assist/package.json index b06c8cae5..ad9794fea 100644 --- a/utilities/package.json +++ b/assist/package.json @@ -1,6 +1,6 @@ { "name": "assist-server", - "version": "1.0.0", + "version": "v1.11.0", "description": "assist server to get live sessions & sourcemaps reader to get stack trace", "main": "peerjs-server.js", "scripts": { diff --git a/ee/utilities/run-dev.sh b/assist/run-dev.sh similarity index 100% rename from ee/utilities/run-dev.sh rename to assist/run-dev.sh diff --git a/utilities/server.js b/assist/server.js similarity index 81% rename from utilities/server.js rename to assist/server.js index d71aca65d..5eb6c2e16 100644 --- a/utilities/server.js +++ b/assist/server.js @@ -2,6 +2,7 @@ const dumps = require('./utils/HeapSnapshot'); const express = require('express'); const socket = require("./servers/websocket"); const {request_logger} = require("./utils/helper"); +const health = require("./utils/health"); const assert = require('assert').strict; const debug = process.env.debug === "1"; @@ -10,7 +11,7 @@ const HOST = process.env.LISTEN_HOST || '0.0.0.0'; const PORT = process.env.LISTEN_PORT || 9001; assert.ok(process.env.ASSIST_KEY, 'The "ASSIST_KEY" environment variable is required'); const P_KEY = process.env.ASSIST_KEY; -const PREFIX = process.env.PREFIX || process.env.prefix || `/assist` +const PREFIX = process.env.PREFIX || process.env.prefix || `/assist`; const wsapp = express(); wsapp.use(express.json()); @@ -27,16 +28,9 @@ heapdump && wsapp.use(`${PREFIX}/${P_KEY}/heapdump`, dumps.router); const wsserver = wsapp.listen(PORT, HOST, () => { console.log(`WS App listening on http://${HOST}:${PORT}`); - console.log('Press Ctrl+C to quit.'); + health.healthApp.listen(health.PORT, HOST, health.listen_cb); }); + wsapp.enable('trust proxy'); socket.start(wsserver); -module.exports = {wsserver}; - -wsapp.get('/private/shutdown', (req, res) => { - console.log("Requested shutdown"); - res.statusCode = 200; - 
res.end("ok!"); - process.kill(1, "SIGTERM"); - } -); \ No newline at end of file +module.exports = {wsserver}; \ No newline at end of file diff --git a/utilities/servers/websocket.js b/assist/servers/websocket.js similarity index 96% rename from utilities/servers/websocket.js rename to assist/servers/websocket.js index f5d029bc2..0fdda85f2 100644 --- a/utilities/servers/websocket.js +++ b/assist/servers/websocket.js @@ -26,7 +26,7 @@ const debug = process.env.debug === "1"; const createSocketIOServer = function (server, prefix) { io = _io(server, { - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6, cors: { origin: "*", methods: ["GET", "POST", "PUT"] @@ -45,7 +45,22 @@ const respond = function (res, data) { res.setHeader('Content-Type', 'application/json'); res.end(JSON.stringify({"data": data})); } - +const countSessions = async function () { + let count = 0; + try { + const arr = Array.from(io.sockets.adapter.rooms); + const filtered = arr.filter(room => !room[1].has(room[0])); + for (let i of filtered) { + let {projectKey, sessionId} = extractPeerId(i[0]); + if (projectKey !== null && sessionId !== null) { + count++; + } + } + } catch (e) { + console.error(e); + } + return count; +} const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); let filters = extractPayloadFromRequest(req); @@ -360,6 +375,7 @@ module.exports = { socketConnexionTimeout(io); }, + countSessions, handlers: { socketsList, socketsListByProject, diff --git a/utilities/utils/HeapSnapshot.js b/assist/utils/HeapSnapshot.js similarity index 100% rename from utilities/utils/HeapSnapshot.js rename to assist/utils/HeapSnapshot.js diff --git a/utilities/utils/assistHelper.js b/assist/utils/assistHelper.js similarity index 100% rename from utilities/utils/assistHelper.js rename to assist/utils/assistHelper.js diff --git a/utilities/utils/geoIP.js b/assist/utils/geoIP.js similarity index 100% rename from utilities/utils/geoIP.js rename to assist/utils/geoIP.js diff --git a/assist/utils/health.js b/assist/utils/health.js new file mode 100644 index 000000000..d71864e71 --- /dev/null +++ b/assist/utils/health.js @@ -0,0 +1,54 @@ +const express = require('express'); +const socket = require("../servers/websocket"); +const HOST = process.env.LISTEN_HOST || '0.0.0.0'; +const PORT = process.env.HEALTH_PORT || 8888; + + +const {request_logger} = require("./helper"); +const debug = process.env.debug === "1"; +const respond = function (res, data) { + res.statusCode = 200; + res.setHeader('Content-Type', 'application/json'); + res.end(JSON.stringify({"data": data})); +} + +const check_health = async function (req, res) { + debug && console.log("[WS]looking for all available sessions"); + respond(res, { + "health": true, + "details": { + "version": process.env.npm_package_version, + "connectedSessions": await socket.countSessions() + } + }); +} + + +const healthApp = express(); +healthApp.use(express.json()); +healthApp.use(express.urlencoded({extended: true})); +healthApp.use(request_logger("[healthApp]")); +healthApp.get(['/'], (req, res) => { + res.statusCode = 200; + res.end("healthApp ok!"); + } +); +healthApp.get('/health', check_health); +healthApp.get('/shutdown', (req, res) => { + console.log("Requested shutdown"); + res.statusCode = 200; + res.end("ok!"); + process.kill(1, "SIGTERM"); + } +); + +const listen_cb = async function () { + console.log(`Health App listening on 
+const check_health = async function (req, res) {
+    debug && console.log("[health] checking available sessions");
+    respond(res, {
+        "health": true,
+        "details": {
+            "version": process.env.npm_package_version,
+            "connectedSessions": await socket.countSessions()
+        }
+    });
+}
+
+
+const healthApp = express();
+healthApp.use(express.json());
+healthApp.use(express.urlencoded({extended: true}));
+healthApp.use(request_logger("[healthApp]"));
+healthApp.get(['/'], (req, res) => {
+        res.statusCode = 200;
+        res.end("healthApp ok!");
+    }
+);
+healthApp.get('/health', check_health);
+healthApp.get('/shutdown', (req, res) => {
+        console.log("Requested shutdown");
+        res.statusCode = 200;
+        res.end("ok!");
+        process.kill(1, "SIGTERM");
+    }
+);
+
+const listen_cb = async function () {
+    console.log(`Health App listening on http://${HOST}:${PORT}`);
+    console.log('Press Ctrl+C to quit.');
+}
+
+module.exports = {
+    healthApp,
+    PORT,
+    listen_cb
+};
diff --git a/utilities/utils/helper.js b/assist/utils/helper.js
similarity index 100%
rename from utilities/utils/helper.js
rename to assist/utils/helper.js
diff --git a/ee/api/.gitignore b/ee/api/.gitignore
index 79aec2ade..1e342e8bc 100644
--- a/ee/api/.gitignore
+++ b/ee/api/.gitignore
@@ -215,6 +215,7 @@ Pipfile.lock
 /chalicelib/core/log_tool_sumologic.py
 /chalicelib/core/metadata.py
 /chalicelib/core/mobile.py
+/chalicelib/core/sessions.py
 /chalicelib/core/sessions_assignments.py #exp
 /chalicelib/core/sessions_metas.py
 /chalicelib/core/sessions_mobs.py
@@ -264,5 +265,8 @@ Pipfile.lock
 /app_alerts.py
 /build_alerts.sh
 /build_crons.sh
+/run-dev.sh
+/run-alerts-dev.sh
+/routers/subs/health.py
 /routers/subs/v1_api.py #exp
 /chalicelib/core/dashboards.py
diff --git a/ee/api/app.py b/ee/api/app.py
index a1e203005..034d93565 100644
--- a/ee/api/app.py
+++ b/ee/api/app.py
@@ -1,5 +1,6 @@
 import logging
 import queue
+from contextlib import asynccontextmanager
 
 from apscheduler.schedulers.asyncio import AsyncIOScheduler
 from decouple import config
@@ -10,17 +11,56 @@ from starlette import status
 from starlette.responses import StreamingResponse, JSONResponse
 
 from chalicelib.core import traces
+from chalicelib.utils import events_queue
 from chalicelib.utils import helper
 from chalicelib.utils import pg_client
-from chalicelib.utils import events_queue
 from routers import core, core_dynamic, ee, saml
 from routers.crons import core_crons
 from routers.crons import core_dynamic_crons
 from routers.crons import ee_crons
 from routers.subs import insights, metrics, v1_api_ee
-from routers.subs import v1_api
+from routers.subs import v1_api, health
 
-app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""))
+loglevel = config("LOGLEVEL", default=logging.INFO)
+print(f">Loglevel set to: {loglevel}")
+logging.basicConfig(level=loglevel)
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    # Startup
+    logging.info(">>>>> starting up <<<<<")
+    ap_logger = logging.getLogger('apscheduler')
+    ap_logger.setLevel(loglevel)
+
+    app.schedule = AsyncIOScheduler()
+    app.queue_system = queue.Queue()
+    await pg_client.init()
+    await events_queue.init()
+    app.schedule.start()
+
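+    # Each cron entry is a dict of apscheduler add_job() kwargs; the job id
+    # is derived from the entry's "func" name.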
+    for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs + traces.cron_jobs + ee_crons.ee_cron_jobs:
+        app.schedule.add_job(id=job["func"].__name__, **job)
+
+    ap_logger.info(">Scheduled jobs:")
+    for job in app.schedule.get_jobs():
+        ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
+
+    # App listening
+    yield
+
+    # Shutdown
+    logging.info(">>>>> shutting down <<<<<")
+    app.schedule.shutdown(wait=True)
+    await traces.process_traces_queue()
+    await events_queue.terminate()
+    await pg_client.terminate()
+
+
+app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""),
+              lifespan=lifespan)
 
 app.add_middleware(GZipMiddleware, minimum_size=1000)
@@ -68,43 +106,6 @@ app.include_router(metrics.app)
 app.include_router(insights.app)
 app.include_router(v1_api.app_apikey)
 app.include_router(v1_api_ee.app_apikey)
-
-loglevel = config("LOGLEVEL", default=logging.INFO)
-print(f">Loglevel set to: {loglevel}")
-logging.basicConfig(level=loglevel)
-ap_logger = logging.getLogger('apscheduler')
-ap_logger.setLevel(loglevel)
-app.schedule = AsyncIOScheduler()
-app.queue_system = queue.Queue()
-
-
-@app.on_event("startup")
-async def startup():
-    logging.info(">>>>> starting up <<<<<")
-    await pg_client.init()
-    await events_queue.init()
-    app.schedule.start()
-
-    for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs + traces.cron_jobs + ee_crons.ee_cron_jobs:
-        app.schedule.add_job(id=job["func"].__name__, **job)
-
-    ap_logger.info(">Scheduled jobs:")
-    for job in app.schedule.get_jobs():
-        ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
-
-
-@app.on_event("shutdown")
-async def shutdown():
-    logging.info(">>>>> shutting down <<<<<")
-    app.schedule.shutdown(wait=True)
-    await traces.process_traces_queue()
-    await events_queue.terminate()
-    await pg_client.terminate()
-
-
-@app.get('/private/shutdown', tags=["private"])
-async def stop_server():
-    logging.info("Requested shutdown")
-    await shutdown()
-    import os, signal
-    os.kill(1, signal.SIGTERM)
+app.include_router(health.public_app)
+app.include_router(health.app)
+app.include_router(health.app_apikey)
diff --git a/ee/api/chalicelib/core/health.py b/ee/api/chalicelib/core/health.py
new file mode 100644
index 000000000..4b95888d1
--- /dev/null
+++ b/ee/api/chalicelib/core/health.py
@@ -0,0 +1,232 @@
+from urllib.parse import urlparse
+
+import redis
+import requests
+# from confluent_kafka.admin import AdminClient
+from decouple import config
+
+from chalicelib.utils import pg_client, ch_client
+
+if config("LOCAL_DEV", cast=bool, default=False):
+    HEALTH_ENDPOINTS = {
+        "alerts": "http://127.0.0.1:8888/metrics",
+        "assets": "http://127.0.0.1:8888/metrics",
+        "assist": "http://127.0.0.1:8888/metrics",
+        "chalice": "http://127.0.0.1:8888/metrics",
+        "db": "http://127.0.0.1:8888/metrics",
+        "ender": "http://127.0.0.1:8888/metrics",
+        "heuristics": "http://127.0.0.1:8888/metrics",
+        "http": "http://127.0.0.1:8888/metrics",
+        "ingress-nginx": "http://127.0.0.1:8888/metrics",
+        "integrations": "http://127.0.0.1:8888/metrics",
+        "peers": "http://127.0.0.1:8888/metrics",
+        "quickwit": "http://127.0.0.1:8888/metrics",
+        "sink": "http://127.0.0.1:8888/metrics",
+        "sourcemapreader": "http://127.0.0.1:8888/metrics",
+        "storage": "http://127.0.0.1:8888/metrics",
+        "utilities": "http://127.0.0.1:8888/metrics"
+    }
+
+else:
+    HEALTH_ENDPOINTS = {
+        "alerts": "http://alerts-openreplay.app.svc.cluster.local:8888/health",
+        "assets": "http://assets-openreplay.app.svc.cluster.local:8888/metrics",
+        "assist": "http://assist-openreplay.app.svc.cluster.local:8888/health",
+        "chalice": "http://chalice-openreplay.app.svc.cluster.local:8888/metrics",
+        "db": "http://db-openreplay.app.svc.cluster.local:8888/metrics",
+        "ender": "http://ender-openreplay.app.svc.cluster.local:8888/metrics",
+        "heuristics": "http://heuristics-openreplay.app.svc.cluster.local:8888/metrics",
+        "http": "http://http-openreplay.app.svc.cluster.local:8888/metrics",
+        "ingress-nginx": "http://ingress-nginx-openreplay.app.svc.cluster.local:8888/metrics",
+        "integrations": "http://integrations-openreplay.app.svc.cluster.local:8888/metrics",
+        "peers": "http://peers-openreplay.app.svc.cluster.local:8888/health",
+        "quickwit": "http://quickwit-openreplay.app.svc.cluster.local:8888/metrics",
+        "sink": "http://sink-openreplay.app.svc.cluster.local:8888/metrics",
+        "sourcemapreader": "http://sourcemapreader-openreplay.app.svc.cluster.local:8888/health",
+        "storage": "http://storage-openreplay.app.svc.cluster.local:8888/metrics",
+    }
+
+
+def __check_database_pg():
+    with pg_client.PostgresClient() as cur:
+        cur.execute("SHOW server_version;")
+        server_version = cur.fetchone()
+        cur.execute("SELECT openreplay_version() AS version;")
+        schema_version = cur.fetchone()
+    return {
+        "health": True,
+        "details": {
+            "version": server_version["server_version"],
+            "schema": schema_version["version"]
+        }
+    }
+
+
+def __not_supported():
+    return {"errors": ["not supported"]}
+
+
+def __always_healthy():
+    return {
+        "health": True,
+        "details": {}
+    }
+
+
+def __always_healthy_with_version():
+    return {
+        "health": True,
+        "details": {"version": config("version_number", default="unknown")}
+    }
+
+
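+# Probe factory: returns a closure that GETs the named service's health
+# endpoint (2s timeout), so it can sit in the health map like the other probes.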
+def __check_be_service(service_name):
+    def fn():
+        fail_response = {
+            "health": False,
+            "details": {
+                "errors": ["server health-check failed"]
+            }
+        }
+        try:
+            results = requests.get(HEALTH_ENDPOINTS.get(service_name), timeout=2)
+            if results.status_code != 200:
+                print(f"!! issue with the {service_name}-health code:{results.status_code}")
+                print(results.text)
+                fail_response["details"]["errors"].append(results.text)
+                return fail_response
+        except requests.exceptions.Timeout:
+            print(f"!! Timeout getting {service_name}-health")
+            fail_response["details"]["errors"].append("timeout")
+            return fail_response
+        except Exception as e:
+            print(f"!! Issue getting {service_name}-health response")
+            print(str(e))
+            try:
+                print(results.text)
+                fail_response["details"]["errors"].append(results.text)
+            except:
+                print("couldn't get response")
+                fail_response["details"]["errors"].append(str(e))
+            return fail_response
+        return {
+            "health": True,
+            "details": {}
+        }
+
+    return fn
+
+
+def __check_redis():
+    fail_response = {
+        "health": False,
+        "details": {"errors": ["server health-check failed"]}
+    }
+    if config("REDIS_STRING", default=None) is None:
+        fail_response["details"]["errors"].append("REDIS_STRING not defined in env-vars")
+        return fail_response
+
+    try:
+        u = urlparse(config("REDIS_STRING"))
+        r = redis.Redis(host=u.hostname, port=u.port, socket_timeout=2)
+        r.ping()
+    except Exception as e:
+        print("!! Issue getting redis-health response")
+        print(str(e))
+        fail_response["details"]["errors"].append(str(e))
+        return fail_response
+
+    return {
+        "health": True,
+        "details": {"version": r.execute_command('INFO')['redis_version']}
+    }
+
+
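+# Builds the full health report: every leaf in health_map is a callable probe
+# and is replaced in place by its result before the map is returned.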
+def get_health():
+    health_map = {
+        "databases": {
+            "postgres": __check_database_pg,
+            "clickhouse": __check_database_ch
+        },
+        "ingestionPipeline": {
+            "redis": __check_redis,
+            # "kafka": __check_kafka
+            "kafka": __always_healthy
+        },
+        "backendServices": {
+            "alerts": __check_be_service("alerts"),
+            "assets": __check_be_service("assets"),
+            "assist": __check_be_service("assist"),
+            "chalice": __always_healthy_with_version,
+            "db": __check_be_service("db"),
+            "ender": __check_be_service("ender"),
+            "frontend": __always_healthy,
+            "heuristics": __check_be_service("heuristics"),
+            "http": __check_be_service("http"),
+            "ingress-nginx": __always_healthy,
+            "integrations": __check_be_service("integrations"),
+            "peers": __check_be_service("peers"),
+            "quickwit": __check_be_service("quickwit"),
+            "sink": __check_be_service("sink"),
+            "sourcemapreader": __check_be_service("sourcemapreader"),
+            "storage": __check_be_service("storage")
+        }
+    }
+    for parent_key in health_map.keys():
+        for element_key in health_map[parent_key]:
+            health_map[parent_key][element_key] = health_map[parent_key][element_key]()
+    return health_map
+
+
+def __check_database_ch():
+    errors = {}
+    with ch_client.ClickHouseClient() as ch:
+        server_version = ch.execute("SELECT version() AS server_version;")
+        schema_version = ch.execute("""SELECT 1
+                                       FROM system.functions
+                                       WHERE name = 'openreplay_version';""")
+        if len(schema_version) > 0:
+            schema_version = ch.execute("SELECT openreplay_version() AS version;")
+            schema_version = schema_version[0]["version"]
+        else:
+            schema_version = "unknown"
+            errors = {"errors": ["clickhouse schema is outdated"]}
+    return {
+        "health": True,
+        "details": {
+            "version": server_version[0]["server_version"],
+            "schema": schema_version,
+            **errors
+        }
+    }
+
+
+# def __check_kafka():
+#     fail_response = {
+#         "health": False,
+#         "details": {"errors": ["server health-check failed"]}
+#     }
+#     if config("KAFKA_SERVERS", default=None) is None:
+#         fail_response["details"]["errors"].append("KAFKA_SERVERS not defined in env-vars")
+#         return fail_response
+#
+#     try:
+#         a = AdminClient({'bootstrap.servers': config("KAFKA_SERVERS"), "socket.connection.setup.timeout.ms": 3000})
+#         topics = a.list_topics().topics
+#         if not topics:
+#             raise Exception('topics not found')
+#
+#     except Exception as e:
+#         print("!! 
Issue getting kafka-health response") +# print(str(e)) +# fail_response["details"]["errors"].append(str(e)) +# return fail_response +# +# return { +# "health": True, +# "details": {} +# } diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py deleted file mode 100644 index 6d92c3954..000000000 --- a/ee/api/chalicelib/core/sessions.py +++ /dev/null @@ -1,1215 +0,0 @@ -from typing import List - -import schemas -import schemas_ee -from chalicelib.core import events, metadata, events_ios, \ - sessions_mobs, issues, projects, resources, assist, performance_event, sessions_favorite, \ - sessions_devtool, sessions_notes -from chalicelib.utils import errors_helper -from chalicelib.utils import pg_client, helper, metrics_helper -from chalicelib.utils import sql_helper as sh - -SESSION_PROJECTION_COLS = """s.project_id, -s.session_id::text AS session_id, -s.user_uuid, -s.user_id, -s.user_os, -s.user_browser, -s.user_device, -s.user_device_type, -s.user_country, -s.start_ts, -s.duration, -s.events_count, -s.pages_count, -s.errors_count, -s.user_anonymous_id, -s.platform, -s.issue_score, -to_jsonb(s.issue_types) AS issue_types, -favorite_sessions.session_id NOTNULL AS favorite, -COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """ - - -def __group_metadata(session, project_metadata): - meta = {} - for m in project_metadata.keys(): - if project_metadata[m] is not None and session.get(m) is not None: - meta[project_metadata[m]] = session[m] - session.pop(m) - return meta - - -def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, - include_fav_viewed=False, group_metadata=False, live=True): - with pg_client.PostgresClient() as cur: - extra_query = [] - if include_fav_viewed: - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_favorite_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS favorite""") - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS viewed""") - query = cur.mogrify( - f"""\ - SELECT - s.*, - s.session_id::text AS session_id, - (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key, - encode(file_key,'hex') AS file_key - {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} - {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} - FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} - WHERE s.project_id = %(project_id)s - AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": context.user_id} - ) - # print("===============") - # print(query) - cur.execute(query=query) - - data = cur.fetchone() - if data is not None: - data = helper.dict_to_camel_case(data) - if full_data: - if data["platform"] == 'ios': - data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) - for e in data['events']: - if e["type"].endswith("_IOS"): - e["type"] = e["type"][:-len("_IOS")] - data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) - data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, - session_id=session_id) - data['mobsUrl'] = 
sessions_mobs.get_ios(session_id=session_id) - else: - data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, - group_clickrage=True) - all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) - data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] - # to keep only the first stack - # limit the number of errors to reduce the response-body size - data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors - if e['source'] == "js_exception"][:500] - data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, - session_id=session_id) - data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) - data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) - data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, - context=context) - data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, - start_ts=data["startTs"], duration=data["duration"]) - - data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, - session_id=session_id, user_id=context.user_id) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) - data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) - data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, - project_key=data["projectKey"]) - data["inDB"] = True - return data - elif live: - return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) - else: - return None - - -# This function executes the query and return result -def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, - error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False): - if data.bookmarked: - data.startDate, data.endDate = sessions_favorite.get_start_end_timestamp(project_id, user_id) - - full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only, - favorite_only=data.bookmarked, issue=issue, project_id=project_id, - user_id=user_id) - if data.limit is not None and data.page is not None: - full_args["sessions_limit"] = data.limit - full_args["sessions_limit_s"] = (data.page - 1) * data.limit - full_args["sessions_limit_e"] = data.page * data.limit - else: - full_args["sessions_limit"] = 200 - full_args["sessions_limit_s"] = 1 - full_args["sessions_limit_e"] = 200 - - meta_keys = [] - with pg_client.PostgresClient() as cur: - if errors_only: - main_query = cur.mogrify(f"""SELECT DISTINCT er.error_id, - COALESCE((SELECT TRUE - FROM public.user_viewed_errors AS ve - WHERE er.error_id = ve.error_id - AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed - {query_part};""", full_args) - - elif count_only: - main_query = cur.mogrify(f"""SELECT COUNT(DISTINCT s.session_id) AS count_sessions, - COUNT(DISTINCT s.user_uuid) AS count_users - {query_part};""", full_args) - elif data.group_by_user: - g_sort = "count(full_sessions)" - if data.order is None: - data.order = schemas.SortOrderType.desc.value - else: - data.order = data.order.value - if data.sort is not None and data.sort != 'sessionsCount': - sort = helper.key_to_snake_case(data.sort) - g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})" - else: - sort = 'start_ts' - - meta_keys = 
metadata.get(project_id=project_id) - main_query = cur.mogrify(f"""SELECT COUNT(*) AS count, - COALESCE(JSONB_AGG(users_sessions) - FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT user_id, - count(full_sessions) AS user_sessions_count, - jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session, - MIN(full_sessions.start_ts) AS first_session_ts, - ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn - FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn - FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} - {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} - {query_part} - ) AS filtred_sessions - ) AS full_sessions - GROUP BY user_id - ) AS users_sessions;""", - full_args) - elif ids_only: - main_query = cur.mogrify(f"""SELECT DISTINCT ON(s.session_id) s.session_id - {query_part} - ORDER BY s.session_id desc - LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""", - full_args) - else: - if data.order is None: - data.order = schemas.SortOrderType.desc.value - else: - data.order = data.order.value - sort = 'session_id' - if data.sort is not None and data.sort != "session_id": - # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) - sort = helper.key_to_snake_case(data.sort) - meta_keys = metadata.get(project_id=project_id) - main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, - COALESCE(JSONB_AGG(full_sessions) - FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn - FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} - {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} - {query_part} - ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", - full_args) - # print("--------------------") - # print(main_query) - # print("--------------------") - try: - cur.execute(main_query) - except Exception as err: - print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------") - print(main_query.decode('UTF-8')) - print("--------- PAYLOAD -----------") - print(data.json()) - print("--------------------") - raise err - if errors_only or ids_only: - return helper.list_to_camel_case(cur.fetchall()) - - sessions = cur.fetchone() - if count_only: - return helper.dict_to_camel_case(sessions) - - total = sessions["count"] - sessions = sessions["sessions"] - - if data.group_by_user: - for i, s in enumerate(sessions): - sessions[i] = {**s.pop("last_session")[0], **s} - sessions[i].pop("rn") - sessions[i]["metadata"] = {k["key"]: sessions[i][f'metadata_{k["index"]}'] for k in meta_keys \ - if sessions[i][f'metadata_{k["index"]}'] is not None} - else: - for i, s in enumerate(sessions): - sessions[i]["metadata"] = {k["key"]: sessions[i][f'metadata_{k["index"]}'] for k in meta_keys \ - if sessions[i][f'metadata_{k["index"]}'] is not None} - # if not data.group_by_user and data.sort is not None and data.sort != "session_id": - # sessions = sorted(sessions, key=lambda s: s[helper.key_to_snake_case(data.sort)], - # reverse=data.order.upper() == "DESC") - return { - 'total': total, - 'sessions': helper.list_to_camel_case(sessions) - } - - -def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int, - view_type: 
schemas.MetricTimeseriesViewType, metric_type: schemas.MetricType, - metric_of: schemas.MetricOfTable, metric_value: List): - step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate, - density=density, factor=1, decimal=True)) - extra_event = None - if metric_of == schemas.MetricOfTable.visited_url: - extra_event = "events.pages" - elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0: - data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue, - operator=schemas.SearchEventOperator._is)) - full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False, - favorite_only=False, issue=None, project_id=project_id, - user_id=None, extra_event=extra_event) - full_args["step_size"] = step_size - sessions = [] - with pg_client.PostgresClient() as cur: - if metric_type == schemas.MetricType.timeseries: - if view_type == schemas.MetricTimeseriesViewType.line_chart: - main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts - {query_part}) - SELECT generated_timestamp AS timestamp, - COUNT(s) AS count - FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp - LEFT JOIN LATERAL ( SELECT 1 AS s - FROM full_sessions - WHERE start_ts >= generated_timestamp - AND start_ts <= generated_timestamp + %(step_size)s) AS sessions ON (TRUE) - GROUP BY generated_timestamp - ORDER BY generated_timestamp;""", full_args) - else: - main_query = cur.mogrify(f"""SELECT count(DISTINCT s.session_id) AS count - {query_part};""", full_args) - - # print("--------------------") - # print(main_query) - # print("--------------------") - try: - cur.execute(main_query) - except Exception as err: - print("--------- SESSIONS-SERIES QUERY EXCEPTION -----------") - print(main_query.decode('UTF-8')) - print("--------- PAYLOAD -----------") - print(data.json()) - print("--------------------") - raise err - if view_type == schemas.MetricTimeseriesViewType.line_chart: - sessions = cur.fetchall() - else: - sessions = cur.fetchone()["count"] - elif metric_type == schemas.MetricType.table: - if isinstance(metric_of, schemas.MetricOfTable): - main_col = "user_id" - extra_col = "" - extra_where = "" - pre_query = "" - distinct_on = "s.session_id" - if metric_of == schemas.MetricOfTable.user_country: - main_col = "user_country" - elif metric_of == schemas.MetricOfTable.user_device: - main_col = "user_device" - elif metric_of == schemas.MetricOfTable.user_browser: - main_col = "user_browser" - elif metric_of == schemas.MetricOfTable.issues: - main_col = "issue" - extra_col = f", UNNEST(s.issue_types) AS {main_col}" - if len(metric_value) > 0: - extra_where = [] - for i in range(len(metric_value)): - arg_name = f"selected_issue_{i}" - extra_where.append(f"{main_col} = %({arg_name})s") - full_args[arg_name] = metric_value[i] - extra_where = f"WHERE ({' OR '.join(extra_where)})" - elif metric_of == schemas.MetricOfTable.visited_url: - main_col = "path" - extra_col = ", path" - distinct_on += ",path" - main_query = cur.mogrify(f"""{pre_query} - SELECT COUNT(*) AS count, COALESCE(JSONB_AGG(users_sessions) FILTER ( WHERE rn <= 200 ), '[]'::JSONB) AS values - FROM (SELECT {main_col} AS name, - count(DISTINCT session_id) AS session_count, - ROW_NUMBER() OVER (ORDER BY count(full_sessions) DESC) AS rn - FROM (SELECT * - FROM (SELECT DISTINCT ON({distinct_on}) s.session_id, s.user_uuid, - s.user_id, s.user_os, - s.user_browser, 
s.user_device, - s.user_device_type, s.user_country, s.issue_types{extra_col} - {query_part} - ORDER BY s.session_id desc) AS filtred_sessions - ) AS full_sessions - {extra_where} - GROUP BY {main_col} - ORDER BY session_count DESC) AS users_sessions;""", - full_args) - # print("--------------------") - # print(main_query) - # print("--------------------") - cur.execute(main_query) - sessions = cur.fetchone() - for s in sessions["values"]: - s.pop("rn") - sessions["values"] = helper.list_to_camel_case(sessions["values"]) - - return sessions - - -def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema): - return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.request_details, - schemas.EventType.graphql] \ - or event.type in [schemas.PerformanceEventType.location_dom_complete, - schemas.PerformanceEventType.location_largest_contentful_paint_time, - schemas.PerformanceEventType.location_ttfb, - schemas.PerformanceEventType.location_avg_cpu_load, - schemas.PerformanceEventType.location_avg_memory_usage - ] and (event.source is None or len(event.source) == 0) \ - or event.type in [schemas.EventType.request_details, schemas.EventType.graphql] and ( - event.filters is None or len(event.filters) == 0)) - - -# this function generates the query and return the generated-query with the dict of query arguments -def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue, - project_id, user_id, extra_event=None): - ss_constraints = [] - full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate, - "projectId": project_id, "userId": user_id} - extra_constraints = [ - "s.project_id = %(project_id)s", - "s.duration IS NOT NULL" - ] - extra_from = "" - events_query_part = "" - if len(data.filters) > 0: - meta_keys = None - for i, f in enumerate(data.filters): - if not isinstance(f.value, list): - f.value = [f.value] - filter_type = f.type - f.value = helper.values_for_operator(value=f.value, op=f.operator) - f_k = f"f_value{i}" - full_args = {**full_args, **sh.multi_values(f.value, value_key=f_k)} - op = sh.get_sql_operator(f.operator) \ - if filter_type not in [schemas.FilterType.events_count] else f.operator - is_any = sh.isAny_opreator(f.operator) - is_undefined = sh.isUndefined_operator(f.operator) - if not is_any and not is_undefined and len(f.value) == 0: - continue - is_not = False - if sh.is_negation_operator(f.operator): - is_not = True - if filter_type == schemas.FilterType.user_browser: - if is_any: - extra_constraints.append('s.user_browser IS NOT NULL') - ss_constraints.append('ms.user_browser IS NOT NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not, - value_key=f_k)) - - elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]: - if is_any: - extra_constraints.append('s.user_os IS NOT NULL') - ss_constraints.append('ms.user_os IS NOT NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - - elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]: - if is_any: - extra_constraints.append('s.user_device IS NOT NULL') 
- ss_constraints.append('ms.user_device IS NOT NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - - elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]: - if is_any: - extra_constraints.append('s.user_country IS NOT NULL') - ss_constraints.append('ms.user_country IS NOT NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not, - value_key=f_k)) - - elif filter_type in [schemas.FilterType.utm_source]: - if is_any: - extra_constraints.append('s.utm_source IS NOT NULL') - ss_constraints.append('ms.utm_source IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.utm_source IS NULL') - ss_constraints.append('ms.utm_source IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.utm_source {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.utm_source {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - elif filter_type in [schemas.FilterType.utm_medium]: - if is_any: - extra_constraints.append('s.utm_medium IS NOT NULL') - ss_constraints.append('ms.utm_medium IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.utm_medium IS NULL') - ss_constraints.append('ms.utm_medium IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - elif filter_type in [schemas.FilterType.utm_campaign]: - if is_any: - extra_constraints.append('s.utm_campaign IS NOT NULL') - ss_constraints.append('ms.utm_campaign IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.utm_campaign IS NULL') - ss_constraints.append('ms.utm_campaign IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - - elif filter_type == schemas.FilterType.duration: - if len(f.value) > 0 and f.value[0] is not None: - extra_constraints.append("s.duration >= %(minDuration)s") - ss_constraints.append("ms.duration >= %(minDuration)s") - full_args["minDuration"] = f.value[0] - if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0: - extra_constraints.append("s.duration <= %(maxDuration)s") - ss_constraints.append("ms.duration <= %(maxDuration)s") - full_args["maxDuration"] = f.value[1] - elif filter_type == schemas.FilterType.referrer: - # extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)" - if is_any: - extra_constraints.append('s.base_referrer IS NOT NULL') - else: - extra_constraints.append( - sh.multi_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not, - value_key=f_k)) - elif filter_type == events.EventType.METADATA.ui_type: - # get metadata list only if you need it - if meta_keys is None: - meta_keys = metadata.get(project_id=project_id) - meta_keys = {m["key"]: m["index"] for m in 
meta_keys} - if f.source in meta_keys.keys(): - if is_any: - extra_constraints.append(f"s.{metadata.index_to_colname(meta_keys[f.source])} IS NOT NULL") - ss_constraints.append(f"ms.{metadata.index_to_colname(meta_keys[f.source])} IS NOT NULL") - elif is_undefined: - extra_constraints.append(f"s.{metadata.index_to_colname(meta_keys[f.source])} IS NULL") - ss_constraints.append(f"ms.{metadata.index_to_colname(meta_keys[f.source])} IS NULL") - else: - extra_constraints.append( - sh.multi_conditions( - f"s.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text", - f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions( - f"ms.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text", - f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: - if is_any: - extra_constraints.append('s.user_id IS NOT NULL') - ss_constraints.append('ms.user_id IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.user_id IS NULL') - ss_constraints.append('ms.user_id IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"ms.user_id {op} %({f_k})s::text", f.value, is_not=is_not, - value_key=f_k)) - elif filter_type in [schemas.FilterType.user_anonymous_id, - schemas.FilterType.user_anonymous_id_ios]: - if is_any: - extra_constraints.append('s.user_anonymous_id IS NOT NULL') - ss_constraints.append('ms.user_anonymous_id IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.user_anonymous_id IS NULL') - ss_constraints.append('ms.user_anonymous_id IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f"s.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"ms.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not, - value_key=f_k)) - elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]: - if is_any: - extra_constraints.append('s.rev_id IS NOT NULL') - ss_constraints.append('ms.rev_id IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.rev_id IS NULL') - ss_constraints.append('ms.rev_id IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f"s.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"ms.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, - value_key=f_k)) - elif filter_type == schemas.FilterType.platform: - # op = __ sh.get_sql_operator(f.operator) - extra_constraints.append( - sh.multi_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not, - value_key=f_k)) - elif filter_type == schemas.FilterType.issue: - if is_any: - extra_constraints.append("array_length(s.issue_types, 1) > 0") - ss_constraints.append("array_length(ms.issue_types, 1) > 0") - else: - extra_constraints.append( - sh.multi_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not, - value_key=f_k)) - # search sessions with click_rage on a specific selector - if len(f.filters) > 0 and schemas.IssueType.click_rage in f.value: - for j, sf in 
enumerate(f.filters): - if sf.operator == schemas.IssueFilterOperator._on_selector: - f_k = f"f_value{i}_{j}" - full_args = {**full_args, **sh.multi_values(sf.value, value_key=f_k)} - extra_constraints += ["mc.timestamp>=%(startDate)s", - "mc.timestamp<=%(endDate)s", - "mis.type='click_rage'", - sh.multi_conditions(f"mc.selector=%({f_k})s", - sf.value, is_not=is_not, - value_key=f_k)] - - extra_from += """INNER JOIN events.clicks AS mc USING(session_id) - INNER JOIN events_common.issues USING (session_id,timestamp) - INNER JOIN public.issues AS mis USING (issue_id)\n""" - - elif filter_type == schemas.FilterType.events_count: - extra_constraints.append( - sh.multi_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"ms.events_count {op} %({f_k})s", f.value, is_not=is_not, - value_key=f_k)) - # --------------------------------------------------------------------------- - if len(data.events) > 0: - valid_events_count = 0 - for event in data.events: - is_any = sh.isAny_opreator(event.operator) - if not isinstance(event.value, list): - event.value = [event.value] - if __is_valid_event(is_any=is_any, event=event): - valid_events_count += 1 - events_query_from = [] - event_index = 0 - or_events = data.events_order == schemas.SearchEventOrder._or - # events_joiner = " FULL JOIN " if or_events else " INNER JOIN LATERAL " - events_joiner = " UNION " if or_events else " INNER JOIN LATERAL " - for i, event in enumerate(data.events): - event_type = event.type - is_any = sh.isAny_opreator(event.operator) - if not isinstance(event.value, list): - event.value = [event.value] - if not __is_valid_event(is_any=is_any, event=event): - continue - op = sh.get_sql_operator(event.operator) - is_not = False - if sh.is_negation_operator(event.operator): - is_not = True - op = sh.reverse_sql_operator(op) - if event_index == 0 or or_events: - event_from = "%s INNER JOIN public.sessions AS ms USING (session_id)" - event_where = ["ms.project_id = %(projectId)s", "main.timestamp >= %(startDate)s", - "main.timestamp <= %(endDate)s", "ms.start_ts >= %(startDate)s", - "ms.start_ts <= %(endDate)s", "ms.duration IS NOT NULL"] - if favorite_only and not errors_only: - event_from += "INNER JOIN public.user_favorite_sessions AS fs USING(session_id)" - event_where.append("fs.user_id = %(userId)s") - else: - event_from = "%s" - event_where = ["main.timestamp >= %(startDate)s", "main.timestamp <= %(endDate)s", - "main.session_id=event_0.session_id"] - if data.events_order == schemas.SearchEventOrder._then: - event_where.append(f"event_{event_index - 1}.timestamp <= main.timestamp") - e_k = f"e_value{i}" - s_k = e_k + "_source" - if event.type != schemas.PerformanceEventType.time_between_events: - event.value = helper.values_for_operator(value=event.value, op=event.operator) - full_args = {**full_args, - **sh.multi_values(event.value, value_key=e_k), - **sh.multi_values(event.source, value_key=s_k)} - - if event_type == events.EventType.CLICK.ui_type: - event_from = event_from % f"{events.EventType.CLICK.table} AS main " - if not is_any: - if event.operator == schemas.ClickEventExtraOperator._on_selector: - event_where.append( - sh.multi_conditions(f"main.selector = %({e_k})s", event.value, value_key=e_k)) - else: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.CLICK.column} {op} %({e_k})s", event.value, - value_key=e_k)) - - elif event_type == events.EventType.INPUT.ui_type: - event_from = event_from % 
f"{events.EventType.INPUT.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.INPUT.column} {op} %({e_k})s", event.value, - value_key=e_k)) - if event.source is not None and len(event.source) > 0: - event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source, - value_key=f"custom{i}")) - full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")} - - elif event_type == events.EventType.LOCATION.ui_type: - event_from = event_from % f"{events.EventType.LOCATION.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s", - event.value, value_key=e_k)) - elif event_type == events.EventType.CUSTOM.ui_type: - event_from = event_from % f"{events.EventType.CUSTOM.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.CUSTOM.column} {op} %({e_k})s", event.value, - value_key=e_k)) - elif event_type == events.EventType.REQUEST.ui_type: - event_from = event_from % f"{events.EventType.REQUEST.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s", event.value, - value_key=e_k)) - # elif event_type == events.event_type.GRAPHQL.ui_type: - # event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main " - # if not is_any: - # event_where.append( - # _multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k})s", event.value, - # value_key=e_k)) - elif event_type == events.EventType.STATEACTION.ui_type: - event_from = event_from % f"{events.EventType.STATEACTION.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.STATEACTION.column} {op} %({e_k})s", - event.value, value_key=e_k)) - elif event_type == events.EventType.ERROR.ui_type: - event_from = event_from % f"{events.EventType.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)" - event.source = list(set(event.source)) - if not is_any and event.value not in [None, "*", ""]: - event_where.append( - sh.multi_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)", - event.value, value_key=e_k)) - if len(event.source) > 0 and event.source[0] not in [None, "*", ""]: - event_where.append(sh.multi_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k)) - - - # ----- IOS - elif event_type == events.EventType.CLICK_IOS.ui_type: - event_from = event_from % f"{events.EventType.CLICK_IOS.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.CLICK_IOS.column} {op} %({e_k})s", - event.value, value_key=e_k)) - - elif event_type == events.EventType.INPUT_IOS.ui_type: - event_from = event_from % f"{events.EventType.INPUT_IOS.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.INPUT_IOS.column} {op} %({e_k})s", - event.value, value_key=e_k)) - if event.source is not None and len(event.source) > 0: - event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source, - value_key="custom{i}")) - full_args = {**full_args, **sh.multi_values(event.source, f"custom{i}")} - elif event_type == events.EventType.VIEW_IOS.ui_type: - event_from = event_from % f"{events.EventType.VIEW_IOS.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.VIEW_IOS.column} {op} %({e_k})s", - event.value, 
value_key=e_k)) - elif event_type == events.EventType.CUSTOM_IOS.ui_type: - event_from = event_from % f"{events.EventType.CUSTOM_IOS.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.CUSTOM_IOS.column} {op} %({e_k})s", - event.value, value_key=e_k)) - elif event_type == events.EventType.REQUEST_IOS.ui_type: - event_from = event_from % f"{events.EventType.REQUEST_IOS.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.REQUEST_IOS.column} {op} %({e_k})s", - event.value, value_key=e_k)) - elif event_type == events.EventType.ERROR_IOS.ui_type: - event_from = event_from % f"{events.EventType.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)" - if not is_any and event.value not in [None, "*", ""]: - event_where.append( - sh.multi_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)", - event.value, value_key=e_k)) - elif event_type == schemas.PerformanceEventType.fetch_failed: - event_from = event_from % f"{events.EventType.REQUEST.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s", - event.value, value_key=e_k)) - col = performance_event.get_col(event_type) - colname = col["column"] - event_where.append(f"main.{colname} = FALSE") - # elif event_type == schemas.PerformanceEventType.fetch_duration: - # event_from = event_from % f"{events.event_type.REQUEST.table} AS main " - # if not is_any: - # event_where.append( - # _multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k})s", - # event.value, value_key=e_k)) - # col = performance_event.get_col(event_type) - # colname = col["column"] - # tname = "main" - # e_k += "_custom" - # full_args = {**full_args, **_ sh.multiple_values(event.source, value_key=e_k)} - # event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " + - # _multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s", - # event.source, value_key=e_k)) - elif event_type in [schemas.PerformanceEventType.location_dom_complete, - schemas.PerformanceEventType.location_largest_contentful_paint_time, - schemas.PerformanceEventType.location_ttfb, - schemas.PerformanceEventType.location_avg_cpu_load, - schemas.PerformanceEventType.location_avg_memory_usage - ]: - event_from = event_from % f"{events.EventType.LOCATION.table} AS main " - col = performance_event.get_col(event_type) - colname = col["column"] - tname = "main" - if col.get("extraJoin") is not None: - tname = "ej" - event_from += f" INNER JOIN {col['extraJoin']} AS {tname} USING(session_id)" - event_where += [f"{tname}.timestamp >= main.timestamp", f"{tname}.timestamp >= %(startDate)s", - f"{tname}.timestamp <= %(endDate)s"] - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s", - event.value, value_key=e_k)) - e_k += "_custom" - full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)} - - event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " + - sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator.value} %({e_k})s", - event.source, value_key=e_k)) - elif event_type == schemas.PerformanceEventType.time_between_events: - event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN {getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) " - if not 
isinstance(event.value[0].value, list): - event.value[0].value = [event.value[0].value] - if not isinstance(event.value[1].value, list): - event.value[1].value = [event.value[1].value] - event.value[0].value = helper.values_for_operator(value=event.value[0].value, - op=event.value[0].operator) - event.value[1].value = helper.values_for_operator(value=event.value[1].value, - op=event.value[0].operator) - e_k1 = e_k + "_e1" - e_k2 = e_k + "_e2" - full_args = {**full_args, - **sh.multi_values(event.value[0].value, value_key=e_k1), - **sh.multi_values(event.value[1].value, value_key=e_k2)} - s_op = sh.get_sql_operator(event.value[0].operator) - event_where += ["main2.timestamp >= %(startDate)s", "main2.timestamp <= %(endDate)s"] - if event_index > 0 and not or_events: - event_where.append("main2.session_id=event_0.session_id") - is_any = sh.isAny_opreator(event.value[0].operator) - if not is_any: - event_where.append( - sh.multi_conditions( - f"main.{getattr(events.EventType, event.value[0].type).column} {s_op} %({e_k1})s", - event.value[0].value, value_key=e_k1)) - s_op = sh.get_sql_operator(event.value[1].operator) - is_any = sh.isAny_opreator(event.value[1].operator) - if not is_any: - event_where.append( - sh.multi_conditions( - f"main2.{getattr(events.EventType, event.value[1].type).column} {s_op} %({e_k2})s", - event.value[1].value, value_key=e_k2)) - - e_k += "_custom" - full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)} - event_where.append( - sh.multi_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator.value} %({e_k})s", - event.source, value_key=e_k)) - - elif event_type == schemas.EventType.request_details: - event_from = event_from % f"{events.EventType.REQUEST.table} AS main " - apply = False - for j, f in enumerate(event.filters): - is_any = sh.isAny_opreator(f.operator) - if is_any or len(f.value) == 0: - continue - f.value = helper.values_for_operator(value=f.value, op=f.operator) - op = sh.get_sql_operator(f.operator) - e_k_f = e_k + f"_fetch{j}" - full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)} - if f.type == schemas.FetchFilterType._url: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k_f})s::text", - f.value, value_key=e_k_f)) - apply = True - elif f.type == schemas.FetchFilterType._status_code: - event_where.append( - sh.multi_conditions(f"main.status_code {f.operator.value} %({e_k_f})s::integer", f.value, - value_key=e_k_f)) - apply = True - elif f.type == schemas.FetchFilterType._method: - event_where.append( - sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f)) - apply = True - elif f.type == schemas.FetchFilterType._duration: - event_where.append( - sh.multi_conditions(f"main.duration {f.operator.value} %({e_k_f})s::integer", f.value, - value_key=e_k_f)) - apply = True - elif f.type == schemas.FetchFilterType._request_body: - event_where.append( - sh.multi_conditions(f"main.request_body {op} %({e_k_f})s::text", f.value, - value_key=e_k_f)) - apply = True - elif f.type == schemas.FetchFilterType._response_body: - event_where.append( - sh.multi_conditions(f"main.response_body {op} %({e_k_f})s::text", f.value, - value_key=e_k_f)) - apply = True - else: - print(f"undefined FETCH filter: {f.type}") - if not apply: - continue - elif event_type == schemas.EventType.graphql: - event_from = event_from % f"{events.EventType.GRAPHQL.table} AS main " - for j, f in enumerate(event.filters): - is_any = sh.isAny_opreator(f.operator) - if is_any or 
len(f.value) == 0: - continue - f.value = helper.values_for_operator(value=f.value, op=f.operator) - op = sh.get_sql_operator(f.operator) - e_k_f = e_k + f"_graphql{j}" - full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)} - if f.type == schemas.GraphqlFilterType._name: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value, - value_key=e_k_f)) - elif f.type == schemas.GraphqlFilterType._method: - event_where.append( - sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f)) - elif f.type == schemas.GraphqlFilterType._request_body: - event_where.append( - sh.multi_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f)) - elif f.type == schemas.GraphqlFilterType._response_body: - event_where.append( - sh.multi_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f)) - else: - print(f"undefined GRAPHQL filter: {f.type}") - else: - continue - if event_index == 0 or or_events: - event_where += ss_constraints - if is_not: - if event_index == 0 or or_events: - events_query_from.append(f"""\ - (SELECT - session_id, - 0 AS timestamp - FROM sessions - WHERE EXISTS(SELECT session_id - FROM {event_from} - WHERE {" AND ".join(event_where)} - AND sessions.session_id=ms.session_id) IS FALSE - AND project_id = %(projectId)s - AND start_ts >= %(startDate)s - AND start_ts <= %(endDate)s - AND duration IS NOT NULL - ) {"" if or_events else (f"AS event_{event_index}" + ("ON(TRUE)" if event_index > 0 else ""))}\ - """) - else: - events_query_from.append(f"""\ - (SELECT - event_0.session_id, - event_{event_index - 1}.timestamp AS timestamp - WHERE EXISTS(SELECT session_id FROM {event_from} WHERE {" AND ".join(event_where)}) IS FALSE - ) AS event_{event_index} {"ON(TRUE)" if event_index > 0 else ""}\ - """) - else: - events_query_from.append(f"""\ - (SELECT main.session_id, {"MIN" if event_index < (valid_events_count - 1) else "MAX"}(main.timestamp) AS timestamp - FROM {event_from} - WHERE {" AND ".join(event_where)} - GROUP BY 1 - ) {"" if or_events else (f"AS event_{event_index} " + ("ON(TRUE)" if event_index > 0 else ""))}\ - """) - event_index += 1 - if event_index > 0: - if or_events: - events_query_part = f"""SELECT - session_id, - MIN(timestamp) AS first_event_ts, - MAX(timestamp) AS last_event_ts - FROM ({events_joiner.join(events_query_from)}) AS u - GROUP BY 1""" - else: - events_query_part = f"""SELECT - event_0.session_id, - MIN(event_0.timestamp) AS first_event_ts, - MAX(event_{event_index - 1}.timestamp) AS last_event_ts - FROM {events_joiner.join(events_query_from)} - GROUP BY 1""" - else: - data.events = [] - # --------------------------------------------------------------------------- - if data.startDate is not None: - extra_constraints.append("s.start_ts >= %(startDate)s") - if data.endDate is not None: - extra_constraints.append("s.start_ts <= %(endDate)s") - # if data.platform is not None: - # if data.platform == schemas.PlatformType.mobile: - # extra_constraints.append(b"s.user_os in ('Android','BlackBerry OS','iOS','Tizen','Windows Phone')") - # elif data.platform == schemas.PlatformType.desktop: - # extra_constraints.append( - # b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')") - - if errors_only: - extra_from += f" INNER JOIN {events.EventType.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)" - extra_constraints.append("ser.source = 'js_exception'") - 
extra_constraints.append("ser.project_id = %(project_id)s") - # if error_status != schemas.ErrorStatus.all: - # extra_constraints.append("ser.status = %(error_status)s") - # full_args["error_status"] = error_status - # if favorite_only: - # extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" - # extra_constraints.append("ufe.user_id = %(userId)s") - - if favorite_only and not errors_only and user_id is not None: - extra_from += """INNER JOIN (SELECT user_id, session_id - FROM public.user_favorite_sessions - WHERE user_id = %(userId)s) AS favorite_sessions - USING (session_id)""" - elif not favorite_only and not errors_only and user_id is not None: - extra_from += """LEFT JOIN (SELECT user_id, session_id - FROM public.user_favorite_sessions - WHERE user_id = %(userId)s) AS favorite_sessions - USING (session_id)""" - extra_join = "" - if issue is not None: - extra_join = """ - INNER JOIN LATERAL(SELECT TRUE FROM events_common.issues INNER JOIN public.issues AS p_issues USING (issue_id) - WHERE issues.session_id=f.session_id - AND p_issues.type=%(issue_type)s - AND p_issues.context_string=%(issue_contextString)s - AND timestamp >= f.first_event_ts - AND timestamp <= f.last_event_ts) AS issues ON(TRUE) - """ - full_args["issue_contextString"] = issue["contextString"] - full_args["issue_type"] = issue["type"] - if extra_event: - extra_join += f"""INNER JOIN {extra_event} AS ev USING(session_id)""" - extra_constraints.append("ev.timestamp>=%(startDate)s") - extra_constraints.append("ev.timestamp<=%(endDate)s") - query_part = f"""\ - FROM {f"({events_query_part}) AS f" if len(events_query_part) > 0 else "public.sessions AS s"} - {extra_join} - {"INNER JOIN public.sessions AS s USING(session_id)" if len(events_query_part) > 0 else ""} - {extra_from} - WHERE - {" AND ".join(extra_constraints)}""" - return full_args, query_part - - -def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None): - if project_id is None: - all_projects = projects.get_projects(tenant_id=tenant_id, recording_state=False) - else: - all_projects = [ - projects.get_project(tenant_id=tenant_id, project_id=int(project_id), include_last_session=False, - include_gdpr=False)] - - all_projects = {int(p["projectId"]): p["name"] for p in all_projects} - project_ids = list(all_projects.keys()) - - available_keys = metadata.get_keys_by_projects(project_ids) - for i in available_keys: - available_keys[i]["user_id"] = schemas.FilterType.user_id - available_keys[i]["user_anonymous_id"] = schemas.FilterType.user_anonymous_id - results = {} - for i in project_ids: - if m_key not in available_keys[i].values(): - available_keys.pop(i) - results[i] = {"total": 0, "sessions": [], "missingMetadata": True} - project_ids = list(available_keys.keys()) - if len(project_ids) > 0: - with pg_client.PostgresClient() as cur: - sub_queries = [] - for i in project_ids: - col_name = list(available_keys[i].keys())[list(available_keys[i].values()).index(m_key)] - sub_queries.append(cur.mogrify( - f"(SELECT COALESCE(COUNT(s.*)) AS count FROM public.sessions AS s WHERE s.project_id = %(id)s AND s.{col_name} = %(value)s) AS \"{i}\"", - {"id": i, "value": m_value}).decode('UTF-8')) - query = f"""SELECT {", ".join(sub_queries)};""" - cur.execute(query=query) - - rows = cur.fetchone() - - sub_queries = [] - for i in rows.keys(): - results[i] = {"total": rows[i], "sessions": [], "missingMetadata": False, "name": all_projects[int(i)]} - if rows[i] > 0: - col_name = 
list(available_keys[int(i)].keys())[list(available_keys[int(i)].values()).index(m_key)] - sub_queries.append( - cur.mogrify( - f"""( - SELECT * - FROM ( - SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS} - FROM public.sessions AS s LEFT JOIN (SELECT session_id - FROM public.user_favorite_sessions - WHERE user_favorite_sessions.user_id = %(userId)s - ) AS favorite_sessions USING (session_id) - WHERE s.project_id = %(id)s AND s.duration IS NOT NULL AND s.{col_name} = %(value)s - ) AS full_sessions - ORDER BY favorite DESC, issue_score DESC - LIMIT 10 - )""", - {"id": i, "value": m_value, "userId": user_id}).decode('UTF-8')) - if len(sub_queries) > 0: - cur.execute("\nUNION\n".join(sub_queries)) - rows = cur.fetchall() - for i in rows: - results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i)) - return results - - -def get_user_sessions(project_id, user_id, start_date, end_date): - with pg_client.PostgresClient() as cur: - constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"] - if start_date is not None: - constraints.append("s.start_ts >= %(startDate)s") - if end_date is not None: - constraints.append("s.start_ts <= %(endDate)s") - - query_part = f"""\ - FROM public.sessions AS s - WHERE {" AND ".join(constraints)}""" - - cur.execute(cur.mogrify(f"""\ - SELECT s.project_id, - s.session_id::text AS session_id, - s.user_uuid, - s.user_id, - s.user_os, - s.user_browser, - s.user_device, - s.user_country, - s.start_ts, - s.duration, - s.events_count, - s.pages_count, - s.errors_count - {query_part} - ORDER BY s.session_id - LIMIT 50;""", { - "projectId": project_id, - "userId": user_id, - "startDate": start_date, - "endDate": end_date - })) - - sessions = cur.fetchall() - return helper.list_to_camel_case(sessions) - - -def get_session_user(project_id, user_id): - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """\ - SELECT - user_id, - count(*) as session_count, - max(start_ts) as last_seen, - min(start_ts) as first_seen - FROM - "public".sessions - WHERE - project_id = %(project_id)s - AND user_id = %(userId)s - AND duration is not null - GROUP BY user_id; - """, - {"project_id": project_id, "userId": user_id} - ) - cur.execute(query=query) - data = cur.fetchone() - return helper.dict_to_camel_case(data) - - -def get_session_ids_by_user_ids(project_id, user_ids): - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """\ - SELECT session_id FROM public.sessions - WHERE - project_id = %(project_id)s AND user_id IN %(userId)s;""", - {"project_id": project_id, "userId": tuple(user_ids)} - ) - ids = cur.execute(query=query) - return ids - - -def delete_sessions_by_session_ids(session_ids): - with pg_client.PostgresClient(unlimited_query=True) as cur: - query = cur.mogrify( - """\ - DELETE FROM public.sessions - WHERE - session_id IN %(session_ids)s;""", - {"session_ids": tuple(session_ids)} - ) - cur.execute(query=query) - - return True - - -def delete_sessions_by_user_ids(project_id, user_ids): - with pg_client.PostgresClient(unlimited_query=True) as cur: - query = cur.mogrify( - """\ - DELETE FROM public.sessions - WHERE - project_id = %(project_id)s AND user_id IN %(userId)s;""", - {"project_id": project_id, "userId": tuple(user_ids)} - ) - cur.execute(query=query) - - return True - - -def count_all(): - with pg_client.PostgresClient(unlimited_query=True) as cur: - cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions") - row = cur.fetchone() - return 
row.get("count", 0) if row else 0 - - -def session_exists(project_id, session_id): - with pg_client.PostgresClient() as cur: - query = cur.mogrify("""SELECT 1 - FROM public.sessions - WHERE session_id=%(session_id)s - AND project_id=%(project_id)s - LIMIT 1;""", - {"project_id": project_id, "session_id": session_id}) - cur.execute(query) - row = cur.fetchone() - return row is not None diff --git a/ee/api/chalicelib/core/sessions_exp.py b/ee/api/chalicelib/core/sessions_exp.py index f60090ed4..888800681 100644 --- a/ee/api/chalicelib/core/sessions_exp.py +++ b/ee/api/chalicelib/core/sessions_exp.py @@ -2,11 +2,8 @@ from typing import List, Union import schemas import schemas_ee -from chalicelib.core import events, metadata, events_ios, \ - sessions_mobs, issues, projects, resources, assist, performance_event, metrics, sessions_devtool, \ - sessions_notes -from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper, errors_helper -from chalicelib.utils import sql_helper as sh +from chalicelib.core import events, metadata, projects, performance_event, metrics +from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper SESSION_PROJECTION_COLS_CH = """\ s.project_id, @@ -51,94 +48,6 @@ SESSION_PROJECTION_COLS_CH_MAP = """\ """ -def __group_metadata(session, project_metadata): - meta = {} - for m in project_metadata.keys(): - if project_metadata[m] is not None and session.get(m) is not None: - meta[project_metadata[m]] = session[m] - session.pop(m) - return meta - - -# This function should not use Clickhouse because it doesn't have `file_key` -def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, include_fav_viewed=False, - group_metadata=False, live=True): - with pg_client.PostgresClient() as cur: - extra_query = [] - if include_fav_viewed: - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_favorite_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS favorite""") - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS viewed""") - query = cur.mogrify( - f"""\ - SELECT - s.*, - s.session_id::text AS session_id, - (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key, - encode(file_key,'hex') AS file_key - {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} - {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} - FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} - WHERE s.project_id = %(project_id)s - AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": context.user_id} - ) - # print("===============") - # print(query) - cur.execute(query=query) - - data = cur.fetchone() - if data is not None: - data = helper.dict_to_camel_case(data) - if full_data: - if data["platform"] == 'ios': - data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) - for e in data['events']: - if e["type"].endswith("_IOS"): - e["type"] = e["type"][:-len("_IOS")] - data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) - data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, - session_id=session_id) - data['mobsUrl'] = 
sessions_mobs.get_ios(session_id=session_id) - else: - data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, - group_clickrage=True) - all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) - data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] - # to keep only the first stack - # limit the number of errors to reduce the response-body size - data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors - if e['source'] == "js_exception"][:500] - data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, - session_id=session_id) - data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) - data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) - data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, - context=context) - data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, - start_ts=data["startTs"], - duration=data["duration"]) - - data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, - session_id=session_id, user_id=context.user_id) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) - data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) - data['live'] = live and assist.is_live(project_id=project_id, - session_id=session_id, - project_key=data["projectKey"]) - data["inDB"] = True - return data - elif live: - return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) - else: - return None - - def __get_sql_operator(op: schemas.SearchEventOperator): return { schemas.SearchEventOperator._is: "=", diff --git a/ee/api/chalicelib/core/sessions_favorite.py b/ee/api/chalicelib/core/sessions_favorite.py index d8ae4e1f7..85e308756 100644 --- a/ee/api/chalicelib/core/sessions_favorite.py +++ b/ee/api/chalicelib/core/sessions_favorite.py @@ -10,13 +10,15 @@ def add_favorite_session(context: schemas_ee.CurrentContext, project_id, session cur.execute( cur.mogrify(f"""\ INSERT INTO public.user_favorite_sessions(user_id, session_id) - VALUES (%(userId)s,%(sessionId)s);""", - {"userId": context.user_id, "sessionId": session_id}) + VALUES (%(userId)s,%(session_id)s) + RETURNING session_id;""", + {"userId": context.user_id, "session_id": session_id}) ) - - sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id) - return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, - full_data=False, include_fav_viewed=True, context=context) + row = cur.fetchone() + if row: + sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id) + return {"data": {"sessionId": session_id}} + return {"errors": ["something went wrong"]} def remove_favorite_session(context: schemas_ee.CurrentContext, project_id, session_id): @@ -25,12 +27,15 @@ def remove_favorite_session(context: schemas_ee.CurrentContext, project_id, sess cur.mogrify(f"""\ DELETE FROM public.user_favorite_sessions WHERE user_id = %(userId)s - AND session_id = %(sessionId)s;""", - {"userId": context.user_id, "sessionId": session_id}) + AND session_id = %(session_id)s + RETURNING session_id;""", + {"userId": context.user_id, "session_id": session_id}) ) - sessions_favorite_exp.remove_favorite_session(project_id=project_id, 
user_id=context.user_id, session_id=session_id) - return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, - full_data=False, include_fav_viewed=True, context=context) + row = cur.fetchone() + if row: + sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id) + return {"data": {"sessionId": session_id}} + return {"errors": ["something went wrong"]} def favorite_session(context: schemas_ee.CurrentContext, project_id, session_id): diff --git a/ee/api/chalicelib/core/sessions_replay.py b/ee/api/chalicelib/core/sessions_replay.py new file mode 100644 index 000000000..319eb13b6 --- /dev/null +++ b/ee/api/chalicelib/core/sessions_replay.py @@ -0,0 +1,192 @@ +import schemas +import schemas_ee +from chalicelib.core import events, metadata, events_ios, \ + sessions_mobs, issues, resources, assist, sessions_devtool, sessions_notes +from chalicelib.utils import errors_helper +from chalicelib.utils import pg_client, helper + + +def __group_metadata(session, project_metadata): + meta = {} + for m in project_metadata.keys(): + if project_metadata[m] is not None and session.get(m) is not None: + meta[project_metadata[m]] = session[m] + session.pop(m) + return meta + + +# for backward compatibility +# This function should not use Clickhouse because it doesn't have `file_key` +def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, + include_fav_viewed=False, group_metadata=False, live=True): + with pg_client.PostgresClient() as cur: + extra_query = [] + if include_fav_viewed: + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_favorite_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS favorite""") + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_viewed_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS viewed""") + query = cur.mogrify( + f"""\ + SELECT + s.*, + s.session_id::text AS session_id, + (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key, + encode(file_key,'hex') AS file_key + {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} + {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} + FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + data = cur.fetchone() + if data is not None: + data = helper.dict_to_camel_case(data) + if full_data: + if data["platform"] == 'ios': + data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) + for e in data['events']: + if e["type"].endswith("_IOS"): + e["type"] = e["type"][:-len("_IOS")] + data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) + data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, + session_id=session_id) + data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) + else: + data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, + group_clickrage=True) + all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) + data['stackEvents'] 
= [e for e in all_errors if e['source'] != "js_exception"] + # to keep only the first stack + # limit the number of errors to reduce the response-body size + data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors + if e['source'] == "js_exception"][:500] + data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, + session_id=session_id) + data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, + context=context) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + start_ts=data["startTs"], duration=data["duration"]) + + data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, + session_id=session_id, user_id=context.user_id) + data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) + data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, + project_key=data["projectKey"]) + data["inDB"] = True + return data + elif live: + return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) + else: + return None + + +# This function should not use Clickhouse because it doesn't have `file_key` +def get_replay(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, + group_metadata=False, live=True): + with pg_client.PostgresClient() as cur: + extra_query = [] + if include_fav_viewed: + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_favorite_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS favorite""") + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_viewed_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS viewed""") + query = cur.mogrify( + f"""\ + SELECT + s.*, + s.session_id::text AS session_id, + (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key + {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} + {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} + FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + data = cur.fetchone() + if data is not None: + data = helper.dict_to_camel_case(data) + if full_data: + if data["platform"] == 'ios': + data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) + else: + data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, + context=context) + + data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, + 
project_key=data["projectKey"]) + data["inDB"] = True + return data + elif live: + return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) + else: + return None + + +def get_events(project_id, session_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify( + f"""SELECT session_id, platform, start_ts, duration + FROM public.sessions AS s + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + s_data = cur.fetchone() + if s_data is not None: + s_data = helper.dict_to_camel_case(s_data) + data = {} + if s_data["platform"] == 'ios': + data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) + for e in data['events']: + if e["type"].endswith("_IOS"): + e["type"] = e["type"][:-len("_IOS")] + data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) + data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, + session_id=session_id) + else: + data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, + group_clickrage=True) + all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) + data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] + # to keep only the first stack + # limit the number of errors to reduce the response-body size + data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors + if e['source'] == "js_exception"][:500] + data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, + session_id=session_id) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + start_ts=s_data["startTs"], duration=s_data["duration"]) + + data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) + return data + else: + return None diff --git a/ee/api/chalicelib/core/tenants.py b/ee/api/chalicelib/core/tenants.py index 30a87bd29..7ea621007 100644 --- a/ee/api/chalicelib/core/tenants.py +++ b/ee/api/chalicelib/core/tenants.py @@ -51,7 +51,7 @@ def get_by_api_key(api_key): WHERE tenants.api_key = %(api_key)s AND tenants.deleted_at ISNULL LIMIT 1;""", - {"api_key": api_key}) + {"api_key": api_key}) cur.execute(query=query) return helper.dict_to_camel_case(cur.fetchone()) @@ -94,7 +94,7 @@ def update(tenant_id, user_id, data: schemas.UpdateTenantSchema): return edit_client(tenant_id=tenant_id, changes=changes) -def tenants_exists(): - with pg_client.PostgresClient() as cur: +def tenants_exists(use_pool=True): + with pg_client.PostgresClient(use_pool=use_pool) as cur: cur.execute(f"SELECT EXISTS(SELECT 1 FROM public.tenants)") return cur.fetchone()["exists"] diff --git a/ee/api/chalicelib/utils/assist_helper.py b/ee/api/chalicelib/utils/assist_helper.py index d182226c0..061b329ef 100644 --- a/ee/api/chalicelib/utils/assist_helper.py +++ b/ee/api/chalicelib/utils/assist_helper.py @@ -37,13 +37,16 @@ def get_full_config(): if __get_secret() is not None: for i in range(len(servers)): url = servers[i].split(",")[0] - servers[i] = {"url": url} if url.lower().startswith("stun") else {"url": url, **credentials} + # servers[i] = {"url": url} if url.lower().startswith("stun") else {"url": url, **credentials} + servers[i] = {"urls": url} if url.lower().startswith("stun") else {"urls": url, **credentials} else: for i in range(len(servers)): s = 
servers[i].split(",") if len(s) == 3: - servers[i] = {"url": s[0], "username": s[1], "credential": s[2]} + # servers[i] = {"url": s[0], "username": s[1], "credential": s[2]} + servers[i] = {"urls": s[0], "username": s[1], "credential": s[2]} else: - servers[i] = {"url": s[0]} + # servers[i] = {"url": s[0]} + servers[i] = {"urls": s[0]} return servers diff --git a/ee/api/chalicelib/utils/ch_client.py b/ee/api/chalicelib/utils/ch_client.py index 576bbc590..ef1839189 100644 --- a/ee/api/chalicelib/utils/ch_client.py +++ b/ee/api/chalicelib/utils/ch_client.py @@ -20,8 +20,9 @@ class ClickHouseClient: def __init__(self): self.__client = clickhouse_driver.Client(host=config("ch_host"), - database=config("ch_database",default="default", cast=str), - password=config("ch_password",default="", cast=str), + database=config("ch_database", default="default"), + user=config("ch_user", default="default"), + password=config("ch_password", default=""), port=config("ch_port", cast=int), settings=settings) \ if self.__client is None else self.__client diff --git a/ee/api/clean-dev.sh b/ee/api/clean-dev.sh index acc91e7b7..c47a80ee8 100755 --- a/ee/api/clean-dev.sh +++ b/ee/api/clean-dev.sh @@ -35,6 +35,7 @@ rm -rf ./chalicelib/core/log_tool_stackdriver.py rm -rf ./chalicelib/core/log_tool_sumologic.py rm -rf ./chalicelib/core/metadata.py rm -rf ./chalicelib/core/mobile.py +rm -rf ./chalicelib/core/sessions.py rm -rf ./chalicelib/core/sessions_assignments.py #exp rm -rf ./chalicelib/core/sessions_metas.py rm -rf ./chalicelib/core/sessions_mobs.py @@ -78,9 +79,12 @@ rm -rf ./Dockerfile_bundle rm -rf ./entrypoint.bundle.sh rm -rf ./chalicelib/core/heatmaps.py rm -rf ./schemas.py +rm -rf ./routers/subs/health.py rm -rf ./routers/subs/v1_api.py #exp rm -rf ./chalicelib/core/custom_metrics.py rm -rf ./chalicelib/core/performance_event.py rm -rf ./chalicelib/core/saved_search.py rm -rf ./app_alerts.py rm -rf ./build_alerts.sh +rm -rf ./run-dev.sh +rm -rf ./run-alerts-dev.sh diff --git a/ee/api/entrypoint.sh b/ee/api/entrypoint.sh index ebd646a7d..e63d4e2af 100755 --- a/ee/api/entrypoint.sh +++ b/ee/api/entrypoint.sh @@ -2,4 +2,4 @@ sh env_vars.sh source /tmp/.env.override -uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload --proxy-headers +uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --proxy-headers diff --git a/ee/api/entrypoint_alerts.sh b/ee/api/entrypoint_alerts.sh index acf8b390a..410015142 100755 --- a/ee/api/entrypoint_alerts.sh +++ b/ee/api/entrypoint_alerts.sh @@ -2,4 +2,4 @@ export ASSIST_KEY=ignore sh env_vars.sh source /tmp/.env.override -uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload +uvicorn app:app --host 0.0.0.0 --port 8888 diff --git a/ee/api/env.default b/ee/api/env.default index cdbc3d256..df353d071 100644 --- a/ee/api/env.default +++ b/ee/api/env.default @@ -70,4 +70,7 @@ SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob PRESIGNED_URL_EXPIRATION=3600 ASSIST_JWT_EXPIRATION=144000 -ASSIST_JWT_SECRET= \ No newline at end of file +ASSIST_JWT_SECRET= +REDIS_STRING=redis://redis-master.db.svc.cluster.local:6379 +KAFKA_SERVERS=kafka.db.svc.cluster.local:9092 +KAFKA_USE_SSL=false \ No newline at end of file diff --git a/ee/api/requirements-alerts.txt b/ee/api/requirements-alerts.txt index 250882623..6b6901ca5 100644 --- a/ee/api/requirements-alerts.txt +++ b/ee/api/requirements-alerts.txt @@ -8,7 +8,7 @@ jira==3.4.1 -fastapi==0.92.0 +fastapi==0.94.1 uvicorn[standard]==0.20.0 python-decouple==3.7 pydantic[email]==1.10.4 diff --git 
a/ee/api/requirements.txt b/ee/api/requirements.txt index c8b76e700..b5da59c4b 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -8,7 +8,7 @@ jira==3.4.1 -fastapi==0.92.0 +fastapi==0.95.0 uvicorn[standard]==0.20.0 python-decouple==3.7 pydantic[email]==1.10.4 @@ -17,3 +17,6 @@ apscheduler==3.10.0 clickhouse-driver==0.2.5 python3-saml==1.15.0 python-multipart==0.0.5 + +redis==4.5.1 +#confluent-kafka==2.0.2 \ No newline at end of file diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 8c8aa55b6..209fdbd6d 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -7,7 +7,7 @@ from starlette.responses import RedirectResponse, FileResponse import schemas import schemas_ee from chalicelib.core import sessions, assist, heatmaps, sessions_favorite, sessions_assignments, errors, errors_viewed, \ - errors_favorite, sessions_notes, click_maps + errors_favorite, sessions_notes, click_maps, sessions_replay from chalicelib.core import sessions_viewed from chalicelib.core import tenants, users, projects, license from chalicelib.core import webhook @@ -59,7 +59,8 @@ async def edit_account(data: schemas_ee.EditUserSchema = Body(...), @app.post('/integrations/slack', tags=['integrations']) @app.put('/integrations/slack', tags=['integrations']) -async def add_slack_client(data: schemas.AddCollaborationSchema, context: schemas.CurrentContext = Depends(OR_context)): +async def add_slack_integration(data: schemas.AddCollaborationSchema, + context: schemas.CurrentContext = Depends(OR_context)): n = Slack.add(tenant_id=context.tenant_id, data=data) if n is None: return { @@ -155,13 +156,15 @@ async def get_projects(context: schemas.CurrentContext = Depends(OR_context)): stack_integrations=True, user_id=context.user_id)} -@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"], dependencies=[OR_scope(Permissions.session_replay)]) +# for backward compatibility +@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions", "replay"], + dependencies=[OR_scope(Permissions.session_replay)]) async def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): return {"errors": ["session not found"]} - data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, - include_fav_viewed=True, group_metadata=True, context=context) + data = sessions_replay.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, + include_fav_viewed=True, group_metadata=True, context=context) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): @@ -172,6 +175,39 @@ async def get_session(projectId: int, sessionId: Union[int, str], background_tas } +@app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"], + dependencies=[OR_scope(Permissions.session_replay)]) +async def get_session_events(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, + context: schemas.CurrentContext = Depends(OR_context)): + if isinstance(sessionId, str): + return {"errors": ["session not found"]} + data = sessions_replay.get_replay(project_id=projectId, session_id=sessionId, full_data=True, + include_fav_viewed=True, group_metadata=True, context=context) + if data is None: + return {"errors": ["session not found"]} + if data.get("inDB"): + background_tasks.add_task(sessions_viewed.view_session, project_id=projectId, user_id=context.user_id, + 
session_id=sessionId) + return { + 'data': data + } + + +@app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"], + dependencies=[OR_scope(Permissions.session_replay)]) +async def get_session_events(projectId: int, sessionId: Union[int, str], + context: schemas.CurrentContext = Depends(OR_context)): + if isinstance(sessionId, str): + return {"errors": ["session not found"]} + data = sessions_replay.get_events(project_id=projectId, session_id=sessionId) + if data is None: + return {"errors": ["session not found"]} + + return { + 'data': data + } + + @app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"], dependencies=[OR_scope(Permissions.dev_tools)]) async def get_error_trace(projectId: int, sessionId: int, errorId: str, @@ -250,8 +286,8 @@ async def get_live_session(projectId: int, sessionId: str, background_tasks: Bac context: schemas_ee.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: - data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId, - full_data=True, include_fav_viewed=True, group_metadata=True, live=False) + data = sessions_replay.get_replay(context=context, project_id=projectId, session_id=sessionId, + full_data=True, include_fav_viewed=True, group_metadata=True, live=False) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): diff --git a/ee/api/run-dev.sh b/ee/api/run-dev.sh deleted file mode 100755 index 76682286d..000000000 --- a/ee/api/run-dev.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/zsh - -uvicorn app:app --reload \ No newline at end of file diff --git a/ee/utilities/.gitignore b/ee/assist/.gitignore similarity index 100% rename from ee/utilities/.gitignore rename to ee/assist/.gitignore diff --git a/ee/utilities/Dockerfile b/ee/assist/Dockerfile similarity index 100% rename from ee/utilities/Dockerfile rename to ee/assist/Dockerfile diff --git a/ee/utilities/clean-dev.sh b/ee/assist/clean-dev.sh similarity index 100% rename from ee/utilities/clean-dev.sh rename to ee/assist/clean-dev.sh diff --git a/ee/utilities/package-lock.json b/ee/assist/package-lock.json similarity index 97% rename from ee/utilities/package-lock.json rename to ee/assist/package-lock.json index 1d74677cf..a94f1d5bb 100644 --- a/ee/utilities/package-lock.json +++ b/ee/assist/package-lock.json @@ -1,12 +1,12 @@ { "name": "assist-server", - "version": "1.0.0", + "version": "v1.11.0-ee", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "assist-server", - "version": "1.0.0", + "version": "v1.11.0-ee", "license": "Elastic License 2.0 (ELv2)", "dependencies": { "@maxmind/geoip2-node": "^3.5.0", @@ -38,9 +38,9 @@ } }, "node_modules/@redis/client": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/@redis/client/-/client-1.5.5.tgz", - "integrity": "sha512-fuMnpDYSjT5JXR9rrCW1YWA4L8N/9/uS4ImT3ZEC/hcaQRI1D/9FvwjriRj1UvepIgzZXthFVKMNRzP/LNL7BQ==", + "version": "1.5.6", + "resolved": "https://registry.npmjs.org/@redis/client/-/client-1.5.6.tgz", + "integrity": "sha512-dFD1S6je+A47Lj22jN/upVU2fj4huR7S9APd7/ziUXsIXDL+11GPYti4Suv5y8FuXaN+0ZG4JF+y1houEJ7ToA==", "dependencies": { "cluster-key-slot": "1.1.2", "generic-pool": "3.9.0", @@ -67,9 +67,9 @@ } }, "node_modules/@redis/search": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@redis/search/-/search-1.1.1.tgz", - "integrity": 
"sha512-pqCXTc5e7wJJgUuJiC3hBgfoFRoPxYzwn0BEfKgejTM7M/9zP3IpUcqcjgfp8hF+LoV8rHZzcNTz7V+pEIY7LQ==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@redis/search/-/search-1.1.2.tgz", + "integrity": "sha512-/cMfstG/fOh/SsE+4/BQGeuH/JJloeWuH+qJzM8dbxuWvdWibWAOAHHCZTMPhV3xIlH4/cUEIA8OV5QnYpaVoA==", "peerDependencies": { "@redis/client": "^1.0.0" } @@ -117,9 +117,9 @@ } }, "node_modules/@types/node": { - "version": "18.14.1", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.1.tgz", - "integrity": "sha512-QH+37Qds3E0eDlReeboBxfHbX9omAcBCXEzswCu6jySP642jiM3cYSIkU/REqwhCUqXdonHFuBfJDiAJxMNhaQ==" + "version": "18.15.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.1.tgz", + "integrity": "sha512-U2TWca8AeHSmbpi314QBESRk7oPjSZjDsR+c+H4ECC1l+kFgpZf8Ydhv3SJpPy51VyZHHqxlb6mTTqYNNRVAIw==" }, "node_modules/accepts": { "version": "1.3.8", @@ -878,15 +878,15 @@ } }, "node_modules/redis": { - "version": "4.6.4", - "resolved": "https://registry.npmjs.org/redis/-/redis-4.6.4.tgz", - "integrity": "sha512-wi2tgDdQ+Q8q+PR5FLRx4QvDiWaA+PoJbrzsyFqlClN5R4LplHqN3scs/aGjE//mbz++W19SgxiEnQ27jnCRaA==", + "version": "4.6.5", + "resolved": "https://registry.npmjs.org/redis/-/redis-4.6.5.tgz", + "integrity": "sha512-O0OWA36gDQbswOdUuAhRL6mTZpHFN525HlgZgDaVNgCJIAZR3ya06NTESb0R+TUZ+BFaDpz6NnnVvoMx9meUFg==", "dependencies": { "@redis/bloom": "1.2.0", - "@redis/client": "1.5.5", + "@redis/client": "1.5.6", "@redis/graph": "1.1.0", "@redis/json": "1.0.4", - "@redis/search": "1.1.1", + "@redis/search": "1.1.2", "@redis/time-series": "1.0.4" } }, @@ -1085,9 +1085,9 @@ } }, "node_modules/ua-parser-js": { - "version": "1.0.33", - "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.33.tgz", - "integrity": "sha512-RqshF7TPTE0XLYAqmjlu5cLLuGdKrNu9O1KLA/qp39QtbZwuzwv1dT46DZSopoUMsYgXpB3Cv8a03FI8b74oFQ==", + "version": "1.0.34", + "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.34.tgz", + "integrity": "sha512-K9mwJm/DaB6mRLZfw6q8IMXipcrmuT6yfhYmwhAkuh+81sChuYstYA+znlgaflUPaYUa3odxKPKGw6Vw/lANew==", "funding": [ { "type": "opencollective", diff --git a/ee/utilities/package.json b/ee/assist/package.json similarity index 97% rename from ee/utilities/package.json rename to ee/assist/package.json index 3fcedf03b..4ef88774a 100644 --- a/ee/utilities/package.json +++ b/ee/assist/package.json @@ -1,6 +1,6 @@ { "name": "assist-server", - "version": "1.0.0", + "version": "v1.11.0-ee", "description": "assist server to get live sessions & sourcemaps reader to get stack trace", "main": "peerjs-server.js", "scripts": { diff --git a/ee/utilities/prepare-dev.sh b/ee/assist/prepare-dev.sh similarity index 75% rename from ee/utilities/prepare-dev.sh rename to ee/assist/prepare-dev.sh index 2daecbfc1..8da98eac3 100755 --- a/ee/utilities/prepare-dev.sh +++ b/ee/assist/prepare-dev.sh @@ -1,2 +1,2 @@ #!/bin/bash -rsync -avr --exclude=".*" --exclude="node_modules" --ignore-existing ../../utilities/* ./ \ No newline at end of file +rsync -avr --exclude=".*" --exclude="node_modules" --ignore-existing ../../assist/* ./ \ No newline at end of file diff --git a/utilities/run-dev.sh b/ee/assist/run-dev.sh similarity index 100% rename from utilities/run-dev.sh rename to ee/assist/run-dev.sh diff --git a/ee/utilities/server.js b/ee/assist/server.js similarity index 89% rename from ee/utilities/server.js rename to ee/assist/server.js index 482ddcd17..414d7ac2c 100644 --- a/ee/utilities/server.js +++ b/ee/assist/server.js @@ -1,6 +1,7 @@ const dumps = 
require('./utils/HeapSnapshot'); const {request_logger} = require('./utils/helper'); const express = require('express'); +const health = require("./utils/health"); const assert = require('assert').strict; let socket; @@ -14,7 +15,7 @@ const HOST = process.env.LISTEN_HOST || '0.0.0.0'; const PORT = process.env.LISTEN_PORT || 9001; assert.ok(process.env.ASSIST_KEY, 'The "ASSIST_KEY" environment variable is required'); const P_KEY = process.env.ASSIST_KEY; -const PREFIX = process.env.PREFIX || process.env.prefix || `/assist` +const PREFIX = process.env.PREFIX || process.env.prefix || `/assist`; let debug = process.env.debug === "1"; const heapdump = process.env.heapdump === "1"; @@ -31,18 +32,11 @@ if (process.env.uws !== "true") { ); heapdump && wsapp.use(`${PREFIX}/${P_KEY}/heapdump`, dumps.router); wsapp.use(`${PREFIX}/${P_KEY}`, socket.wsRouter); - wsapp.get('/private/shutdown', (req, res) => { - console.log("Requested shutdown"); - res.statusCode = 200; - res.end("ok!"); - process.kill(1, "SIGTERM"); - } - ); wsapp.enable('trust proxy'); const wsserver = wsapp.listen(PORT, HOST, () => { console.log(`WS App listening on http://${HOST}:${PORT}`); - console.log('Press Ctrl+C to quit.'); + health.healthApp.listen(health.PORT, HOST, health.listen_cb); }); socket.start(wsserver); @@ -102,13 +96,6 @@ if (process.env.uws !== "true") { uapp.post(`${PREFIX}/${P_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); uapp.get(`${PREFIX}/${P_KEY}/sockets-live/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsLiveByProject)); - uapp.get('/private/shutdown', (res, req) => { - console.log("Requested shutdown"); - res.writeStatus('200 OK').end("ok!"); - process.kill(1, "SIGTERM"); - } - ); - socket.start(uapp); uapp.listen(HOST, PORT, (token) => { @@ -116,7 +103,7 @@ if (process.env.uws !== "true") { console.warn("port already in use"); } console.log(`WS App listening on http://${HOST}:${PORT}`); - console.log('Press Ctrl+C to quit.'); + health.healthApp.listen(health.PORT, HOST, health.listen_cb); }); diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/assist/servers/websocket-cluster.js similarity index 96% rename from ee/utilities/servers/websocket-cluster.js rename to ee/assist/servers/websocket-cluster.js index e129bfcb6..4618a6184 100644 --- a/ee/utilities/servers/websocket-cluster.js +++ b/ee/assist/servers/websocket-cluster.js @@ -34,7 +34,7 @@ const debug = process.env.debug === "1"; const createSocketIOServer = function (server, prefix) { if (process.env.uws !== "true") { io = _io(server, { - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6, cors: { origin: "*", methods: ["GET", "POST", "PUT"] @@ -43,7 +43,7 @@ const createSocketIOServer = function (server, prefix) { }); } else { io = new _io.Server({ - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6, cors: { origin: "*", methods: ["GET", "POST", "PUT"] @@ -83,6 +83,22 @@ const respond = function (res, data) { } } +const countSessions = async function () { + let count = 0; + try { + let rooms = await io.of('/').adapter.allRooms(); + for (let i of rooms) { + let {projectKey, sessionId} = extractPeerId(i); + if (projectKey !== undefined && sessionId !== undefined) { + count++; + } + } + } catch (e) { + console.error(e); + } + return count; +} + const socketsList = async function (req, res) { debug && 
console.log("[WS]looking for all available sessions"); let filters = await extractPayloadFromRequest(req, res); @@ -417,6 +433,7 @@ module.exports = { process.exit(2); }); }, + countSessions, handlers: { socketsList, socketsListByProject, diff --git a/ee/utilities/servers/websocket.js b/ee/assist/servers/websocket.js similarity index 96% rename from ee/utilities/servers/websocket.js rename to ee/assist/servers/websocket.js index c906b5987..7fb1c9684 100644 --- a/ee/utilities/servers/websocket.js +++ b/ee/assist/servers/websocket.js @@ -29,7 +29,7 @@ const debug = process.env.debug === "1"; const createSocketIOServer = function (server, prefix) { if (process.env.uws !== "true") { io = _io(server, { - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6, cors: { origin: "*", methods: ["GET", "POST", "PUT"] @@ -38,7 +38,7 @@ const createSocketIOServer = function (server, prefix) { }); } else { io = new _io.Server({ - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6, cors: { origin: "*", methods: ["GET", "POST", "PUT"] @@ -66,6 +66,23 @@ const respond = function (res, data) { } } +const countSessions = async function () { + let count = 0; + try { + const arr = Array.from(io.sockets.adapter.rooms); + const filtered = arr.filter(room => !room[1].has(room[0])); + for (let i of filtered) { + let {projectKey, sessionId} = extractPeerId(i[0]); + if (projectKey !== null && sessionId !== null) { + count++; + } + } + } catch (e) { + console.error(e); + } + return count; +} + const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); let filters = await extractPayloadFromRequest(req, res); @@ -379,6 +396,7 @@ module.exports = { socketConnexionTimeout(io); }, + countSessions, handlers: { socketsList, socketsListByProject, diff --git a/ee/assist/utils/health.js b/ee/assist/utils/health.js new file mode 100644 index 000000000..bcb64f61c --- /dev/null +++ b/ee/assist/utils/health.js @@ -0,0 +1,61 @@ +const express = require('express'); +let socket; +if (process.env.redis === "true") { + socket = require("../servers/websocket-cluster"); +} else { + socket = require("../servers/websocket"); +} +const HOST = process.env.LISTEN_HOST || '0.0.0.0'; +const PORT = process.env.HEALTH_PORT || 8888; + + +const {request_logger} = require("./helper"); +const debug = process.env.debug === "1"; +const respond = function (res, data) { + res.statusCode = 200; + res.setHeader('Content-Type', 'application/json'); + res.end(JSON.stringify({"data": data})); +} + +const check_health = async function (req, res) { + debug && console.log("[WS]looking for all available sessions"); + respond(res, { + "health": true, + "details": { + "version": process.env.npm_package_version, + "connectedSessions": await socket.countSessions(), + "uWebSocket": process.env.uws === "true", + "redis": process.env.redis === "true" + } + }); +} + + +const healthApp = express(); +healthApp.use(express.json()); +healthApp.use(express.urlencoded({extended: true})); +healthApp.use(request_logger("[healthApp]")); +healthApp.get(['/'], (req, res) => { + res.statusCode = 200; + res.end("healthApp ok!"); + } +); +healthApp.get('/health', check_health); +healthApp.get('/shutdown', (req, res) => { + console.log("Requested shutdown"); + res.statusCode = 200; + res.end("ok!"); + process.kill(1, "SIGTERM"); + } +); + +const 
listen_cb = async function () { + console.log(`Health App listening on http://${HOST}:${PORT}`); + console.log('Press Ctrl+C to quit.'); +} + +module.exports = { + healthApp, + PORT, + listen_cb +}; diff --git a/ee/utilities/utils/helper-ee.js b/ee/assist/utils/helper-ee.js similarity index 100% rename from ee/utilities/utils/helper-ee.js rename to ee/assist/utils/helper-ee.js diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql b/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql new file mode 100644 index 000000000..1962fde10 --- /dev/null +++ b/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql @@ -0,0 +1,8 @@ +CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.11.0-ee'; + +ALTER TABLE experimental.events + MODIFY COLUMN issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20)); + +ALTER TABLE experimental.issues + MODIFY COLUMN type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20); + diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql index 9b2cfbbd1..9536307d8 100644 --- a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql @@ -1,3 +1,4 @@ +CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.11.0-ee'; CREATE DATABASE IF NOT EXISTS experimental; CREATE TABLE IF NOT EXISTS experimental.autocomplete @@ -78,7 +79,7 @@ CREATE TABLE IF NOT EXISTS experimental.events success Nullable(UInt8), request_body Nullable(String), response_body Nullable(String), - issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19)), + issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20)), issue_id Nullable(String), error_tags_keys Array(String), error_tags_values Array(Nullable(String)), @@ -200,7 +201,7 @@ CREATE TABLE IF NOT EXISTS experimental.issues ( project_id UInt16, issue_id String, - type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19), + type 
Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20), context_string String, context_keys Array(String), context_values Array(Nullable(String)), diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql new file mode 100644 index 000000000..21544f62c --- /dev/null +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql @@ -0,0 +1,42 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.10.0-ee'; + next_version CONSTANT text := 'v1.11.0-ee'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + +BEGIN; +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT 'v1.11.0-ee' +$$ LANGUAGE sql IMMUTABLE; + +ALTER TABLE events.inputs + ADD COLUMN duration integer NULL, + ADD COLUMN hesitation integer NULL; + +ALTER TABLE public.projects + ALTER COLUMN gdpr SET DEFAULT '{ + "maskEmails": true, + "sampleRate": 33, + "maskNumbers": false, + "defaultInputMode": "obscured" + }'::jsonb; + +ALTER TYPE issue_type ADD VALUE IF NOT EXISTS 'mouse_thrashing'; + +ALTER TABLE events.clicks + ADD COLUMN hesitation integer NULL; + +COMMIT; \ No newline at end of file diff --git a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql index 0b2945b39..0418c5d7d 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -253,7 +253,7 @@ $$ "maskEmails": true, "sampleRate": 33, "maskNumbers": false, - "defaultInputMode": "plain" + "defaultInputMode": "obscured" }'::jsonb, first_recorded_session_at timestamp without time zone NULL DEFAULT NULL, sessions_last_check_at timestamp without time zone NULL DEFAULT NULL, @@ -947,13 +947,14 @@ $$ CREATE TABLE IF NOT EXISTS events.clicks ( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - label text DEFAULT NULL, - url text DEFAULT '' NOT NULL, + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + label text DEFAULT NULL, + url text DEFAULT '' NOT NULL, path text, - selector text DEFAULT '' NOT NULL, + selector text DEFAULT '' NOT NULL, + hesitation integer DEFAULT NULL, PRIMARY KEY (session_id, message_id) ); CREATE INDEX IF NOT EXISTS clicks_session_id_idx ON events.clicks (session_id); @@ -974,8 +975,10 @@ $$ session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, message_id bigint NOT NULL, timestamp bigint NOT NULL, - label text DEFAULT NULL, - value text DEFAULT NULL, + label text DEFAULT NULL, + value text DEFAULT NULL, + duration integer DEFAULT NULL, + hesitation integer DEFAULT NULL, PRIMARY KEY (session_id, message_id) ); CREATE INDEX IF NOT EXISTS 
inputs_session_id_idx ON events.inputs (session_id); diff --git a/frontend/app/components/Header/Header.js b/frontend/app/components/Header/Header.js index 7ef0028c9..021f96df3 100644 --- a/frontend/app/components/Header/Header.js +++ b/frontend/app/components/Header/Header.js @@ -19,6 +19,7 @@ import UserMenu from './UserMenu'; import SettingsMenu from './SettingsMenu'; import DefaultMenuView from './DefaultMenuView'; import PreferencesView from './PreferencesView'; +import HealthStatus from './HealthStatus' const CLIENT_PATH = client(CLIENT_DEFAULT_TAB); @@ -78,6 +79,8 @@ const Header = (props) => { + +
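The HealthStatus component wired into Header.js above is the UI counterpart of the /health endpoint introduced in ee/assist/utils/health.js, which listens on HEALTH_PORT (default 8888) and wraps its body in a {"data": ...} envelope. A minimal consumer sketch in Python — only the path, port default, and response shape are taken from this diff; the host name is a hypothetical in-cluster address, not something the PR defines:

# Sketch of a consumer for the assist health endpoint added in
# ee/assist/utils/health.js. Path (/health), default port (8888), and the
# {"data": ...} envelope come from the diff above; the host below is a
# hypothetical assumption for illustration.
import requests

ASSIST_HEALTH_URL = "http://assist:8888/health"  # hypothetical host

def check_assist(timeout: float = 3.0) -> dict:
    resp = requests.get(ASSIST_HEALTH_URL, timeout=timeout)
    resp.raise_for_status()
    data = resp.json()["data"]  # health.js responds with {"data": {...}}
    return {
        "health": data["health"],                   # overall boolean
        "version": data["details"].get("version"),  # npm_package_version
        "connectedSessions": data["details"].get("connectedSessions"),
    }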
diff --git a/frontend/app/components/Header/HealthStatus/HealthModal/Footer.tsx
new file mode 100644
index 000000000..0daf5cf56
--- /dev/null
+++ b/frontend/app/components/Header/HealthStatus/HealthModal/Footer.tsx
@@ -0,0 +1,43 @@
+import React from 'react';
+import { Icon } from 'UI';
+import cn from 'classnames'
+
+function Footer({ isSetup }: { isSetup?: boolean }) {
+  return (
+
+  );
+}
+
+export default Footer;
diff --git a/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx
new file mode 100644
index 000000000..b0a6fb9fa
--- /dev/null
+++ b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx
@@ -0,0 +1,159 @@
+import React from 'react';
+// @ts-ignore
+import slide from 'App/svg/cheers.svg';
+import { Button } from 'UI';
+import Footer from './Footer';
+import { getHighest } from 'App/constants/zindex';
+import Category from 'Components/Header/HealthStatus/ServiceCategory';
+import SubserviceHealth from 'Components/Header/HealthStatus/SubserviceHealth/SubserviceHealth';
+import { IServiceStats } from '../HealthStatus';
+import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG';
+
+function HealthModal({
+  getHealth,
+  isLoading,
+  healthResponse,
+  setShowModal,
+  setPassed,
+}: {
+  getHealth: () => void;
+  isLoading: boolean;
+  healthResponse: { overallHealth: boolean; healthMap: Record };
+  setShowModal: (isOpen: boolean) => void;
+  setPassed?: () => void;
+}) {
+  const [selectedService, setSelectedService] = React.useState('');
+
+  React.useEffect(() => {
+    if (!healthResponse?.overallHealth) {
+      if (healthResponse?.healthMap) {
+        setSelectedService(
+          Object.keys(healthResponse.healthMap).filter(
+            (s) => !healthResponse.healthMap[s].healthOk
+          )[0]
+        );
+      }
+    }
+  }, [healthResponse]);
+
+  const handleClose = () => {
+    setShowModal(false);
+  };
+
+  const isSetup = document.location.pathname.includes('/signup')
+
+  return (
+
e.stopPropagation()}
+      className={'flex flex-col bg-white rounded border border-figmaColors-divider'}
+    >
+
+
Installation Status
+ +
+ +
+
+      {isLoading ? (
+        null} name={"Loading health status"} isLoading />
+      )
+      : Object.keys(healthResponse.healthMap).map((service) => (
+
+          setSelectedService(service)}
+          healthOk={healthResponse.healthMap[service].healthOk}
+          name={healthResponse.healthMap[service].name}
+          isSelectable
+          isSelected={selectedService === service}
+        />
+
+      ))}
+
+ {isLoading ? ( +
+ +
+      ) : selectedService ? (
+
+      ) :
+      }
+
+ {isSetup ? ( +
+ +
+ ) : null} +
+
+
+  );
+}
+
+function ServiceStatus({ service }: { service: Record }) {
+  const { subservices } = service;
+  return (
+
+    {Object.keys(subservices).map((subservice: string) => (
+
+
+
+    ))}
+
+  );
+}
+
+export default HealthModal;
diff --git a/frontend/app/components/Header/HealthStatus/HealthStatus.tsx
new file mode 100644
index 000000000..283ba6f21
--- /dev/null
+++ b/frontend/app/components/Header/HealthStatus/HealthStatus.tsx
@@ -0,0 +1,92 @@
+import React from 'react';
+import { Icon } from 'UI';
+import HealthModal from 'Components/Header/HealthStatus/HealthModal/HealthModal';
+import { lastAskedKey, healthResponseKey } from './const';
+import HealthWidget from "Components/Header/HealthStatus/HealthWidget";
+import { getHealthRequest } from './getHealth'
+
+export interface IServiceStats {
+  name: 'backendServices' | 'databases' | 'ingestionPipeline' | 'ssl';
+  serviceName: string;
+  healthOk: boolean;
+  subservices: {
+    health: boolean;
+    details?: {
+      errors?: string[];
+      version?: string;
+    }
+  }[]
+}
+
+
+function HealthStatus() {
+  const healthResponseSaved = localStorage.getItem(healthResponseKey) || '{}';
+  const [healthResponse, setHealthResponse] = React.useState(JSON.parse(healthResponseSaved));
+  const [isError, setIsError] = React.useState(false);
+  const [isLoading, setIsLoading] = React.useState(false);
+  const lastAskedSaved = localStorage.getItem(lastAskedKey);
+  const [lastAsked, setLastAsked] = React.useState(lastAskedSaved);
+  const [showModal, setShowModal] = React.useState(false);
+
+  const getHealth = async () => {
+    if (isLoading) return;
+    try {
+      setIsLoading(true);
+      const { healthMap, asked } = await getHealthRequest();
+      setHealthResponse(healthMap);
+      setLastAsked(asked.toString());
+    } catch (e) {
+      console.error(e);
+      setIsError(true);
+    } finally {
+      setIsLoading(false);
+    }
+  };
+
+  React.useEffect(() => {
+    const now = new Date();
+    const lastAskedDate = lastAsked ? new Date(parseInt(lastAsked, 10)) : null;
+    const diff = lastAskedDate ? now.getTime() - lastAskedDate.getTime() : 0;
+    const diffInMinutes = Math.round(diff / 1000 / 60);
+    if (Object.keys(healthResponse).length === 0 || !lastAskedDate || diffInMinutes > 10) {
+      void getHealth();
+    }
+  }, []);
+
+  const icon = !isError && healthResponse?.overallHealth ? 'pulse' : ('exclamation-circle-fill' as const);
+  return (
+    <>
+
+
+
+ +
+
+ + +
+      {showModal ? (
+
+      ) : null}
+
+  );
+}
+
+
+export default HealthStatus;
diff --git a/frontend/app/components/Header/HealthStatus/HealthWidget.tsx
new file mode 100644
index 000000000..6a184f854
--- /dev/null
+++ b/frontend/app/components/Header/HealthStatus/HealthWidget.tsx
@@ -0,0 +1,98 @@
+import React from 'react'
+import { Icon } from "UI";
+import ServiceCategory from "Components/Header/HealthStatus/ServiceCategory";
+import cn from 'classnames'
+import { IServiceStats } from './HealthStatus'
+
+function HealthWidget({
+  healthResponse,
+  getHealth,
+  isLoading,
+  lastAsked,
+  setShowModal,
+  isError,
+}: {
+  healthResponse: { overallHealth: boolean; healthMap: Record };
+  getHealth: Function;
+  isLoading: boolean;
+  lastAsked: string | null;
+  setShowModal: (visible: boolean) => void;
+  isError?: boolean;
+}) {
+  const [lastAskedDiff, setLastAskedDiff] = React.useState(0);
+  const healthOk = healthResponse?.overallHealth;
+
+  React.useEffect(() => {
+    const now = new Date();
+    const lastAskedDate = lastAsked ? new Date(parseInt(lastAsked, 10)) : null;
+    const diff = lastAskedDate ? now.getTime() - lastAskedDate.getTime() : 0;
+    const diffInMinutes = Math.round(diff / 1000 / 60);
+    setLastAskedDiff(diffInMinutes);
+  }, [lastAsked]);
+
+  const title = !isError && healthOk ? 'All Systems Operational' : 'Service disruption';
+  const icon = !isError && healthOk ? ('check-circle-fill' as const) : ('exclamation-circle-fill' as const);
+
+  const problematicServices = Object.values(healthResponse?.healthMap || {}).filter(
+    (service: Record) => !service.healthOk
+  )
+
+  return (
+
+
+ + {title} +
+
+ Last checked {lastAskedDiff} mins. ago +
getHealth()} + > + +
+
+ {isError &&
Error getting service health status
} + +
+ +
+ {!isError && !healthOk ? ( + <> +
Observed an installation issue with the following
+ {problematicServices.map((service) => ( + + setShowModal(true)} + healthOk={false} + name={service.name} + isSelectable + /> + + ))} + + ) : null} +
+
+
+ ); +} + +export default HealthWidget \ No newline at end of file diff --git a/frontend/app/components/Header/HealthStatus/ServiceCategory.tsx b/frontend/app/components/Header/HealthStatus/ServiceCategory.tsx new file mode 100644 index 000000000..be5edec1f --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/ServiceCategory.tsx @@ -0,0 +1,49 @@ +import { Icon } from 'UI'; +import React from 'react'; +import cn from 'classnames'; +import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG'; + +function Category({ + name, + healthOk, + onClick, + isSelectable, + isExpandable, + isExpanded, + isSelected, + isLoading, +}: { + name: string; + healthOk?: boolean; + isLoading?: boolean; + onClick: (args: any) => void; + isSelectable?: boolean; + isExpandable?: boolean; + isExpanded?: boolean; + isSelected?: boolean; +}) { + + const icon = healthOk ? ('check-circle-fill' as const) : ('exclamation-circle-fill' as const); + return ( +
+ {isLoading ? ( + + ) : } + {name} + + {isSelectable ? : null} + {isExpandable ? ( + + ) : null} +
+ ); +} + +export default Category \ No newline at end of file diff --git a/frontend/app/components/Header/HealthStatus/SubserviceHealth/SubserviceHealth.tsx b/frontend/app/components/Header/HealthStatus/SubserviceHealth/SubserviceHealth.tsx new file mode 100644 index 000000000..6fd91031b --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/SubserviceHealth/SubserviceHealth.tsx @@ -0,0 +1,48 @@ +import React from 'react'; +import Category from 'Components/Header/HealthStatus/ServiceCategory'; +import cn from 'classnames'; + +function SubserviceHealth({ + subservice, + name, +}: { + name: string; + subservice: { health: boolean; details: { errors?: string[]; version?: string } }; +}) { + const [isExpanded, setIsExpanded] = React.useState(!subservice?.health); + + const isExpandable = subservice?.details && Object.keys(subservice?.details).length > 0; + return ( +
+ (isExpandable ? setIsExpanded(!isExpanded) : null)} + name={name} + healthOk={subservice?.health} + isExpandable={isExpandable} + isExpanded={isExpanded} + /> + {isExpanded ? ( +
+ {subservice?.details?.version ? ( +
+
Version
+
+ {subservice?.details?.version} +
+
+ ) : null} + {subservice?.details?.errors?.length ? ( +
+
Error log:
+ {subservice.details.errors.toString()} +
+ ) : subservice?.health ? null : ( + 'Service not responding' + )} +
+ ) : null} +
+ ); +} + +export default SubserviceHealth; diff --git a/frontend/app/components/Header/HealthStatus/const.ts b/frontend/app/components/Header/HealthStatus/const.ts new file mode 100644 index 000000000..69b5b1c5e --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/const.ts @@ -0,0 +1,9 @@ +export const categoryKeyNames = { + backendServices: 'Backend Services', + databases: 'Databases', + ingestionPipeline: 'Ingestion Pipeline', + ssl: 'SSL', +} as const + +export const lastAskedKey = '__openreplay_health_status'; +export const healthResponseKey = '__openreplay_health_response'; \ No newline at end of file diff --git a/frontend/app/components/Header/HealthStatus/getHealth.ts b/frontend/app/components/Header/HealthStatus/getHealth.ts new file mode 100644 index 000000000..70bd8914c --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/getHealth.ts @@ -0,0 +1,36 @@ +import { healthService } from 'App/services'; +import { categoryKeyNames, lastAskedKey, healthResponseKey } from "Components/Header/HealthStatus/const"; +import { IServiceStats } from "Components/Header/HealthStatus/HealthStatus"; + + +function mapResponse(resp: Record) { + const services = Object.keys(resp); + const healthMap: Record = {}; + services.forEach((service) => { + healthMap[service] = { + // @ts-ignore + name: categoryKeyNames[service], + healthOk: true, + subservices: resp[service], + serviceName: service, + }; + Object.values(healthMap[service].subservices).forEach((subservice: Record) => { + if (!subservice?.health) healthMap[service].healthOk = false; + }); + }); + + const overallHealth = Object.values(healthMap).every( + (service: Record) => service.healthOk + ); + + return { overallHealth, healthMap }; +} + +export async function getHealthRequest() { + const r = await healthService.fetchStatus(); + const healthMap = mapResponse(r); + const asked = new Date().getTime(); + localStorage.setItem(healthResponseKey, JSON.stringify(healthMap)); + localStorage.setItem(lastAskedKey, asked.toString()); + return { healthMap, asked } +} \ No newline at end of file diff --git a/frontend/app/components/Header/HealthStatus/index.ts b/frontend/app/components/Header/HealthStatus/index.ts new file mode 100644 index 000000000..1f4ce8576 --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/index.ts @@ -0,0 +1 @@ +export { default } from './HealthStatus' \ No newline at end of file diff --git a/frontend/app/components/Overview/Overview.tsx b/frontend/app/components/Overview/Overview.tsx index 78b4bfe2b..9d71b5702 100644 --- a/frontend/app/components/Overview/Overview.tsx +++ b/frontend/app/components/Overview/Overview.tsx @@ -4,25 +4,30 @@ import NoSessionsMessage from 'Shared/NoSessionsMessage'; import MainSearchBar from 'Shared/MainSearchBar'; import SessionSearch from 'Shared/SessionSearch'; import SessionListContainer from 'Shared/SessionListContainer/SessionListContainer'; +import cn from 'classnames'; +import OverviewMenu from 'Shared/OverviewMenu'; function Overview() { - return ( -
-
-
- + return ( +
+
+ +
+
+ -
- - +
+ + -
- -
-
-
+
+
- ); +
+
+ ); } export default withPageTitle('Sessions - OpenReplay')(Overview); diff --git a/frontend/app/components/Session_/BugReport/components/MetaInfo.tsx b/frontend/app/components/Session_/BugReport/components/MetaInfo.tsx index 09746dcfb..2eecf4ea7 100644 --- a/frontend/app/components/Session_/BugReport/components/MetaInfo.tsx +++ b/frontend/app/components/Session_/BugReport/components/MetaInfo.tsx @@ -24,11 +24,9 @@ export default function MetaInfo({ {Object.keys(envObject).map((envTag) => (
{envTag}
-
- +
{/* @ts-ignore */} {envObject[envTag]} -
))} diff --git a/frontend/app/components/Signup/Signup.js b/frontend/app/components/Signup/Signup.js index 6ca511f32..55e924d27 100644 --- a/frontend/app/components/Signup/Signup.js +++ b/frontend/app/components/Signup/Signup.js @@ -6,6 +6,8 @@ import stl from './signup.module.css'; import cn from 'classnames'; import SignupForm from './SignupForm'; import RegisterBg from '../../svg/register.svg'; +import HealthModal from 'Components/Header/HealthStatus/HealthModal/HealthModal'; +import { getHealthRequest } from 'Components/Header/HealthStatus/getHealth'; const BulletItem = ({ text }) => (
@@ -15,9 +17,45 @@ const BulletItem = ({ text }) => (
{text}
);
+
+const healthStatusCheck_key = '__or__healthStatusCheck_key'
+
 @withPageTitle('Signup - OpenReplay')
 export default class Signup extends React.Component {
+  state = {
+    healthModalPassed: localStorage.getItem(healthStatusCheck_key) === 'true',
+    healthStatusLoading: true,
+    healthStatus: null,
+  }
+
+  getHealth = async () => {
+    this.setState({ healthStatusLoading: true });
+    const { healthMap } = await getHealthRequest();
+    this.setState({ healthStatus: healthMap, healthStatusLoading: false });
+  }
+
+  componentDidMount() {
+    if (!this.state.healthModalPassed) void this.getHealth();
+  }
+
+  setHealthModalPassed = () => {
+    localStorage.setItem(healthStatusCheck_key, 'true');
+    this.setState({ healthModalPassed: true });
+  }
+
   render() {
+    if (!this.state.healthModalPassed) {
+      return (
+        <HealthModal
+          onClose={() => null}
+          healthResponse={this.state.healthStatus}
+          getHealth={this.getHealth}
+          isLoading={this.state.healthStatusLoading}
+          setPassed={this.setHealthModalPassed}
+        />
+      )
+    }
+
     return (
diff --git a/frontend/app/components/shared/OverviewMenu/OverviewMenu.tsx b/frontend/app/components/shared/OverviewMenu/OverviewMenu.tsx new file mode 100644 index 000000000..9736c353f --- /dev/null +++ b/frontend/app/components/shared/OverviewMenu/OverviewMenu.tsx @@ -0,0 +1,52 @@ +import React from 'react'; +import { SideMenuitem } from 'UI'; +import { connect } from 'react-redux'; +import { setActiveTab } from 'Duck/search'; + +interface Props { + setActiveTab: (tab: any) => void; + activeTab: string; + isEnterprise: boolean; +} +function OverviewMenu(props: Props) { + const { activeTab, isEnterprise } = props; + + return ( +
+
+ props.setActiveTab({ type: 'all' })} + /> +
+
+
+ props.setActiveTab({ type: 'bookmark' })} + /> +
+
+
+ props.setActiveTab({ type: 'notes' })} + /> +
+
+ ); +} + +export default connect((state: any) => ({ + activeTab: state.getIn(['search', 'activeTab', 'type']), + isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee', +}), { setActiveTab })(OverviewMenu); diff --git a/frontend/app/components/shared/OverviewMenu/index.ts b/frontend/app/components/shared/OverviewMenu/index.ts new file mode 100644 index 000000000..91599b4c8 --- /dev/null +++ b/frontend/app/components/shared/OverviewMenu/index.ts @@ -0,0 +1 @@ +export { default } from './OverviewMenu'; \ No newline at end of file diff --git a/frontend/app/components/shared/SessionListContainer/components/SessionHeader/SessionHeader.tsx b/frontend/app/components/shared/SessionListContainer/components/SessionHeader/SessionHeader.tsx index ebf0ad48b..9222b8183 100644 --- a/frontend/app/components/shared/SessionListContainer/components/SessionHeader/SessionHeader.tsx +++ b/frontend/app/components/shared/SessionListContainer/components/SessionHeader/SessionHeader.tsx @@ -47,40 +47,24 @@ function SessionHeader(props: Props) { }; return ( -
-
-
- props.setActiveTab({ type: 'all' })} addBorder={activeTab === 'all'}> - SESSIONS - - props.setActiveTab({ type: 'bookmark' })} - addBorder={activeTab === 'bookmark'} - > - {`${isEnterprise ? 'VAULT' : 'BOOKMARKS'}`} - - props.setActiveTab({ type: 'notes' })} - > - NOTES - -
-
- - {activeTab !== 'notes' && activeTab !== 'bookmark' ? ( -
- -
- -
+
+ {activeTab !== 'notes' ? ( +
+ {activeTab !== 'bookmark' && ( + <> + +
+ +
+ + )}
) : null} {activeTab === 'notes' && ( -
+
)} diff --git a/frontend/app/components/ui/SVG.tsx b/frontend/app/components/ui/SVG.tsx index d2710c6f5..227488667 100644 --- a/frontend/app/components/ui/SVG.tsx +++ b/frontend/app/components/ui/SVG.tsx @@ -78,6 +78,7 @@ const SVG = (props: Props) => { case 'bell-slash': return ; case 'bell': return ; case 'binoculars': return ; + case 'book-doc': return ; case 'book': return ; case 'browser/browser': return ; case 'browser/chrome': return ; @@ -172,6 +173,7 @@ const SVG = (props: Props) => { case 'event/mouse_thrashing': return ; case 'event/resize': return ; case 'event/view': return ; + case 'exclamation-circle-fill': return ; case 'exclamation-circle': return ; case 'expand-wide': return ; case 'explosion': return ; @@ -371,6 +373,7 @@ const SVG = (props: Props) => { case 'plus': return ; case 'pointer-sessions-search': return ; case 'prev1': return ; + case 'pulse': return ; case 'puzzle-piece': return ; case 'puzzle': return ; case 'question-circle': return ; @@ -406,6 +409,7 @@ const SVG = (props: Props) => { case 'star-solid': return ; case 'star': return ; case 'step-forward': return ; + case 'stickies': return ; case 'stop-record-circle': return ; case 'stopwatch': return ; case 'store': return ; diff --git a/frontend/app/logger/index.js b/frontend/app/logger/index.js index 353f186e9..caf6d7bee 100644 --- a/frontend/app/logger/index.js +++ b/frontend/app/logger/index.js @@ -24,18 +24,27 @@ function error(...args) { } let groupTm = {}; +let groupedLogs = {}; function group(groupName, ...args) { if (!window.env.PRODUCTION || options.verbose) { - if (!groupTm[groupName]) { - groupTm[groupName] = setTimeout(() => { - console.groupEnd() - delete groupTm[groupName] - }, 500); - console.groupCollapsed(groupName); + if (groupTm[groupName]) { + clearTimeout(groupTm[groupName]) + groupTm[groupName] = null + } else { + groupedLogs[groupName] = [] } - console.log(...args); + groupedLogs[groupName].push(args); + groupTm[groupName] = setTimeout(() => { + console.groupCollapsed(groupName) + groupedLogs[groupName].forEach((log) => { + console.log(...log) + }) + console.groupEnd() + delete groupTm[groupName] + delete groupedLogs[groupName] + }, 500) options.exceptionsLogs.push(args) } } diff --git a/frontend/app/player/common/types.ts b/frontend/app/player/common/types.ts index 7df4f6f6b..308ec0659 100644 --- a/frontend/app/player/common/types.ts +++ b/frontend/app/player/common/types.ts @@ -7,7 +7,7 @@ export interface Indexed { } export interface Moveable { - move(time: number, isJump?: boolean): void + move(time: number): void } export interface Cleanable { diff --git a/frontend/app/player/player/Animator.ts b/frontend/app/player/player/Animator.ts index 9423b5785..55d38432c 100644 --- a/frontend/app/player/player/Animator.ts +++ b/frontend/app/player/player/Animator.ts @@ -1,4 +1,5 @@ import type { Store, Moveable, Interval } from '../common/types'; +import MessageManager from 'App/player/web/MessageManager' const fps = 60 const performance: { now: () => number } = window.performance || { now: Date.now.bind(Date) } @@ -54,18 +55,18 @@ export default class Animator { private animationFrameRequestId: number = 0 - constructor(private store: Store, private mm: Moveable) { + constructor(private store: Store, private mm: MessageManager) { // @ts-ignore window.playerJump = this.jump.bind(this) } - private setTime(time: number, isJump?: boolean) { + private setTime(time: number) { this.store.update({ time, completed: false, }) - this.mm.move(time, isJump) + this.mm.move(time) } private 
startAnimation() { @@ -183,11 +184,11 @@ export default class Animator { jump = (time: number) => { if (this.store.get().playing) { cancelAnimationFrame(this.animationFrameRequestId) - this.setTime(time, true) + this.setTime(time) this.startAnimation() this.store.update({ livePlay: time === this.store.get().endTime }) } else { - this.setTime(time, true) + this.setTime(time) this.store.update({ livePlay: time === this.store.get().endTime }) } } diff --git a/frontend/app/player/web/MessageManager.ts b/frontend/app/player/web/MessageManager.ts index 2772ccbd2..b343eef96 100644 --- a/frontend/app/player/web/MessageManager.ts +++ b/frontend/app/player/web/MessageManager.ts @@ -289,7 +289,7 @@ export default class MessageManager { this.activityManager = new ActivityManager(this.session.duration.milliseconds); } - move(t: number, isJump?: boolean, index?: number): void { + move(t: number, index?: number): void { const stateToUpdate: Partial = {}; /* == REFACTOR_ME == */ const lastLoadedLocationMsg = this.loadedLocationManager.moveGetLast(t, index); @@ -339,7 +339,7 @@ export default class MessageManager { if (!!lastResize) { this.setSize(lastResize) } - this.pagesManager.moveReady(t, isJump).then(() => { + this.pagesManager.moveReady(t).then(() => { const lastScroll = this.scrollManager.moveGetLast(t, index); if (!!lastScroll && this.screen.window) { diff --git a/frontend/app/player/web/WebLivePlayer.ts b/frontend/app/player/web/WebLivePlayer.ts index 709692d20..7ed1e3400 100644 --- a/frontend/app/player/web/WebLivePlayer.ts +++ b/frontend/app/player/web/WebLivePlayer.ts @@ -56,7 +56,7 @@ export default class WebLivePlayer extends WebPlayer { const bytes = await requestEFSDom(this.session.sessionId) const fileReader = new MFileReader(bytes, this.session.startedAt) for (let msg = fileReader.readNext();msg !== null;msg = fileReader.readNext()) { - this.messageManager.distributeMessage(msg, msg._index) + this.messageManager.distributeMessage(msg) } this.wpState.update({ liveTimeTravel: true, diff --git a/frontend/app/player/web/managers/DOM/DOMManager.ts b/frontend/app/player/web/managers/DOM/DOMManager.ts index ec51401f6..d54781028 100644 --- a/frontend/app/player/web/managers/DOM/DOMManager.ts +++ b/frontend/app/player/web/managers/DOM/DOMManager.ts @@ -142,7 +142,7 @@ export default class DOMManager extends ListWalker { private setNodeAttribute(msg: { id: number, name: string, value: string }) { let { name, value } = msg; const vn = this.vElements.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("SetNodeAttribute: Node not found", msg); return } if (vn.node.tagName === "INPUT" && name === "name") { // Otherwise binds local autocomplete values (maybe should ignore on the tracker level) @@ -169,7 +169,7 @@ export default class DOMManager extends ListWalker { this.removeBodyScroll(msg.id, vn) } - private applyMessage = (msg: Message, isJump?: boolean): Promise | undefined => { + private applyMessage = (msg: Message): Promise | undefined => { let vn: VNode | undefined let doc: Document | null let styleSheet: CSSStyleSheet | PostponedStyleSheet | undefined @@ -230,14 +230,14 @@ export default class DOMManager extends ListWalker { return case MType.RemoveNode: vn = this.vElements.get(msg.id) || this.vTexts.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } - if (!vn.parentNode) { logger.error("Parent node not found", msg); return } + if (!vn) { logger.error("RemoveNode: Node not found", msg); return } + if (!vn.parentNode) { 
logger.error("RemoveNode: Parent node not found", msg); return } vn.parentNode.removeChild(vn) this.vElements.delete(msg.id) this.vTexts.delete(msg.id) return case MType.SetNodeAttribute: - if (isJump && msg.name === 'href') this.attrsBacktrack.push(msg) + if (msg.name === 'href') this.attrsBacktrack.push(msg) else this.setNodeAttribute(msg) return case MType.StringDict: @@ -247,7 +247,7 @@ export default class DOMManager extends ListWalker { this.stringDict[msg.nameKey] === undefined && logger.error("No dictionary key for msg 'name': ", msg) this.stringDict[msg.valueKey] === undefined && logger.error("No dictionary key for msg 'value': ", msg) if (this.stringDict[msg.nameKey] === undefined || this.stringDict[msg.valueKey] === undefined ) { return } - if (isJump && this.stringDict[msg.nameKey] === 'href') this.attrsBacktrack.push(msg) + if (this.stringDict[msg.nameKey] === 'href') this.attrsBacktrack.push(msg) else { this.setNodeAttribute({ id: msg.id, @@ -257,13 +257,13 @@ export default class DOMManager extends ListWalker { } return case MType.RemoveNodeAttribute: - vn = this.vElements.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } - vn.removeAttribute(msg.name) + vn = this.vElements.get(msg.id) + if (!vn) { logger.error("RemoveNodeAttribute: Node not found", msg); return } + vn.removeAttribute(msg.name) return case MType.SetInputValue: vn = this.vElements.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("SetInoputValue: Node not found", msg); return } const nodeWithValue = vn.node if (!(nodeWithValue instanceof HTMLInputElement || nodeWithValue instanceof HTMLTextAreaElement @@ -283,13 +283,13 @@ export default class DOMManager extends ListWalker { return case MType.SetInputChecked: vn = this.vElements.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("SetInputChecked: Node not found", msg); return } (vn.node as HTMLInputElement).checked = msg.checked return case MType.SetNodeData: case MType.SetCssData: // mbtodo: remove css transitions when timeflow is not natural (on jumps) vn = this.vTexts.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("SetCssData: Node not found", msg); return } vn.setData(msg.data) if (vn.node instanceof HTMLStyleElement) { doc = this.screen.document @@ -304,7 +304,7 @@ export default class DOMManager extends ListWalker { // @deprecated since 4.0.2 in favor of adopted_ss_insert/delete_rule + add_owner as being common case for StyleSheets case MType.CssInsertRule: vn = this.vElements.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("CssInsertRule: Node not found", msg); return } if (!(vn instanceof VStyleElement)) { logger.warn("Non-style node in CSS rules message (or sheet is null)", msg, vn); return @@ -313,7 +313,7 @@ export default class DOMManager extends ListWalker { return case MType.CssDeleteRule: vn = this.vElements.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("CssDeleteRule: Node not found", msg); return } if (!(vn instanceof VStyleElement)) { logger.warn("Non-style node in CSS rules message (or sheet is null)", msg, vn); return @@ -324,7 +324,7 @@ export default class DOMManager extends ListWalker { case MType.CreateIFrameDocument: vn = this.vElements.get(msg.frameID) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("CreateIFrameDocument: Node not found", 
         vn.enforceInsertion()
         const host = vn.node
         if (host instanceof HTMLIFrameElement) {
@@ -384,7 +384,7 @@ export default class DOMManager extends ListWalker {
         if (!vn) { // non-constructed case
           vn = this.vElements.get(msg.id)
-          if (!vn) { logger.error("Node not found", msg); return }
+          if (!vn) { logger.error("AdoptedSsAddOwner: Node not found", msg); return }
           if (!(vn instanceof VStyleElement)) { logger.error("Non-style owner", msg); return }
           this.ppStyleSheets.set(msg.sheetID, new PostponedStyleSheet(vn.node))
           return
@@ -411,13 +411,13 @@ export default class DOMManager extends ListWalker {
           return
         }
         vn = this.vRoots.get(msg.id)
-        if (!vn) { logger.error("Node not found", msg); return }
+        if (!vn) { logger.error("AdoptedSsRemoveOwner: Node not found", msg); return }
         //@ts-ignore
         vn.node.adoptedStyleSheets = [...vn.node.adoptedStyleSheets].filter(s => s !== styleSheet)
         return
       case MType.LoadFontFace:
         vn = this.vRoots.get(msg.parentID)
-        if (!vn) { logger.error("Node not found", msg); return }
+        if (!vn) { logger.error("LoadFontFace: Node not found", msg); return }
         if (vn instanceof VShadowRoot) { logger.error(`Node ${vn} expected to be a Document`, msg); return }
         let descr: Object
         try {
@@ -460,7 +460,7 @@ export default class DOMManager extends ListWalker {
     }
   }

-  async moveReady(t: number, isJump?: boolean): Promise {
+  async moveReady(t: number): Promise {
     // MBTODO (back jump optimisation):
     //  - store intermediate virtual dom state
     //  - cancel previous moveReady tasks (is it possible?) if new timestamp is less
@@ -472,15 +472,16 @@ export default class DOMManager extends ListWalker {
      * are applied, so it won't try to download and then cancel when a node is created in msg N and removed in msg N+2,
      * which produces a weird bug when the asset is cached (10-25ms delay)
      * */
-    await this.moveWait(t, (msg) => this.applyMessage(msg, isJump))
-    if (isJump) {
-      this.attrsBacktrack.forEach(msg => {
-        this.applyBacktrack(msg)
-      })
-      this.attrsBacktrack = []
-    }
-    this.vRoots.forEach(rt => rt.applyChanges()) // MBTODO (optimisation): affected set
+    // http://0.0.0.0:3333/5/session/8452905874437457
+    // 70 iframe, 8 create element - STYLE tag
+    await this.moveWait(t, this.applyMessage)
+    this.attrsBacktrack.forEach(msg => {
+      this.applyBacktrack(msg)
+    })
+    this.attrsBacktrack = []
+
+    this.vRoots.forEach(rt => rt.applyChanges()) // MBTODO (optimisation): affected set
    // Think about (read): css preload
    // What if we go back before it is ready? We'll have two handlers?
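    // Sketch of the new ordering (illustrative message shapes; assumes
    // applyBacktrack resolves node ids at flush time, as applyMessage does):
    //
    //   SetNodeAttribute { id: 7, name: 'href', value: url }  -> queued in attrsBacktrack
    //   RemoveNode       { id: 7 }                            -> node dropped from vElements
    //   flush attrsBacktrack                                  -> id 7 gone, url never fetched
    //
    // i.e. the download-then-cancel race described in the block comment above
    // can no longer happen, jump or no jump.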
return this.stylesManager.moveReady(t).then(() => { diff --git a/frontend/app/player/web/managers/PagesManager.ts b/frontend/app/player/web/managers/PagesManager.ts index b30f40372..dbc64bb72 100644 --- a/frontend/app/player/web/managers/PagesManager.ts +++ b/frontend/app/player/web/managers/PagesManager.ts @@ -33,14 +33,14 @@ export default class PagesManager extends ListWalker { this.forEach(page => page.sort(comparator)) } - moveReady(t: number, isJump?: boolean): Promise { + moveReady(t: number): Promise { const requiredPage = this.moveGetLast(t) if (requiredPage != null) { this.currentPage = requiredPage this.currentPage.reset() // Otherwise it won't apply create_document } if (this.currentPage != null) { - return this.currentPage.moveReady(t, isJump) + return this.currentPage.moveReady(t) } return Promise.resolve() } diff --git a/frontend/app/player/web/messages/MFileReader.ts b/frontend/app/player/web/messages/MFileReader.ts index d1b131595..b5fdde85c 100644 --- a/frontend/app/player/web/messages/MFileReader.ts +++ b/frontend/app/player/web/messages/MFileReader.ts @@ -3,7 +3,7 @@ import type { RawMessage } from './raw.gen'; import { MType } from './raw.gen'; import RawMessageReader from './RawMessageReader.gen'; import resolveURL from './urlBasedResolver' - +import Logger from 'App/logger' // TODO: composition instead of inheritance // needSkipMessage() and next() methods here use buf and p protected properties, @@ -59,10 +59,8 @@ export default class MFileReader extends RawMessageReader { if (!skippedMessage) { return null } - this.logger.group("Openreplay: Skipping messages ", skippedMessage) - + Logger.group("Openreplay: Skipping messages ", skippedMessage) } - this.pLastMessageID = this.p const rMsg = this.readRawMessage() diff --git a/frontend/app/services/HealthService.ts b/frontend/app/services/HealthService.ts new file mode 100644 index 000000000..7d2b3cc7f --- /dev/null +++ b/frontend/app/services/HealthService.ts @@ -0,0 +1,9 @@ +import BaseService from './BaseService'; + +export default class HealthService extends BaseService { + fetchStatus(): Promise { + return this.client.get('/health') + .then(r => r.json()) + .then(j => j.data || {}) + } +} \ No newline at end of file diff --git a/frontend/app/services/index.ts b/frontend/app/services/index.ts index 816113e68..32e216127 100644 --- a/frontend/app/services/index.ts +++ b/frontend/app/services/index.ts @@ -10,6 +10,7 @@ import RecordingsService from "./RecordingsService"; import ConfigService from './ConfigService' import AlertsService from './AlertsService' import WebhookService from './WebhookService' +import HealthService from "./HealthService"; export const dashboardService = new DashboardService(); export const metricService = new MetricService(); @@ -24,6 +25,8 @@ export const configService = new ConfigService(); export const alertsService = new AlertsService(); export const webhookService = new WebhookService(); +export const healthService = new HealthService(); + export const services = [ dashboardService, metricService, @@ -37,4 +40,5 @@ export const services = [ configService, alertsService, webhookService, + healthService, ] \ No newline at end of file diff --git a/frontend/app/svg/cheers.svg b/frontend/app/svg/cheers.svg new file mode 100644 index 000000000..1341b27a2 --- /dev/null +++ b/frontend/app/svg/cheers.svg @@ -0,0 +1,193 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ [cheers.svg: decorative illustration; its SVG markup did not survive extraction]
diff --git a/frontend/app/svg/icons/book-doc.svg b/frontend/app/svg/icons/book-doc.svg
new file mode 100644
index 000000000..7e6f2a680
--- /dev/null
+++ b/frontend/app/svg/icons/book-doc.svg
@@ -0,0 +1,10 @@
+ [book-doc.svg: 10-line icon markup not preserved]
diff --git a/frontend/app/svg/icons/exclamation-circle-fill.svg b/frontend/app/svg/icons/exclamation-circle-fill.svg
new file mode 100644
index 000000000..eebbd6833
--- /dev/null
+++ b/frontend/app/svg/icons/exclamation-circle-fill.svg
@@ -0,0 +1,10 @@
+ [exclamation-circle-fill.svg: 10-line icon markup not preserved]
diff --git a/frontend/app/svg/icons/pulse.svg b/frontend/app/svg/icons/pulse.svg
new file mode 100644
index 000000000..5075d1cab
--- /dev/null
+++ b/frontend/app/svg/icons/pulse.svg
@@ -0,0 +1,3 @@
+ [pulse.svg: 3-line icon markup not preserved]
diff --git a/frontend/app/svg/icons/stickies.svg b/frontend/app/svg/icons/stickies.svg
new file mode 100644
index 000000000..bf752b965
--- /dev/null
+++ b/frontend/app/svg/icons/stickies.svg
@@ -0,0 +1,4 @@
+ [stickies.svg: 4-line icon markup not preserved]
\ No newline at end of file
diff --git a/frontend/app/theme/colors.js b/frontend/app/theme/colors.js
index 3986cf3d5..d9671f6fe 100644
--- a/frontend/app/theme/colors.js
+++ b/frontend/app/theme/colors.js
@@ -57,5 +57,6 @@ module.exports = {
     'text-disabled': 'rgba(0,0,0, 0.38)',
     'text-primary': 'rgba(0,0,0, 0.87)',
     'outlined-border': 'rgba(0,0,0, 0.23)',
+    'divider': 'rgba(0, 0, 0, 0.12)',
   }
 }
diff --git a/peers/build.sh b/peers/build.sh
index dfff188cb..232d145e1 100644
--- a/peers/build.sh
+++ b/peers/build.sh
@@ -41,7 +41,8 @@ function build_api(){
     }
     cp -R ../peers ../${destination}
     cd ../${destination}
-    cp -R ../utilities/utils .
+    cp -R ../assist/utils .
+    cp ../sourcemap-reader/utils/health.js ./utils/.
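# Note: peers now reuses the shared health.js from sourcemap-reader/utils (its
# full source appears later in this diff); it serves the /health endpoint on
# HEALTH_PORT 8888, matching the new `metrics: 8888` service ports added to the
# helm values files below.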
# Copy enterprise code [[ $1 == "ee" ]] && { cp -rf ../ee/peers/* ./ diff --git a/peers/clean-dev.sh b/peers/clean-dev.sh new file mode 100755 index 000000000..a0cb5c9ed --- /dev/null +++ b/peers/clean-dev.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +rm -rf ./utils \ No newline at end of file diff --git a/peers/package-lock.json b/peers/package-lock.json index da9b72ca1..fd230847f 100644 --- a/peers/package-lock.json +++ b/peers/package-lock.json @@ -1,16 +1,16 @@ { "name": "peers-server", - "version": "1.0.0", + "version": "v1.11.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "peers-server", - "version": "1.0.0", + "version": "v1.11.0", "license": "Elastic License 2.0 (ELv2)", "dependencies": { "express": "^4.18.2", - "peer": "^v1.0.0-rc.9" + "peer": "^v1.0.0" } }, "node_modules/@types/body-parser": { @@ -57,9 +57,9 @@ "integrity": "sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==" }, "node_modules/@types/node": { - "version": "18.13.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.13.0.tgz", - "integrity": "sha512-gC3TazRzGoOnoKAhUx+Q0t8S9Tzs74z7m0ipwGpSqQrleP14hKxP4/JUeEQcD3W1/aIpnWl8pHowI7WokuZpXg==" + "version": "18.15.5", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.5.tgz", + "integrity": "sha512-Ark2WDjjZO7GmvsyFFf81MXuGTA/d6oP38anyxWOL6EREyBKAxKoFHwBhaZxCfLRLpO8JgVXwqOwSwa7jRcjew==" }, "node_modules/@types/qs": { "version": "6.9.7", @@ -72,9 +72,9 @@ "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" }, "node_modules/@types/serve-static": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.0.tgz", - "integrity": "sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg==", + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.1.tgz", + "integrity": "sha512-NUo5XNiAdULrJENtJXZZ3fHtfMolzZwczzBbnAeBbqBwG+LaG6YaJtuwzwGSQZ2wsCrxjEhNNjAkKigy3n8teQ==", "dependencies": { "@types/mime": "*", "@types/node": "*" @@ -243,6 +243,14 @@ "node": ">= 0.10" } }, + "node_modules/data-uri-to-buffer": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", + "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", + "engines": { + "node": ">= 12" + } + }, "node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", @@ -348,6 +356,28 @@ "node": ">= 0.10.0" } }, + "node_modules/fetch-blob": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", + "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "paypal", + "url": "https://paypal.me/jimmywarting" + } + ], + "dependencies": { + "node-domexception": "^1.0.0", + "web-streams-polyfill": "^3.0.3" + }, + "engines": { + "node": "^12.20 || >= 14.13" + } + }, "node_modules/finalhandler": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", @@ -365,6 +395,17 @@ "node": ">= 0.8" } }, + "node_modules/formdata-polyfill": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", + 
"integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", + "dependencies": { + "fetch-blob": "^3.1.2" + }, + "engines": { + "node": ">=12.20.0" + } + }, "node_modules/forwarded": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", @@ -540,6 +581,41 @@ "node": ">= 0.6" } }, + "node_modules/node-domexception": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "engines": { + "node": ">=10.5.0" + } + }, + "node_modules/node-fetch": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.1.tgz", + "integrity": "sha512-cRVc/kyto/7E5shrWca1Wsea4y6tL9iYJE5FBCius3JQfb/4P4I295PfhgbJQBLTx6lATE4z+wK0rPM4VS2uow==", + "dependencies": { + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-fetch" + } + }, "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", @@ -581,14 +657,15 @@ "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, "node_modules/peer": { - "version": "1.0.0-rc.9", - "resolved": "https://registry.npmjs.org/peer/-/peer-1.0.0-rc.9.tgz", - "integrity": "sha512-wjt3fWMKxM/lH/1uD5Qs9qinQ1x/aa9br1eZEQuJ2wuBBQrjAcCT85MUuY9PYcyoW5ymyABsDKC3H/q9KmZ3PA==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/peer/-/peer-1.0.0.tgz", + "integrity": "sha512-fPVtyCKZWVfjbf7XnY7MskhTlu+pBpMvQV81sngT8aXIuT5YF9y9bwIw8y5BlI98DV0NsDpLjow/oemFNvcKkg==", "dependencies": { "@types/express": "^4.17.3", "@types/ws": "^7.2.3 || ^8.0.0", "cors": "^2.8.5", "express": "^4.17.1", + "node-fetch": "^3.3.0", "ws": "^7.2.3 || ^8.0.0", "yargs": "^17.6.2" }, @@ -819,6 +896,14 @@ "node": ">= 0.8" } }, + "node_modules/web-streams-polyfill": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz", + "integrity": "sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==", + "engines": { + "node": ">= 8" + } + }, "node_modules/wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -836,9 +921,9 @@ } }, "node_modules/ws": { - "version": "8.12.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.12.1.tgz", - "integrity": "sha512-1qo+M9Ba+xNhPB+YTWUlK6M17brTut5EXbcBaMRN5pH5dFrXz7lzz1ChFSUq3bOUl8yEvSenhHmYUNJxFzdJew==", + "version": "8.13.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz", + "integrity": "sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==", "engines": { "node": ">=10.0.0" }, @@ -864,9 +949,9 @@ } }, "node_modules/yargs": { - "version": "17.6.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.6.2.tgz", - "integrity": "sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw==", + "version": "17.7.1", + "resolved": 
"https://registry.npmjs.org/yargs/-/yargs-17.7.1.tgz", + "integrity": "sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw==", "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", diff --git a/peers/package.json b/peers/package.json index 400274ffc..d77cf5910 100644 --- a/peers/package.json +++ b/peers/package.json @@ -1,6 +1,6 @@ { "name": "peers-server", - "version": "1.0.0", + "version": "v1.11.0", "description": "assist server to get live sessions & sourcemaps reader to get stack trace", "main": "peerjs-server.js", "scripts": { @@ -19,6 +19,6 @@ "homepage": "https://github.com/openreplay/openreplay#readme", "dependencies": { "express": "^4.18.2", - "peer": "^v1.0.0-rc.9" + "peer": "^v1.0.0" } } diff --git a/peers/prepare-dev.sh b/peers/prepare-dev.sh new file mode 100755 index 000000000..d4825a3d0 --- /dev/null +++ b/peers/prepare-dev.sh @@ -0,0 +1,3 @@ +#!/bin/bash +rsync -avr --exclude=".*" --ignore-existing ../assist/utils ./ +cp ../sourcemap-reader/utils/health.js ./utils/. \ No newline at end of file diff --git a/peers/run-dev.sh b/peers/run-dev.sh new file mode 100755 index 000000000..00e8d5a4b --- /dev/null +++ b/peers/run-dev.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -a +source .env +set +a + +npm start \ No newline at end of file diff --git a/peers/server.js b/peers/server.js index 4e25a0827..8cf12d546 100644 --- a/peers/server.js +++ b/peers/server.js @@ -1,5 +1,6 @@ const dumps = require('./utils/HeapSnapshot'); const {request_logger} = require('./utils/helper'); +const health = require("./utils/health"); const assert = require('assert').strict; const {peerRouter, peerConnection, peerDisconnect, peerError} = require('./servers/peerjs-server'); const express = require('express'); @@ -44,10 +45,4 @@ process.on('uncaughtException', err => { // process.exit(1); }); -app.get('/private/shutdown', (req, res) => { - console.log("Requested shutdown"); - res.statusCode = 200; - res.end("ok!"); - process.kill(1, "SIGTERM"); - } -); \ No newline at end of file +health.healthApp.listen(health.PORT, HOST, health.listen_cb); \ No newline at end of file diff --git a/scripts/helmcharts/openreplay-cli b/scripts/helmcharts/openreplay-cli index 52a6b28b2..4cde9a54d 100755 --- a/scripts/helmcharts/openreplay-cli +++ b/scripts/helmcharts/openreplay-cli @@ -15,6 +15,9 @@ tmp_dir=$(mktemp -d) sudo mkdir $OR_DIR } export PATH=/var/lib/openreplay:$PATH +function xargs() { + /var/lib/openreplay/busybox xargs +} tools=( zyedidia/eget @@ -114,10 +117,12 @@ echo -e ${NC} log info ' Usage: openreplay [ -h | --help ] [ -s | --status ] + [ -i | --install DOMAIN_NAME ] [ -u | --upgrade ] [ -U | --deprecated-upgrade /path/to/old_vars.yaml] [ -r | --restart ] [ -R | --Reload ] + [ -c | --cleanup N(in days) ] [ -e | --edit ] [ -p | --install-packages ] [ -l | --logs SERVICE ] @@ -184,6 +189,73 @@ function upgrade_old() { upgrade } +function clone_repo() { + err_cd "$tmp_dir" + log info "Working directory $tmp_dir" + git_options="-b ${OR_VERSION:-main}" + eval git clone "${OR_REPO}" --depth 1 $git_options + return +} + +function install() { + domain_name=$1 + # Check existing installation + [[ -f ${OR_DIR}/vars.yaml ]] && { + or_version=$(busybox awk '/fromVersion/{print $2}' < "${OR_DIR}/vars.yaml") + log err "Openreplay installation ${BWHITE}${or_version}${RED} found. 
If you want to upgrade, run ${BWHITE}openreplay -u${RED}"
+    }
+    # Installing OR
+    log title "Installing OpenReplay"
+    clone_repo
+    err_cd "$tmp_dir/openreplay/scripts/helmcharts"
+    DOMAIN_NAME=$domain_name bash init.sh
+    return
+}
+
+function cleanup() {
+    # Confirm deletion: delete Postgres/Minio (session) data captured on or before $delete_from_date?
+    delete_from_number_days=$1
+    delete_from_date=$(date +%Y-%m-%d -d "$delete_from_number_days day ago")
+    log debug "Do you want to delete the data captured on and before ${BWHITE}$delete_from_date${YELLOW}?"
+    read -p "Are you sure [y/n]? " -n 1 -r
+    echo # (optional) move to a new line
+    if [[ ! $REPLY =~ ^[Yy]$ ]]; then
+        log err "Cancelling data deletion"
+    fi
+
+    # Run pg cleanup
+    pguser=$(awk '/postgresqlUser/{print $2}' < "${OR_DIR}/vars.yaml" | xargs)
+    pgpassword=$(awk '/postgresqlPassword/{print $2}' < "${OR_DIR}/vars.yaml" | xargs)
+    pghost=$(awk '/postgresqlHost/{print $2}' < "${OR_DIR}/vars.yaml" | xargs)
+    pgport=$(awk '/postgresqlPort/{print $2}' < "${OR_DIR}/vars.yaml" | xargs)
+    pgdatabase=$(awk '/postgresqlDatabase/{print $2}' < "${OR_DIR}/vars.yaml" | xargs)
+    kubectl delete po -n ${APP_NS} pg-cleanup &> /dev/null || true
+    kubectl run pg-cleanup -n ${APP_NS} \
+        --restart=Never \
+        --env PGHOST=$pghost \
+        --env PGUSER=$pguser \
+        --env PGDATABASE=$pgdatabase \
+        --env PGPASSWORD=$pgpassword \
+        --env PGPORT=$pgport \
+        --image bitnami/postgresql -- psql -c "DELETE FROM public.sessions WHERE start_ts < extract(epoch from '${delete_from_date}'::date) * 1000;"
+    # Run minio cleanup
+    MINIO_ACCESS_KEY=$(awk '/accessKey/{print $NF}' < "${OR_DIR}/vars.yaml" | tail -n1 | xargs)
+    MINIO_SECRET_KEY=$(awk '/secretKey/{print $NF}' < "${OR_DIR}/vars.yaml" | tail -n1 | xargs)
+    MINIO_HOST=$(awk '/endpoint/{print $NF}' < "${OR_DIR}/vars.yaml" | tail -n1 | xargs)
+    kubectl delete po -n ${APP_NS} minio-cleanup &> /dev/null || true
+    kubectl run minio-cleanup -n ${APP_NS} \
+        --restart=Never \
+        --env MINIO_HOST=$MINIO_HOST \
+        --image bitnami/minio:2020.10.9-debian-10-r6 -- /bin/sh -c "
+    mc alias set minio $MINIO_HOST $MINIO_ACCESS_KEY $MINIO_SECRET_KEY &&
+    mc rm --recursive --dangerous --force --older-than ${delete_from_number_days}d minio/mobs
+    "
+    log info "Postgres data cleanup process initiated. Postgres will automatically vacuum deleted rows when the database is idle. This may take up to a few days to free the disk space."
+    log info "Minio (where recordings are stored) cleanup process initiated."
+    log info "Run ${BWHITE}openreplay -s${GREEN} to check the status of the cleanup process and available disk space."
+    return
+}
+
 function upgrade() {
     # TODO:
     # 1. store vars.yaml in central place.
@@ -191,15 +263,12 @@ function upgrade() {
     # 3. How to update package. Because openreplay -u will be done from old update script
     # 4. Update from Version exists
     git || log err "Git not found.
Please install" - log info "Working directory $tmp_dir" - err_cd "$tmp_dir" or_version=$(busybox awk '/fromVersion/{print $2}' < "${OR_DIR}/vars.yaml") # Creating backup dir of current installation [[ -d "$OR_DIR/openreplay" ]] && sudo cp -rfb "$OR_DIR/openreplay" "$OR_DIR/openreplay_${or_version//\"}" && sudo rm -rf ${OR_DIR}/openreplay - git_options="-b ${OR_VERSION:-main}" - eval git clone "${OR_REPO}" --depth 1 $git_options + clone_repo err_cd openreplay/scripts/helmcharts install_packages [[ -d /openreplay ]] && sudo chown -R 1001:1001 /openreplay @@ -239,7 +308,7 @@ function clean_tmp_dir() { install_packages } -PARSED_ARGUMENTS=$(busybox getopt -a -n openreplay -o Rrevpiuhsl:U: --long reload,edit,restart,verbose,install-packages,install,upgrade,help,status,logs,deprecated-upgrade: -- "$@") +PARSED_ARGUMENTS=$(busybox getopt -a -n openreplay -o Rrevpi:uhsl:U:c: --long reload,edit,restart,verbose,install-packages,install:,upgrade,help,status,logs,deprecated-upgrade:,cleanup: -- "$@") VALID_ARGUMENTS=$? if [[ "$VALID_ARGUMENTS" != "0" ]]; then help @@ -256,6 +325,12 @@ do clean_tmp_dir exit 0 ;; + -i | --install) + log title "Installing OpenReplay" + install "$2" + clean_tmp_dir + exit 0 + ;; -u | --upgrade) log title "Upgrading OpenReplay" upgrade @@ -268,6 +343,12 @@ do clean_tmp_dir exit 0 ;; + -c | --cleanup) + log title "Cleaning up data older than $2 days" + cleanup "$2" + clean_tmp_dir + exit 0 + ;; -r | --restart) log title "Restarting OpenReplay Components" kubectl rollout restart deployment -n "${APP_NS}" diff --git a/scripts/helmcharts/openreplay/charts/assist/values.yaml b/scripts/helmcharts/openreplay/charts/assist/values.yaml index 8ff07d2d0..5e84e8c60 100644 --- a/scripts/helmcharts/openreplay/charts/assist/values.yaml +++ b/scripts/helmcharts/openreplay/charts/assist/values.yaml @@ -64,6 +64,7 @@ service: type: ClusterIP ports: socketio: 9001 + metrics: 8888 ingress: enabled: true diff --git a/scripts/helmcharts/openreplay/charts/peers/values.yaml b/scripts/helmcharts/openreplay/charts/peers/values.yaml index 0bc4b6b14..0bf7fc27e 100644 --- a/scripts/helmcharts/openreplay/charts/peers/values.yaml +++ b/scripts/helmcharts/openreplay/charts/peers/values.yaml @@ -64,6 +64,7 @@ service: type: ClusterIP ports: peerjs: 9000 + metrics: 8888 ingress: enabled: true diff --git a/scripts/helmcharts/openreplay/charts/sourcemapreader/values.yaml b/scripts/helmcharts/openreplay/charts/sourcemapreader/values.yaml index ec9fe9655..d14069fca 100644 --- a/scripts/helmcharts/openreplay/charts/sourcemapreader/values.yaml +++ b/scripts/helmcharts/openreplay/charts/sourcemapreader/values.yaml @@ -48,6 +48,7 @@ service: type: ClusterIP ports: sourcemapreader: 9000 + metrics: 8888 serviceMonitor: enabled: false diff --git a/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql b/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql new file mode 100644 index 000000000..0fde93c48 --- /dev/null +++ b/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql @@ -0,0 +1,42 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.10.0'; + next_version CONSTANT text := 'v1.11.0'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + 
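-- The DO block above guards the migration: it proceeds when openreplay_version()
-- reports previous_version, logs a notice when the schema is already at
-- next_version, and raises (aborting the script) for anything else, so the
-- transaction below cannot run against an unexpected schema version.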
+BEGIN; +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT 'v1.11.0' +$$ LANGUAGE sql IMMUTABLE; + +ALTER TABLE events.inputs + ADD COLUMN duration integer NULL, + ADD COLUMN hesitation integer NULL; + +ALTER TABLE public.projects + ALTER COLUMN gdpr SET DEFAULT '{ + "maskEmails": true, + "sampleRate": 33, + "maskNumbers": false, + "defaultInputMode": "obscured" + }'::jsonb; + +ALTER TYPE issue_type ADD VALUE IF NOT EXISTS 'mouse_thrashing'; + +ALTER TABLE events.clicks + ADD COLUMN hesitation integer NULL; + +COMMIT; \ No newline at end of file diff --git a/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/scripts/schema/db/init_dbs/postgresql/init_schema.sql index 57dea2a58..2fe532636 100644 --- a/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -190,7 +190,7 @@ $$ "maskEmails": true, "sampleRate": 33, "maskNumbers": false, - "defaultInputMode": "plain" + "defaultInputMode": "obscured" }'::jsonb, first_recorded_session_at timestamp without time zone NULL DEFAULT NULL, sessions_last_check_at timestamp without time zone NULL DEFAULT NULL, @@ -628,13 +628,14 @@ $$ CREATE TABLE events.clicks ( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - label text DEFAULT NULL, - url text DEFAULT '' NOT NULL, + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + label text DEFAULT NULL, + url text DEFAULT '' NOT NULL, path text, - selector text DEFAULT '' NOT NULL, + selector text DEFAULT '' NOT NULL, + hesitation integer DEFAULT NULL, PRIMARY KEY (session_id, message_id) ); CREATE INDEX clicks_session_id_idx ON events.clicks (session_id); @@ -654,8 +655,10 @@ $$ session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, message_id bigint NOT NULL, timestamp bigint NOT NULL, - label text DEFAULT NULL, - value text DEFAULT NULL, + label text DEFAULT NULL, + value text DEFAULT NULL, + duration integer DEFAULT NULL, + hesitation integer DEFAULT NULL, PRIMARY KEY (session_id, message_id) ); CREATE INDEX inputs_session_id_idx ON events.inputs (session_id); diff --git a/sourcemap-reader/.gitignore b/sourcemap-reader/.gitignore index 09c49b304..f2686decf 100644 --- a/sourcemap-reader/.gitignore +++ b/sourcemap-reader/.gitignore @@ -3,5 +3,8 @@ node_modules npm-debug.log .cache test.html -/utils/ +/utils/assistHelper.js +/utils/geoIP.js +/utils/HeapSnapshot.js +/utils/helper.js mappings.wasm diff --git a/sourcemap-reader/build.sh b/sourcemap-reader/build.sh index 7169403e7..389a61a71 100644 --- a/sourcemap-reader/build.sh +++ b/sourcemap-reader/build.sh @@ -48,7 +48,7 @@ function build_api(){ } cp -R ../sourcemap-reader ../${destination} cd ../${destination} - cp -R ../utilities/utils . + cp -R ../assist/utils . 
tag="" # Copy enterprise code [[ $1 == "ee" ]] && { diff --git a/sourcemap-reader/clean-dev.sh b/sourcemap-reader/clean-dev.sh index a0cb5c9ed..ebc1c36c6 100755 --- a/sourcemap-reader/clean-dev.sh +++ b/sourcemap-reader/clean-dev.sh @@ -1,3 +1,6 @@ #!/bin/bash -rm -rf ./utils \ No newline at end of file +rm -rf ./utils/assistHelper.js +rm -rf ./utils/geoIP.js +rm -rf ./utils/HeapSnapshot.js +rm -rf ./utils/helper.js \ No newline at end of file diff --git a/sourcemap-reader/package-lock.json b/sourcemap-reader/package-lock.json index cbaebc3c1..1b3f5ec82 100644 --- a/sourcemap-reader/package-lock.json +++ b/sourcemap-reader/package-lock.json @@ -1,12 +1,12 @@ { "name": "sourcemaps-reader", - "version": "1.0.0", + "version": "v1.11.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "sourcemaps-reader", - "version": "1.0.0", + "version": "v1.11.0", "license": "Elastic License 2.0 (ELv2)", "dependencies": { "aws-sdk": "^2.1314.0", @@ -43,9 +43,9 @@ } }, "node_modules/aws-sdk": { - "version": "2.1314.0", - "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1314.0.tgz", - "integrity": "sha512-2jsfvgtOQ6kRflaicn50ndME4YoIaBhlus/dZCExtWNXeu8ePh+eAtflsYs6aqIiRPKhCBLaqClzahWm7hC0XA==", + "version": "2.1333.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1333.0.tgz", + "integrity": "sha512-MvOuleNeRryJtkCGXGEWDHPqqgxuqdi4/hGzJEpn9tnjsW9LNK8UgFPpYzUZ24ZO/3S+jiUh8DMMrL5nVGnagg==", "dependencies": { "buffer": "4.9.2", "events": "1.1.1", diff --git a/sourcemap-reader/package.json b/sourcemap-reader/package.json index 9d5a2806b..5a9b28ef8 100644 --- a/sourcemap-reader/package.json +++ b/sourcemap-reader/package.json @@ -1,6 +1,6 @@ { "name": "sourcemaps-reader", - "version": "1.0.0", + "version": "v1.11.0", "description": "assist server to get live sessions & sourcemaps reader to get stack trace", "main": "peerjs-server.js", "scripts": { diff --git a/sourcemap-reader/prepare-dev.sh b/sourcemap-reader/prepare-dev.sh index e057555db..78a315946 100755 --- a/sourcemap-reader/prepare-dev.sh +++ b/sourcemap-reader/prepare-dev.sh @@ -1,2 +1,2 @@ #!/bin/bash -rsync -avr --exclude=".*" --ignore-existing ../utilities/utils ./ \ No newline at end of file +rsync -avr --exclude=".*" --ignore-existing ../assist/utils ./ \ No newline at end of file diff --git a/sourcemap-reader/server.js b/sourcemap-reader/server.js index 02f63475b..08e3f926f 100644 --- a/sourcemap-reader/server.js +++ b/sourcemap-reader/server.js @@ -1,11 +1,12 @@ const dumps = require('./utils/HeapSnapshot'); const sourcemapsReaderServer = require('./servers/sourcemaps-server'); const express = require('express'); +const health = require("./utils/health"); const {request_logger} = require("./utils/helper"); const HOST = process.env.SMR_HOST || '127.0.0.1'; const PORT = process.env.SMR_PORT || 9000; -const PREFIX = process.env.PREFIX || process.env.prefix || '' +const PREFIX = process.env.PREFIX || process.env.prefix || ''; const P_KEY = process.env.SMR_KEY || 'smr'; const heapdump = process.env.heapdump === "1"; @@ -21,14 +22,7 @@ heapdump && app.use(`${PREFIX}/${P_KEY}/heapdump`, dumps.router); const server = app.listen(PORT, HOST, () => { console.log(`SR App listening on http://${HOST}:${PORT}`); - console.log('Press Ctrl+C to quit.'); + health.healthApp.listen(health.PORT, HOST, health.listen_cb); }); -module.exports = {server}; -app.get('/private/shutdown', (req, res) => { - console.log("Requested shutdown"); - res.statusCode = 200; - res.end("ok!"); - process.kill(1, "SIGTERM"); - } -); \ No newline 
at end of file
+module.exports = {server};
\ No newline at end of file
diff --git a/sourcemap-reader/utils/health.js b/sourcemap-reader/utils/health.js
new file mode 100644
index 000000000..0b89dd1d8
--- /dev/null
+++ b/sourcemap-reader/utils/health.js
@@ -0,0 +1,52 @@
+const express = require('express');
+const HOST = process.env.LISTEN_HOST || '0.0.0.0';
+const PORT = process.env.HEALTH_PORT || 8888;
+
+
+const {request_logger} = require("./helper");
+const debug = process.env.debug === "1";
+const respond = function (res, data) {
+    res.statusCode = 200;
+    res.setHeader('Content-Type', 'application/json');
+    res.end(JSON.stringify({"data": data}));
+}
+
+const check_health = async function (req, res) {
+    debug && console.log("[health] serving health-check request");
+    respond(res, {
+        "health": true,
+        "details": {
+            "version": process.env.npm_package_version
+        }
+    });
+}
+
+
+const healthApp = express();
+healthApp.use(express.json());
+healthApp.use(express.urlencoded({extended: true}));
+healthApp.use(request_logger("[healthApp]"));
+healthApp.get(['/'], (req, res) => {
+        res.statusCode = 200;
+        res.end("healthApp ok!");
+    }
+);
+healthApp.get('/health', check_health);
+healthApp.get('/shutdown', (req, res) => {
+        console.log("Requested shutdown");
+        res.statusCode = 200;
+        res.end("ok!");
+        process.kill(1, "SIGTERM");
+    }
+);
+
+const listen_cb = async function () {
+    console.log(`Health App listening on http://${HOST}:${PORT}`);
+    console.log('Press Ctrl+C to quit.');
+}
+
+module.exports = {
+    healthApp,
+    PORT,
+    listen_cb
+};
diff --git a/third-party.md b/third-party.md
index 0cfe2cac2..ac17bb869 100644
--- a/third-party.md
+++ b/third-party.md
@@ -1,4 +1,4 @@
-## Licenses (as of January 23, 2023)
+## Licenses (as of March 24, 2023)

 Below is the list of dependencies used in OpenReplay software. Licenses may change between versions, so please keep this up to date with every new library you use.

@@ -40,6 +40,7 @@ Below is the list of dependencies used in OpenReplay software. Licenses may chan
 | python-multipart | Apache | Python |
 | elasticsearch-py | Apache2 | Python |
 | jira | BSD2 | Python |
+| redis-py | MIT | Python |
 | clickhouse-driver | MIT | Python |
 | python3-saml | MIT | Python |
 | kubernetes | Apache2 | Python |
diff --git a/tracker/tracker/CHANGELOG.md b/tracker/tracker/CHANGELOG.md
index 90c68a91f..e0ba20c30 100644
--- a/tracker/tracker/CHANGELOG.md
+++ b/tracker/tracker/CHANGELOG.md
@@ -4,7 +4,9 @@
 - Capture DOM node drop event (>30% nodes removed)
 - Capture iframe network requests
 - Detect cached requests to img, css and js resources; send transferred size
-- added `{ network: { disableClickmaps: boolean } }` to disable calculating el. selectors
+- added `{ mouse: { disableClickmaps: boolean } }` to disable calculating el. selectors
+- added `{ mouse: { minSelectorDepth?: number; nthThreshold?: number; maxOptimiseTries?: number } }` for selector finding optimisations
+- fixed inline css loading in specific cases when assets get around min flush size

 ## 5.0.1
diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json
index 7705d49c9..7ac8ba10f 100644
--- a/tracker/tracker/package.json
+++ b/tracker/tracker/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@openreplay/tracker",
   "description": "The OpenReplay tracker main package",
-  "version": "5.0.2-beta.2",
+  "version": "5.0.2",
   "keywords": [
     "logging",
     "replay"
diff --git a/tracker/tracker/src/main/app/index.ts b/tracker/tracker/src/main/app/index.ts
index faca22b9d..4c770f664 100644
--- a/tracker/tracker/src/main/app/index.ts
+++ b/tracker/tracker/src/main/app/index.ts
@@ -14,6 +14,8 @@ import type { Options as ObserverOptions } from './observer/top_observer.js'
 import type { Options as SanitizerOptions } from './sanitizer.js'
 import type { Options as LoggerOptions } from './logger.js'
 import type { Options as SessOptions } from './session.js'
+import type { Options as NetworkOptions } from '../modules/network.js'
+
 import type {
   Options as WebworkerOptions,
   ToWorkerData,
@@ -75,6 +77,7 @@ type AppOptions = {
   // @deprecated
   onStart?: StartCallback
+  network?: NetworkOptions
 } & WebworkerOptions &
   SessOptions
@@ -99,6 +102,7 @@ export default class App {
   private readonly stopCallbacks: Array<() => any> = []
   private readonly commitCallbacks: Array = []
   private readonly options: AppOptions
+  public readonly networkOptions?: NetworkOptions
   private readonly revID: string
   private activityState: ActivityState = ActivityState.NotActive
   private readonly version = 'TRACKER_VERSION' // TODO: version compatibility check inside each plugin.
@@ -109,6 +113,7 @@ export default class App {
     // } ??
diff --git a/tracker/tracker/src/main/app/observer/top_observer.ts b/tracker/tracker/src/main/app/observer/top_observer.ts
index 38944c5c9..7eb15c15b 100644
--- a/tracker/tracker/src/main/app/observer/top_observer.ts
+++ b/tracker/tracker/src/main/app/observer/top_observer.ts
@@ -1,5 +1,6 @@
 import Observer from './observer.js'
 import { isElementNode, hasTag } from '../guards.js'
+import Network from '../../modules/network.js'
 import IFrameObserver from './iframe_observer.js'
 import ShadowRootObserver from './shadow_root_observer.js'
@@ -92,6 +93,7 @@ export default class TopObserver extends Observer {
       //TODO: more explicit logic
     ) {
       this.contextsSet.add(currentWin)
+      Network(this.app, this.app.networkOptions, currentWin)
       //@ts-ignore https://github.com/microsoft/TypeScript/issues/41684
       this.contextCallbacks.forEach((cb) => cb(currentWin))
     }
diff --git a/tracker/tracker/src/main/modules/mouse.ts b/tracker/tracker/src/main/modules/mouse.ts
index fb69bef08..5bba2ddda 100644
--- a/tracker/tracker/src/main/modules/mouse.ts
+++ b/tracker/tracker/src/main/modules/mouse.ts
@@ -5,13 +5,13 @@ import { MouseMove, MouseClick, MouseThrashing } from '../app/messages.gen.js'
 import { getInputLabel } from './input.js'
 import { finder } from '@medv/finder'
 
-function _getSelector(target: Element, document: Document) {
+function _getSelector(target: Element, document: Document, options?: MouseHandlerOptions): string {
   const selector = finder(target, {
     root: document.body,
     seedMinLength: 3,
-    optimizedMinLength: 2,
-    threshold: 1000,
-    maxNumberOfTries: 10_000,
+    optimizedMinLength: options?.minSelectorDepth || 2,
+    threshold: options?.nthThreshold || 1000,
+    maxNumberOfTries: options?.maxOptimiseTries || 10_000,
   })
 
   return selector
@@ -75,6 +75,25 @@ function _getTarget(target: Element, document: Document): Element | null {
 
 export interface MouseHandlerOptions {
   disableClickmaps?: boolean
+  /** Minimum length of an optimised selector;
+   * `body > div > div > p` becomes `body > p`, for example.
+   *
+   * default: 2
+   * */
+  minSelectorDepth?: number
+  /** How many selector candidates to try before falling back to
+   * nth-child selectors; trying more is a performance-expensive operation.
+   *
+   * default: 1000
+   * */
+  nthThreshold?: number
+  /**
+   * How many attempts to make when optimising (shortening) the selector.
+   *
+   * default: 10_000
+   * */
+  maxOptimiseTries?: number
 }
 
 export default function (app: App, options?: MouseHandlerOptions): void {
@@ -155,8 +174,8 @@ export default function (app: App, options?: MouseHandlerOptions): void {
   }
 
   const patchDocument = (document: Document, topframe = false) => {
-    function getSelector(id: number, target: Element): string {
-      return (selectorMap[id] = selectorMap[id] || _getSelector(target, document))
+    function getSelector(id: number, target: Element, options?: MouseHandlerOptions): string {
+      return (selectorMap[id] = selectorMap[id] || _getSelector(target, document, options))
     }
 
     const attachListener = topframe
@@ -202,7 +221,7 @@ export default function (app: App, options?: MouseHandlerOptions): void {
           id,
           mouseTarget === target ? Math.round(performance.now() - mouseTargetTime) : 0,
           getTargetLabel(target),
-          isClickable(target) && !disableClickmaps ? getSelector(id, target) : '',
+          isClickable(target) && !disableClickmaps ? getSelector(id, target, options) : '',
         ),
         true,
      )
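These new knobs map one-to-one onto `@medv/finder`'s `optimizedMinLength`, `threshold` and `maxNumberOfTries`, and surface through the tracker constructor under the `mouse` key (per the changelog entry above). A sketch with illustrative values, not recommendations:

```ts
import Tracker from '@openreplay/tracker'

const tracker = new Tracker({
  projectKey: 'YOUR_PROJECT_KEY', // placeholder
  mouse: {
    disableClickmaps: false, // keep computing click selectors
    minSelectorDepth: 2,     // allow selectors as short as `body > p`
    nthThreshold: 500,       // give up on unique selectors sooner and
                             // fall back to nth-child (cheaper per click)
    maxOptimiseTries: 5_000, // cap selector-shortening work
  },
})
tracker.start()
```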
diff --git a/tracker/tracker/src/main/modules/network.ts b/tracker/tracker/src/main/modules/network.ts
index a1a3e2c9c..097648d54 100644
--- a/tracker/tracker/src/main/modules/network.ts
+++ b/tracker/tracker/src/main/modules/network.ts
@@ -92,7 +92,7 @@ export interface Options {
   sanitizer?: Sanitizer
 }
 
-export default function (app: App, opts: Partial<Options> = {}) {
+export default function (app: App, opts: Partial<Options> = {}, customEnv?: Record<string, any>) {
   const options: Options = Object.assign(
     {
       failuresOnly: false,
@@ -150,8 +150,11 @@ export default function (app: App, opts: Partial<Options> = {}) {
   }
 
   /* ====== Fetch ====== */
-  const origFetch = window.fetch.bind(window) as WindowFetch
-  window.fetch = (input, init = {}) => {
+  const origFetch = customEnv
+    ? (customEnv.fetch.bind(customEnv) as WindowFetch)
+    : (window.fetch.bind(window) as WindowFetch)
+
+  const trackFetch = (input: RequestInfo | URL, init: RequestInit = {}) => {
     if (!(typeof input === 'string' || input instanceof URL) || app.isServiceURL(String(input))) {
       return origFetch(input, init)
     }
@@ -237,12 +240,23 @@
       return response
     })
   }
+
+  if (customEnv) {
+    customEnv.fetch = trackFetch
+  } else {
+    window.fetch = trackFetch
+  }
   /* ====== <> ====== */
 
   /* ====== XHR ====== */
-  const nativeOpen = XMLHttpRequest.prototype.open
-  XMLHttpRequest.prototype.open = function (initMethod, url) {
-    const xhr = this
+
+  const nativeOpen = customEnv
+    ? customEnv.XMLHttpRequest.prototype.open
+    : XMLHttpRequest.prototype.open
+
+  function trackXMLHttpReqOpen(initMethod: string, url: string | URL) {
+    // @ts-ignore ??? this -> XMLHttpRequest
+    const xhr = this as XMLHttpRequest
     setSessionTokenHeader((name, value) => xhr.setRequestHeader(name, value))
 
     let startTime = 0
@@ -302,23 +316,47 @@
     //TODO: handle error (though it has no Error API nor any useful information)
     //xhr.addEventListener('error', (e) => {})
 
-    return nativeOpen.apply(this, arguments)
+    // @ts-ignore ??? this -> XMLHttpRequest
+    return nativeOpen.apply(this as XMLHttpRequest, arguments)
   }
 
+  if (customEnv) {
+    customEnv.XMLHttpRequest.prototype.open = trackXMLHttpReqOpen.bind(customEnv)
+  } else {
+    XMLHttpRequest.prototype.open = trackXMLHttpReqOpen
+  }
+
   const nativeSend = XMLHttpRequest.prototype.send
-  XMLHttpRequest.prototype.send = function (body) {
-    const rdo = getXHRRequestDataObject(this)
+  function trackXHRSend(body: Document | XMLHttpRequestBodyInit | null | undefined) {
+    // @ts-ignore ??? this -> XMLHttpRequest
+    const rdo = getXHRRequestDataObject(this as XMLHttpRequest)
     rdo.body = body
-    return nativeSend.apply(this, arguments)
+    // @ts-ignore ??? this -> XMLHttpRequest
+    return nativeSend.apply(this as XMLHttpRequest, arguments)
   }
+
+  if (customEnv) {
+    customEnv.XMLHttpRequest.prototype.send = trackXHRSend.bind(customEnv)
+  } else {
+    XMLHttpRequest.prototype.send = trackXHRSend
+  }
+
   const nativeSetRequestHeader = XMLHttpRequest.prototype.setRequestHeader
-  XMLHttpRequest.prototype.setRequestHeader = function (name, value) {
+
+  function trackSetReqHeader(name: string, value: string) {
     if (!isHIgnored(name)) {
-      const rdo = getXHRRequestDataObject(this)
+      // @ts-ignore ??? this -> XMLHttpRequest
+      const rdo = getXHRRequestDataObject(this as XMLHttpRequest)
       rdo.headers[name] = value
     }
+    // @ts-ignore ??? this -> XMLHttpRequest
+    return nativeSetRequestHeader.apply(this as XMLHttpRequest, arguments)
+  }
 
-    return nativeSetRequestHeader.apply(this, arguments)
+  if (customEnv) {
+    customEnv.XMLHttpRequest.prototype.setRequestHeader = trackSetReqHeader.bind(customEnv)
+  } else {
+    XMLHttpRequest.prototype.setRequestHeader = trackSetReqHeader
   }
   /* ====== <> ====== */
 }
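The new `customEnv` parameter is what the `TopObserver` change above relies on: instead of always monkey-patching the top window, the module can patch any window-like object, so requests made inside same-origin iframes are captured too. A rough sketch of such a call, reusing names from this diff (paths relative to `tracker/tracker/src/main`; `app` is assumed to be in scope):

```ts
import type App from './app/index.js'
import Network from './modules/network.js'

declare const app: App // the tracker's App instance, assumed in scope

const iframe = document.querySelector('iframe')
if (iframe?.contentWindow) {
  // Patch the iframe's own fetch and XMLHttpRequest.prototype methods,
  // mirroring TopObserver's call for each newly discovered context.
  Network(app, app.networkOptions ?? {}, iframe.contentWindow as unknown as Record<string, any>)
}
```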
diff --git a/tracker/tracker/src/webworker/BatchWriter.ts b/tracker/tracker/src/webworker/BatchWriter.ts
index d9275ada3..cf7a6e4b4 100644
--- a/tracker/tracker/src/webworker/BatchWriter.ts
+++ b/tracker/tracker/src/webworker/BatchWriter.ts
@@ -117,11 +117,13 @@ export default class BatchWriter {
     if (this.writeWithSize(message)) {
       return
     }
-    // buffer is too small. Create one with maximal capacity
+    // buffer is too small. Creating one with maximal capacity for this message only
     this.encoder = new MessageEncoder(this.beaconSizeLimit)
     this.prepare()
     if (!this.writeWithSize(message)) {
       console.warn('OpenReplay: beacon size overflow. Skipping large message.', message, this)
+    } else {
+      this.finaliseBatch()
     }
     // reset encoder to normal size
     this.encoder = new MessageEncoder(this.beaconSize)
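To make the intent of the `finaliseBatch()` addition explicit, the oversized-message path now behaves roughly like this (a paraphrase of the hunk above, with the surrounding class assumed, not the literal source):

```ts
// Inside BatchWriter's write path (paraphrased):
if (!this.writeWithSize(message)) {
  // The message didn't fit the normal buffer: retry once in a buffer
  // enlarged to beaconSizeLimit, dedicated to this single message.
  this.encoder = new MessageEncoder(this.beaconSizeLimit)
  this.prepare()
  if (this.writeWithSize(message)) {
    // New in this change: flush the oversized batch immediately instead
    // of letting the large message linger in the enlarged buffer.
    this.finaliseBatch()
  } else {
    console.warn('OpenReplay: beacon size overflow. Skipping large message.')
  }
  this.encoder = new MessageEncoder(this.beaconSize) // back to normal capacity
}
```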