Merge pull request #273 from openreplay/dev

v1.4.0
commit 6e8910a51a by Mehdi Osman, 2022-01-16 21:33:33 +01:00 (committed by GitHub)
561 changed files with 47038 additions and 7348 deletions


@@ -1,67 +0,0 @@
{
"version": "2.0",
"app_name": "parrot",
"environment_variables": {
},
"stages": {
"default-foss": {
"api_gateway_stage": "default-fos",
"manage_iam_role": false,
"iam_role_arn": "",
"autogen_policy": true,
"environment_variables": {
"isFOS": "true",
"isEE": "false",
"stage": "default-foss",
"jwt_issuer": "openreplay-default-foss",
"sentryURL": "",
"pg_host": "postgresql.db.svc.cluster.local",
"pg_port": "5432",
"pg_dbname": "postgres",
"pg_user": "postgres",
"pg_password": "asayerPostgres",
"alert_ntf": "http://127.0.0.1:8000/async/alerts/notifications/%s",
"email_signup": "http://127.0.0.1:8000/async/email_signup/%s",
"email_funnel": "http://127.0.0.1:8000/async/funnel/%s",
"email_basic": "http://127.0.0.1:8000/async/basic/%s",
"assign_link": "http://127.0.0.1:8000/async/email_assignment",
"captcha_server": "",
"captcha_key": "",
"sessions_bucket": "mobs",
"sessions_region": "us-east-1",
"put_S3_TTL": "20",
"sourcemaps_reader": "http://0.0.0.0:9000/sourcemaps",
"sourcemaps_bucket": "sourcemaps",
"js_cache_bucket": "sessions-assets",
"peers": "http://0.0.0.0:9000/assist/peers",
"async_Token": "",
"EMAIL_HOST": "",
"EMAIL_PORT": "587",
"EMAIL_USER": "",
"EMAIL_PASSWORD": "",
"EMAIL_USE_TLS": "true",
"EMAIL_USE_SSL": "false",
"EMAIL_SSL_KEY": "",
"EMAIL_SSL_CERT": "",
"EMAIL_FROM": "OpenReplay<do-not-reply@openreplay.com>",
"SITE_URL": "",
"announcement_url": "",
"jwt_secret": "",
"jwt_algorithm": "HS512",
"jwt_exp_delta_seconds": "2592000",
"S3_HOST": "",
"S3_KEY": "",
"S3_SECRET": "",
"invitation_link": "/api/users/invitation?token=%s",
"change_password_link": "/reset-password?invitation=%s&&pass=%s",
"version_number": "1.3.5"
},
"lambda_timeout": 150,
"lambda_memory_size": 400,
"subnet_ids": [
],
"security_group_ids": [
]
}
}
}


@@ -1,68 +0,0 @@
{
"version": "2.0",
"app_name": "parrot",
"environment_variables": {
},
"stages": {
"default-foss": {
"api_gateway_stage": "default-fos",
"manage_iam_role": false,
"iam_role_arn": "",
"autogen_policy": true,
"environment_variables": {
"isFOS": "true",
"isEE": "false",
"stage": "default-foss",
"jwt_issuer": "openreplay-default-foss",
"sentryURL": "",
"pg_host": "postgresql.db.svc.cluster.local",
"pg_port": "5432",
"pg_dbname": "postgres",
"pg_user": "postgres",
"pg_password": "asayerPostgres",
"alert_ntf": "http://127.0.0.1:8000/async/alerts/notifications/%s",
"email_signup": "http://127.0.0.1:8000/async/email_signup/%s",
"email_funnel": "http://127.0.0.1:8000/async/funnel/%s",
"email_basic": "http://127.0.0.1:8000/async/basic/%s",
"assign_link": "http://127.0.0.1:8000/async/email_assignment",
"captcha_server": "",
"captcha_key": "",
"sessions_bucket": "mobs",
"sessions_region": "us-east-1",
"put_S3_TTL": "20",
"sourcemaps_reader": "http://utilities-openreplay.app.svc.cluster.local:9000/sourcemaps",
"sourcemaps_bucket": "sourcemaps",
"js_cache_bucket": "sessions-assets",
"peers": "http://utilities-openreplay.app.svc.cluster.local:9000/assist/%s/peers",
"async_Token": "",
"EMAIL_HOST": "",
"EMAIL_PORT": "587",
"EMAIL_USER": "",
"EMAIL_PASSWORD": "",
"EMAIL_USE_TLS": "true",
"EMAIL_USE_SSL": "false",
"EMAIL_SSL_KEY": "",
"EMAIL_SSL_CERT": "",
"EMAIL_FROM": "OpenReplay<do-not-reply@openreplay.com>",
"SITE_URL": "",
"announcement_url": "",
"jwt_secret": "",
"jwt_algorithm": "HS512",
"jwt_exp_delta_seconds": "2592000",
"S3_HOST": "",
"S3_KEY": "",
"S3_SECRET": "",
"invitation_link": "/api/users/invitation?token=%s",
"change_password_link": "/reset-password?invitation=%s&&pass=%s",
"iosBucket": "openreplay-ios-images",
"version_number": "1.3.6"
},
"lambda_timeout": 150,
"lambda_memory_size": 400,
"subnet_ids": [
],
"security_group_ids": [
]
}
}
}

api/.env.default (new file, 46 lines)

@@ -0,0 +1,46 @@
EMAIL_FROM=OpenReplay<do-not-reply@openreplay.com>
EMAIL_HOST=
EMAIL_PASSWORD=
EMAIL_PORT=587
EMAIL_SSL_CERT=
EMAIL_SSL_KEY=
EMAIL_USER=
EMAIL_USE_SSL=false
EMAIL_USE_TLS=true
S3_HOST=
S3_KEY=
S3_SECRET=
SITE_URL=
alert_ntf=http://127.0.0.1:8000/async/alerts/notifications/%s
announcement_url=
assign_link=http://127.0.0.1:8000/async/email_assignment
async_Token=
captcha_key=
captcha_server=
change_password_link=/reset-password?invitation=%s&&pass=%s
email_basic=http://127.0.0.1:8000/async/basic/%s
email_signup=http://127.0.0.1:8000/async/email_signup/%s
invitation_link=/api/users/invitation?token=%s
isEE=false
isFOS=true
js_cache_bucket=sessions-assets
jwt_algorithm=HS512
jwt_exp_delta_seconds=2592000
jwt_issuer=openreplay-default-foss
jwt_secret="SET A RANDOM STRING HERE"
peers=http://utilities-openreplay.app.svc.cluster.local:9000/assist/%s/peers
pg_dbname=postgres
pg_host=postgresql.db.svc.cluster.local
pg_password=asayerPostgres
pg_port=5432
pg_user=postgres
pg_timeout=30
pg_minconn=50
put_S3_TTL=20
sentryURL=
sessions_bucket=mobs
sessions_region=us-east-1
sourcemaps_bucket=sourcemaps
sourcemaps_reader=http://utilities-openreplay.app.svc.cluster.local:9000/sourcemaps
stage=default-foss
version_number=1.4.0
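The new code reads these variables with python-decouple ("from decouple import config", visible in app.py further down), after the Dockerfile renames this file to .env. A minimal sketch of that pattern, with the defaults here assumed rather than confirmed:

from decouple import config

pg_host = config("pg_host", default="postgresql.db.svc.cluster.local")
pg_minconn = config("pg_minconn", cast=int, default=50)
jwt_secret = config("jwt_secret")  # must be overridden with a random string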


@@ -1,9 +1,10 @@
FROM python:3.6-slim
FROM python:3.9.7-slim
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
WORKDIR /work
COPY . .
RUN pip install -r requirements.txt -t ./vendor --upgrade
RUN pip install chalice==1.22.2
RUN pip install -r requirements.txt
RUN mv .env.default .env
# Add Tini
# Startup daemon

api/Dockerfile.alerts (new file, 18 lines)

@@ -0,0 +1,18 @@
FROM python:3.9.7-slim
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
WORKDIR /work
COPY . .
RUN pip install -r requirements.txt
RUN mv .env.default .env && mv app_alerts.py app.py
ENV pg_minconn 2
# Add Tini
# Startup daemon
ENV TINI_VERSION v0.19.0
ARG envarg
ENV ENTERPRISE_BUILD ${envarg}
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
RUN chmod +x /tini
ENTRYPOINT ["/tini", "--"]
CMD ./entrypoint.sh
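A hedged build-and-run sketch for this image; the envarg value comes from build_alerts.sh further down, while the published port is an assumption (entrypoint.sh is not shown in this diff):

$ docker build -f Dockerfile.alerts --build-arg envarg=default-foss -t alerts:local .
$ docker run --rm -p 8000:8000 alerts:local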


@@ -1,109 +1,70 @@
import sentry_sdk
from chalice import Chalice, Response
from sentry_sdk import configure_scope
import logging
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from decouple import config
from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
from starlette.responses import StreamingResponse
from chalicelib import _overrides
from chalicelib.blueprints import bp_authorizers
from chalicelib.blueprints import bp_core, bp_core_crons
from chalicelib.blueprints.app import v1_api
from chalicelib.blueprints import bp_core_dynamic, bp_core_dynamic_crons
from chalicelib.blueprints.subs import bp_dashboard
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.helper import environ
from routers import core, core_dynamic
from routers.app import v1_api
from routers.crons import core_crons
from routers.crons import core_dynamic_crons
from routers.subs import dashboard
app = Chalice(app_name='parrot')
app.debug = not helper.is_production() or helper.is_local()
sentry_sdk.init(environ["sentryURL"])
# Monkey-patch print for DataDog hack
import sys
import traceback
old_tb = traceback.print_exception
old_f = sys.stdout
old_e = sys.stderr
OR_SESSION_TOKEN = None
class F:
def write(self, x):
if OR_SESSION_TOKEN is not None and x != '\n' and not helper.is_local():
old_f.write(f"[or_session_token={OR_SESSION_TOKEN}] {x}")
else:
old_f.write(x)
def flush(self):
pass
def tb_print_exception(etype, value, tb, limit=None, file=None, chain=True):
if OR_SESSION_TOKEN is not None and not helper.is_local():
value = type(value)(f"[or_session_token={OR_SESSION_TOKEN}] " + str(value))
old_tb(etype, value, tb, limit, file, chain)
if helper.is_production():
traceback.print_exception = tb_print_exception
sys.stdout = F()
sys.stderr = F()
# ---End Monkey-patch
_overrides.chalice_app(app)
app = FastAPI()
@app.middleware('http')
def or_middleware(event, get_response):
async def or_middleware(request: Request, call_next):
global OR_SESSION_TOKEN
OR_SESSION_TOKEN = app.current_request.headers.get('vnd.openreplay.com.sid',
app.current_request.headers.get('vnd.asayer.io.sid'))
if "authorizer" in event.context and event.context["authorizer"] is None:
print("Deleted user!!")
pg_client.close()
return Response(body={"errors": ["Deleted user"]}, status_code=403)
OR_SESSION_TOKEN = request.headers.get('vnd.openreplay.com.sid', request.headers.get('vnd.asayer.io.sid'))
try:
if helper.TRACK_TIME:
import time
now = int(time.time() * 1000)
response = get_response(event)
if response.status_code == 200 and response.body is not None and response.body.get("errors") is not None:
if "not found" in response.body["errors"][0]:
response = Response(status_code=404, body=response.body)
else:
response = Response(status_code=400, body=response.body)
if response.status_code // 100 == 5 and helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local():
with configure_scope() as scope:
scope.set_tag('stage', environ["stage"])
scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN)
scope.set_extra("context", event.context)
sentry_sdk.capture_exception(Exception(response.body))
response: StreamingResponse = await call_next(request)
if helper.TRACK_TIME:
print(f"Execution time: {int(time.time() * 1000) - now} ms")
except Exception as e:
if helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local():
with configure_scope() as scope:
scope.set_tag('stage', environ["stage"])
scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN)
scope.set_extra("context", event.context)
sentry_sdk.capture_exception(e)
response = Response(body={"Code": "InternalServerError",
"Message": "An internal server error occurred [level=Fatal]."},
status_code=500)
pg_client.close()
raise e
pg_client.close()
return response
# Open source
app.register_blueprint(bp_authorizers.app)
app.register_blueprint(bp_core.app)
app.register_blueprint(bp_core_crons.app)
app.register_blueprint(bp_core_dynamic.app)
app.register_blueprint(bp_core_dynamic_crons.app)
app.register_blueprint(bp_dashboard.app)
app.register_blueprint(v1_api.app)
origins = [
"*",
]
app.add_middleware(
CORSMiddleware,
allow_origins=origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
app.include_router(core.public_app)
app.include_router(core.app)
app.include_router(core.app_apikey)
app.include_router(core_dynamic.public_app)
app.include_router(core_dynamic.app)
app.include_router(core_dynamic.app_apikey)
app.include_router(dashboard.app)
# app.include_router(insights.app)
app.include_router(v1_api.app_apikey)
Schedule = AsyncIOScheduler()
Schedule.start()
for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs:
Schedule.add_job(id=job["func"].__name__, **job)
for job in Schedule.get_jobs():
print({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
logging.getLogger('apscheduler').setLevel(config("LOGLEVEL", default=logging.INFO))
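With Chalice gone, app.py is now an ordinary ASGI application. This commit does not show how entrypoint.sh starts it, but a FastAPI app of this shape is conventionally served with uvicorn; treat the command below as an assumption, not something confirmed by the diff:

$ uvicorn app:app --host 0.0.0.0 --port 8000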

api/app_alerts.py (new file, 27 lines)

@@ -0,0 +1,27 @@
import logging
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from decouple import config
from fastapi import FastAPI
from chalicelib.core import alerts_processor
app = FastAPI()
print("============= ALERTS =============")
@app.get("/")
async def root():
return {"status": "Running"}
app.schedule = AsyncIOScheduler()
app.schedule.start()
app.schedule.add_job(id="alerts_processor", **{"func": alerts_processor.process, "trigger": "interval",
"minutes": config("ALERTS_INTERVAL", cast=int, default=5),
"misfire_grace_time": 20})
for job in app.schedule.get_jobs():
print({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
logging.getLogger('apscheduler').setLevel(config("LOGLEVEL", default=logging.INFO))
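The module only exposes the health route above; the real work happens on the APScheduler interval job (every ALERTS_INTERVAL minutes, default 5). A quick smoke test, assuming the server listens on port 8000:

$ curl http://localhost:8000/
{"status":"Running"}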

api/auth/auth_apikey.py (new file, 28 lines)

@@ -0,0 +1,28 @@
from typing import Optional
from fastapi import Request
from fastapi.security import APIKeyHeader
from starlette import status
from starlette.exceptions import HTTPException
from chalicelib.core import authorizers
from schemas import CurrentAPIContext
class APIKeyAuth(APIKeyHeader):
def __init__(self, auto_error: bool = True):
super(APIKeyAuth, self).__init__(name="Authorization", auto_error=auto_error)
async def __call__(self, request: Request) -> Optional[CurrentAPIContext]:
api_key: Optional[str] = await super(APIKeyAuth, self).__call__(request)
r = authorizers.api_key_authorizer(api_key)
if r is None:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid API Key",
)
r["authorizer_identity"] = "api_key"
print(r)
request.state.authorizer_identity = "api_key"
request.state.currentContext = CurrentAPIContext(tenant_id=r["tenantId"])
return request.state.currentContext
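In FastAPI, a security class like this is attached to routes through Depends. The route below is illustrative only, not taken from this commit:

from fastapi import Depends, FastAPI
from auth.auth_apikey import APIKeyAuth
from schemas import CurrentAPIContext

app = FastAPI()

@app.get("/v1/projects")
async def get_projects(context: CurrentAPIContext = Depends(APIKeyAuth())):
    # APIKeyAuth has already validated the Authorization header
    # and resolved it to a tenant context
    return {"data": {"tenantId": context.tenant_id}}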

api/auth/auth_jwt.py (new file, 39 lines)

@@ -0,0 +1,39 @@
from typing import Optional
from fastapi import Request
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from starlette import status
from starlette.exceptions import HTTPException
from chalicelib.core import authorizers, users
from schemas import CurrentContext
class JWTAuth(HTTPBearer):
def __init__(self, auto_error: bool = True):
super(JWTAuth, self).__init__(auto_error=auto_error)
async def __call__(self, request: Request) -> Optional[CurrentContext]:
credentials: HTTPAuthorizationCredentials = await super(JWTAuth, self).__call__(request)
if credentials:
if not credentials.scheme == "Bearer":
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.")
jwt_payload = authorizers.jwt_authorizer(credentials.scheme + " " + credentials.credentials)
if jwt_payload is None \
or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
or not users.auth_exists(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"],
jwt_iat=jwt_payload["iat"], jwt_aud=jwt_payload["aud"]):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.")
user = users.get(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"])
if user is None:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.")
jwt_payload["authorizer_identity"] = "jwt"
print(jwt_payload)
request.state.authorizer_identity = "jwt"
request.state.currentContext = CurrentContext(tenant_id=jwt_payload["tenantId"],
user_id=jwt_payload["userId"],
email=user["email"])
return request.state.currentContext
else:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authorization code.")
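From the client side both schemes use the standard Authorization header; a sketch, assuming the /account route keeps its path after the move to routers (token elided):

$ curl -H "Authorization: Bearer <jwt>" http://localhost:8000/account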


@@ -22,7 +22,6 @@ function build_api(){
# Copy enterprise code
[[ $1 == "ee" ]] && {
cp -rf ../ee/api/* ./
cp -rf ../ee/api/.chalice/* ./.chalice/
envarg="default-ee"
tag="ee-"
}
@@ -31,8 +30,9 @@ function build_api(){
docker push ${DOCKER_REPO:-'local'}/chalice:${git_sha1}
docker tag ${DOCKER_REPO:-'local'}/chalice:${git_sha1} ${DOCKER_REPO:-'local'}/chalice:${tag}latest
docker push ${DOCKER_REPO:-'local'}/chalice:${tag}latest
}
}
}
check_prereq
build_api $1
IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_alerts.sh $1

api/build_alerts.sh (new file, 70 lines)

@@ -0,0 +1,70 @@
#!/bin/bash
# Script to build alerts module
# flags to accept:
# envarg: build for enterprise edition.
# Default will be OSS build.
# Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh <ee>
function make_submodule() {
[[ $1 != "ee" ]] && {
# -- this part was generated by modules_lister.py --
mkdir alerts
cp -R ./{app_alerts,schemas}.py ./alerts/
mkdir -p ./alerts/chalicelib/
cp -R ./chalicelib/__init__.py ./alerts/chalicelib/
mkdir -p ./alerts/chalicelib/core/
cp -R ./chalicelib/core/{__init__,alerts_processor,alerts_listener,sessions,events,issues,sessions_metas,metadata,projects,users,authorizers,tenants,assist,events_ios,sessions_mobs,errors,sourcemaps,sourcemaps_parser,resources,performance_event,alerts,notifications,slack,collaboration_slack,webhook}.py ./alerts/chalicelib/core/
mkdir -p ./alerts/chalicelib/utils/
cp -R ./chalicelib/utils/{__init__,TimeUTC,pg_client,helper,event_filter_definition,dev,email_helper,email_handler,smtp,s3,metrics_helper}.py ./alerts/chalicelib/utils/
# -- end of generated part
}
[[ $1 == "ee" ]] && {
# -- this part was generated by modules_lister.py --
mkdir alerts
cp -R ./{app_alerts,schemas,schemas_ee}.py ./alerts/
mkdir -p ./alerts/chalicelib/
cp -R ./chalicelib/__init__.py ./alerts/chalicelib/
mkdir -p ./alerts/chalicelib/core/
cp -R ./chalicelib/core/{__init__,alerts_processor,alerts_listener,sessions,events,issues,sessions_metas,metadata,projects,users,authorizers,tenants,roles,assist,events_ios,sessions_mobs,errors,dashboard,sourcemaps,sourcemaps_parser,resources,performance_event,alerts,notifications,slack,collaboration_slack,webhook}.py ./alerts/chalicelib/core/
mkdir -p ./alerts/chalicelib/utils/
cp -R ./chalicelib/utils/{__init__,TimeUTC,pg_client,helper,event_filter_definition,dev,SAML2_helper,email_helper,email_handler,smtp,s3,args_transformer,ch_client,metrics_helper}.py ./alerts/chalicelib/utils/
# -- end of generated part
}
cp -R ./{Dockerfile.alerts,requirements.txt,.env.default,entrypoint.sh} ./alerts/
cp -R ./chalicelib/utils/html ./alerts/chalicelib/utils/html
}
git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)}
envarg="default-foss"
check_prereq() {
which docker || {
echo "Docker not installed, please install docker."
exit=1
}
[[ exit -eq 1 ]] && exit 1
}
function build_api(){
tag=""
# Copy enterprise code
[[ $1 == "ee" ]] && {
cp -rf ../ee/api/* ./
envarg="default-ee"
tag="ee-"
}
make_submodule $1
cd alerts
docker build -f ./Dockerfile.alerts --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/alerts:${git_sha1} .
cd ..
rm -rf alerts
[[ $PUSH_IMAGE -eq 1 ]] && {
docker push ${DOCKER_REPO:-'local'}/alerts:${git_sha1}
docker tag ${DOCKER_REPO:-'local'}/alerts:${git_sha1} ${DOCKER_REPO:-'local'}/alerts:${tag}latest
docker push ${DOCKER_REPO:-'local'}/alerts:${tag}latest
}
}
check_prereq
build_api $1
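Invocation follows the usage comment at the top of the script; setting PUSH_IMAGE=1 enables the docker push branch, and passing ee triggers the enterprise build:

$ IMAGE_TAG=v1.4.0 DOCKER_REPO=myDockerHubID bash build_alerts.sh
$ IMAGE_TAG=v1.4.0 DOCKER_REPO=myDockerHubID PUSH_IMAGE=1 bash build_alerts.sh ee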


@@ -1,104 +0,0 @@
from chalice import Chalice, CORSConfig
from chalicelib.blueprints import bp_authorizers
from chalicelib.core import authorizers
import sched
import threading
import time
from datetime import datetime
import pytz
from croniter import croniter
base_time = datetime.now(pytz.utc)
cors_config = CORSConfig(
allow_origin='*',
allow_headers=['vnd.openreplay.com.sid', 'vnd.asayer.io.sid'],
# max_age=600,
# expose_headers=['X-Special-Header'],
allow_credentials=True
)
def chalice_app(app):
def app_route(self, path, **kwargs):
kwargs.setdefault('cors', cors_config)
kwargs.setdefault('authorizer', bp_authorizers.jwt_authorizer)
handler_type = 'route'
name = kwargs.pop('name', None)
registration_kwargs = {'path': path, 'kwargs': kwargs, 'authorizer': kwargs.get("authorizer")}
def _register_handler(user_handler):
handler_name = name
if handler_name is None:
handler_name = user_handler.__name__
if registration_kwargs is not None:
kwargs = registration_kwargs
else:
kwargs = {}
if kwargs['authorizer'] == bp_authorizers.jwt_authorizer \
or kwargs['authorizer'] == bp_authorizers.api_key_authorizer:
def _user_handler(context=None, **args):
if context is not None:
args['context'] = context
else:
authorizer_context = app.current_request.context['authorizer']
if kwargs['authorizer'] == bp_authorizers.jwt_authorizer:
args['context'] = authorizers.jwt_context(authorizer_context)
else:
args['context'] = authorizer_context
return user_handler(**args)
wrapped = self._wrap_handler(handler_type, handler_name, _user_handler)
self._register_handler(handler_type, handler_name, _user_handler, wrapped, kwargs)
else:
wrapped = self._wrap_handler(handler_type, handler_name, user_handler)
self._register_handler(handler_type, handler_name, user_handler, wrapped, kwargs)
return wrapped
return _register_handler
app.route = app_route.__get__(app, Chalice)
def app_schedule(self, expression, name=None, description=''):
handler_type = 'schedule'
registration_kwargs = {'expression': expression,
'description': description}
def _register_handler(user_handler):
handler_name = name
if handler_name is None:
handler_name = user_handler.__name__
kwargs = registration_kwargs
cron_expression = kwargs["expression"].to_string()[len("cron("):-1]
if len(cron_expression.split(" ")) > 5:
cron_expression = " ".join(cron_expression.split(" ")[:-1])
cron_expression = cron_expression.replace("?", "*")
cron_shell(user_handler, cron_expression)
wrapped = self._wrap_handler(handler_type, handler_name, user_handler)
self._register_handler(handler_type, handler_name, user_handler, wrapped, kwargs)
return wrapped
return _register_handler
app.schedule = app_schedule.__get__(app, Chalice)
def spawn(function, args):
th = threading.Thread(target=function, kwargs=args)
th.setDaemon(True)
th.start()
def cron_shell(function, cron_expression):
def to_start():
scheduler = sched.scheduler(time.time, time.sleep)
citer = croniter(cron_expression, base_time)
while True:
next_execution = citer.get_next(datetime)
print(f"{function.__name__} next execution: {next_execution}")
scheduler.enterabs(next_execution.timestamp(), 1, function, argument=(None,))
scheduler.run()
print(f"{function.__name__} executed: {next_execution}")
spawn(to_start, None)
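The deleted cron_shell used croniter to turn a cron expression into successive absolute run times, which it then fed to sched on a daemon thread. The croniter part of that mechanism, as a standalone sketch:

from datetime import datetime

import pytz
from croniter import croniter

it = croniter("0 * * * *", datetime.now(pytz.utc))  # top of every hour
print(it.get_next(datetime))  # next run as an aware datetime
print(it.get_next(datetime))  # the run after that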


@@ -1,127 +0,0 @@
from chalice import Blueprint, Response
from chalicelib import _overrides
from chalicelib.blueprints import bp_authorizers
from chalicelib.core import sessions, events, jobs, projects
from chalicelib.utils.TimeUTC import TimeUTC
app = Blueprint(__name__)
_overrides.chalice_app(app)
@app.route('/v1/{projectKey}/users/{userId}/sessions', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer)
def get_user_sessions(projectKey, userId, context):
projectId = projects.get_internal_project_id(projectKey)
params = app.current_request.query_params
if params is None:
params = {}
return {
'data': sessions.get_user_sessions(
project_id=projectId,
user_id=userId,
start_date=params.get('start_date'),
end_date=params.get('end_date')
)
}
@app.route('/v1/{projectKey}/sessions/{sessionId}/events', methods=['GET'],
authorizer=bp_authorizers.api_key_authorizer)
def get_session_events(projectKey, sessionId, context):
projectId = projects.get_internal_project_id(projectKey)
return {
'data': events.get_by_sessionId2_pg(
project_id=projectId,
session_id=sessionId
)
}
@app.route('/v1/{projectKey}/users/{userId}', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer)
def get_user_details(projectKey, userId, context):
projectId = projects.get_internal_project_id(projectKey)
return {
'data': sessions.get_session_user(
project_id=projectId,
user_id=userId
)
}
pass
@app.route('/v1/{projectKey}/users/{userId}', methods=['DELETE'], authorizer=bp_authorizers.api_key_authorizer)
def schedule_to_delete_user_data(projectKey, userId, context):
projectId = projects.get_internal_project_id(projectKey)
data = app.current_request.json_body
data["action"] = "delete_user_data"
data["reference_id"] = userId
data["description"] = f"Delete user sessions of userId = {userId}"
data["start_at"] = TimeUTC.to_human_readable(TimeUTC.midnight(1))
record = jobs.create(project_id=projectId, data=data)
return {
'data': record
}
@app.route('/v1/{projectKey}/jobs', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer)
def get_jobs(projectKey, context):
projectId = projects.get_internal_project_id(projectKey)
return {
'data': jobs.get_all(project_id=projectId)
}
pass
@app.route('/v1/{projectKey}/jobs/{jobId}', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer)
def get_job(projectKey, jobId, context):
return {
'data': jobs.get(job_id=jobId)
}
pass
@app.route('/v1/{projectKey}/jobs/{jobId}', methods=['DELETE'], authorizer=bp_authorizers.api_key_authorizer)
def cancel_job(projectKey, jobId, context):
job = jobs.get(job_id=jobId)
job_not_found = len(job.keys()) == 0
if job_not_found or job["status"] == jobs.JobStatus.COMPLETED or job["status"] == jobs.JobStatus.CANCELLED:
return Response(status_code=501, body="The request job has already been canceled/completed (or was not found).")
job["status"] = "cancelled"
return {
'data': jobs.update(job_id=jobId, job=job)
}
@app.route('/v1/projects', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer)
def get_projects(context):
records = projects.get_projects(tenant_id=context['tenantId'])
for record in records:
del record['projectId']
return {
'data': records
}
@app.route('/v1/projects/{projectKey}', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer)
def get_project(projectKey, context):
return {
'data': projects.get_project_by_key(tenant_id=context['tenantId'], project_key=projectKey)
}
@app.route('/v1/projects', methods=['POST'], authorizer=bp_authorizers.api_key_authorizer)
def create_project(context):
data = app.current_request.json_body
record = projects.create(
tenant_id=context['tenantId'],
user_id=None,
data=data,
skip_authorization=True
)
del record['data']['projectId']
return record


@@ -1,37 +0,0 @@
from chalice import Blueprint, AuthResponse
from chalicelib.core import authorizers
from chalicelib.core import users
app = Blueprint(__name__)
@app.authorizer()
def api_key_authorizer(auth_request):
r = authorizers.api_key_authorizer(auth_request.token)
if r is None:
return AuthResponse(routes=[], principal_id=None)
r["authorizer_identity"] = "api_key"
print(r)
return AuthResponse(
routes=['*'],
principal_id=r['tenantId'],
context=r
)
@app.authorizer(ttl_seconds=60)
def jwt_authorizer(auth_request):
jwt_payload = authorizers.jwt_authorizer(auth_request.token)
if jwt_payload is None \
or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
or not users.auth_exists(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"],
jwt_iat=jwt_payload["iat"], jwt_aud=jwt_payload["aud"]):
return AuthResponse(routes=[], principal_id=None)
jwt_payload["authorizer_identity"] = "jwt"
print(jwt_payload)
return AuthResponse(
routes=['*'],
principal_id=jwt_payload['userId'],
context=jwt_payload
)


@@ -1,909 +0,0 @@
from chalice import Blueprint
from chalice import Response
from chalicelib import _overrides
from chalicelib.blueprints import bp_authorizers
from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assignments, projects, \
sessions_metas, alerts, funnels, issues, integrations_manager, errors_favorite_viewed, metadata, \
log_tool_elasticsearch, log_tool_datadog, \
log_tool_stackdriver, reset_password, sessions_favorite_viewed, \
log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, errors, sessions, \
log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, \
assist, heatmaps, mobile
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import email_helper
from chalicelib.utils.helper import environ
app = Blueprint(__name__)
_overrides.chalice_app(app)
@app.route('/{projectId}/sessions2/favorite', methods=['GET'])
def get_favorite_sessions2(projectId, context):
params = app.current_request.query_params
return {
'data': sessions.get_favorite_sessions(project_id=projectId, user_id=context["userId"], include_viewed=True)
}
@app.route('/{projectId}/sessions2/{sessionId}', methods=['GET'])
def get_session2(projectId, sessionId, context):
data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, user_id=context["userId"],
include_fav_viewed=True, group_metadata=True)
if data is None:
return {"errors": ["session not found"]}
sessions_favorite_viewed.view_session(project_id=projectId, user_id=context['userId'], session_id=sessionId)
return {
'data': data
}
@app.route('/{projectId}/sessions2/{sessionId}/favorite', methods=['GET'])
def add_remove_favorite_session2(projectId, sessionId, context):
return {
"data": sessions_favorite_viewed.favorite_session(project_id=projectId, user_id=context['userId'],
session_id=sessionId)}
@app.route('/{projectId}/sessions2/{sessionId}/assign', methods=['GET'])
def assign_session(projectId, sessionId, context):
data = sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId,
tenant_id=context['tenantId'],
user_id=context["userId"])
if "errors" in data:
return data
return {
'data': data
}
@app.route('/{projectId}/sessions2/{sessionId}/errors/{errorId}/sourcemaps', methods=['GET'])
def get_error_trace(projectId, sessionId, errorId, context):
data = errors.get_trace(project_id=projectId, error_id=errorId)
if "errors" in data:
return data
return {
'data': data
}
@app.route('/{projectId}/sessions2/{sessionId}/assign/{issueId}', methods=['GET'])
def assign_session(projectId, sessionId, issueId, context):
data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId,
tenant_id=context['tenantId'], user_id=context["userId"])
if "errors" in data:
return data
return {
'data': data
}
@app.route('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', methods=['POST', 'PUT'])
def comment_assignment(projectId, sessionId, issueId, context):
data = app.current_request.json_body
data = sessions_assignments.comment(tenant_id=context['tenantId'], project_id=projectId,
session_id=sessionId, assignment_id=issueId,
user_id=context["userId"], message=data["message"])
if "errors" in data.keys():
return data
return {
'data': data
}
@app.route('/{projectId}/events/search', methods=['GET'])
def events_search(projectId, context):
params = app.current_request.query_params
if params is None:
return {"data": []}
q = params.get('q', '')
if len(q) == 0:
return {"data": []}
result = events.search_pg2(q, params.get('type', ''), project_id=projectId, source=params.get('source'),
key=params.get("key"))
return result
@app.route('/{projectId}/sessions/search2', methods=['POST'])
def sessions_search2(projectId, context):
data = app.current_request.json_body
data = sessions.search2_pg(data, projectId, user_id=context["userId"])
return {'data': data}
@app.route('/{projectId}/sessions/filters', methods=['GET'])
def session_filter_values(projectId, context):
return {'data': sessions_metas.get_key_values(projectId)}
@app.route('/{projectId}/sessions/filters/top', methods=['GET'])
def session_top_filter_values(projectId, context):
return {'data': sessions_metas.get_top_key_values(projectId)}
@app.route('/{projectId}/sessions/filters/search', methods=['GET'])
def get_session_filters_meta(projectId, context):
params = app.current_request.query_params
if params is None:
return {"data": []}
meta_type = params.get('type', '')
if len(meta_type) == 0:
return {"data": []}
q = params.get('q', '')
if len(q) == 0:
return {"data": []}
return sessions_metas.search(project_id=projectId, meta_type=meta_type, text=q)
@app.route('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}',
methods=['POST', 'PUT'])
def integration_notify(projectId, integration, integrationId, source, sourceId, context):
data = app.current_request.json_body
comment = None
if "comment" in data:
comment = data["comment"]
if integration == "slack":
args = {"tenant_id": context["tenantId"],
"user": context['email'], "comment": comment, "project_id": projectId,
"integration_id": integrationId}
if source == "sessions":
return Slack.share_session(session_id=sourceId, **args)
elif source == "errors":
return Slack.share_error(error_id=sourceId, **args)
return {"data": None}
@app.route('/integrations/sentry', methods=['GET'])
def get_all_sentry(context):
return {"data": log_tool_sentry.get_all(tenant_id=context["tenantId"])}
@app.route('/{projectId}/integrations/sentry', methods=['GET'])
def get_sentry(projectId, context):
return {"data": log_tool_sentry.get(project_id=projectId)}
@app.route('/{projectId}/integrations/sentry', methods=['POST', 'PUT'])
def add_edit_sentry(projectId, context):
data = app.current_request.json_body
return {"data": log_tool_sentry.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}
@app.route('/{projectId}/integrations/sentry', methods=['DELETE'])
def delete_sentry(projectId, context):
return {"data": log_tool_sentry.delete(tenant_id=context["tenantId"], project_id=projectId)}
@app.route('/{projectId}/integrations/sentry/events/{eventId}', methods=['GET'])
def proxy_sentry(projectId, eventId, context):
return {"data": log_tool_sentry.proxy_get(tenant_id=context["tenantId"], project_id=projectId, event_id=eventId)}
@app.route('/integrations/datadog', methods=['GET'])
def get_all_datadog(context):
return {"data": log_tool_datadog.get_all(tenant_id=context["tenantId"])}
@app.route('/{projectId}/integrations/datadog', methods=['GET'])
def get_datadog(projectId, context):
return {"data": log_tool_datadog.get(project_id=projectId)}
@app.route('/{projectId}/integrations/datadog', methods=['POST', 'PUT'])
def add_edit_datadog(projectId, context):
data = app.current_request.json_body
return {"data": log_tool_datadog.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}
@app.route('/{projectId}/integrations/datadog', methods=['DELETE'])
def delete_datadog(projectId, context):
return {"data": log_tool_datadog.delete(tenant_id=context["tenantId"], project_id=projectId)}
@app.route('/integrations/stackdriver', methods=['GET'])
def get_all_stackdriver(context):
return {"data": log_tool_stackdriver.get_all(tenant_id=context["tenantId"])}
@app.route('/{projectId}/integrations/stackdriver', methods=['GET'])
def get_stackdriver(projectId, context):
return {"data": log_tool_stackdriver.get(project_id=projectId)}
@app.route('/{projectId}/integrations/stackdriver', methods=['POST', 'PUT'])
def add_edit_stackdriver(projectId, context):
data = app.current_request.json_body
return {"data": log_tool_stackdriver.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}
@app.route('/{projectId}/integrations/stackdriver', methods=['DELETE'])
def delete_stackdriver(projectId, context):
return {"data": log_tool_stackdriver.delete(tenant_id=context["tenantId"], project_id=projectId)}
@app.route('/integrations/newrelic', methods=['GET'])
def get_all_newrelic(context):
return {"data": log_tool_newrelic.get_all(tenant_id=context["tenantId"])}
@app.route('/{projectId}/integrations/newrelic', methods=['GET'])
def get_newrelic(projectId, context):
return {"data": log_tool_newrelic.get(project_id=projectId)}
@app.route('/{projectId}/integrations/newrelic', methods=['POST', 'PUT'])
def add_edit_newrelic(projectId, context):
data = app.current_request.json_body
return {"data": log_tool_newrelic.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}
@app.route('/{projectId}/integrations/newrelic', methods=['DELETE'])
def delete_newrelic(projectId, context):
return {"data": log_tool_newrelic.delete(tenant_id=context["tenantId"], project_id=projectId)}
@app.route('/integrations/rollbar', methods=['GET'])
def get_all_rollbar(context):
return {"data": log_tool_rollbar.get_all(tenant_id=context["tenantId"])}
@app.route('/{projectId}/integrations/rollbar', methods=['GET'])
def get_rollbar(projectId, context):
return {"data": log_tool_rollbar.get(project_id=projectId)}
@app.route('/{projectId}/integrations/rollbar', methods=['POST', 'PUT'])
def add_edit_rollbar(projectId, context):
data = app.current_request.json_body
return {"data": log_tool_rollbar.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}
@app.route('/{projectId}/integrations/rollbar', methods=['DELETE'])
def delete_datadog(projectId, context):
return {"data": log_tool_rollbar.delete(tenant_id=context["tenantId"], project_id=projectId)}
@app.route('/integrations/bugsnag/list_projects', methods=['POST'])
def list_projects_bugsnag(context):
data = app.current_request.json_body
return {"data": log_tool_bugsnag.list_projects(auth_token=data["authorizationToken"])}
@app.route('/integrations/bugsnag', methods=['GET'])
def get_all_bugsnag(context):
return {"data": log_tool_bugsnag.get_all(tenant_id=context["tenantId"])}
@app.route('/{projectId}/integrations/bugsnag', methods=['GET'])
def get_bugsnag(projectId, context):
return {"data": log_tool_bugsnag.get(project_id=projectId)}
@app.route('/{projectId}/integrations/bugsnag', methods=['POST', 'PUT'])
def add_edit_bugsnag(projectId, context):
data = app.current_request.json_body
return {"data": log_tool_bugsnag.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}
@app.route('/{projectId}/integrations/bugsnag', methods=['DELETE'])
def delete_bugsnag(projectId, context):
return {"data": log_tool_bugsnag.delete(tenant_id=context["tenantId"], project_id=projectId)}
@app.route('/integrations/cloudwatch/list_groups', methods=['POST'])
def list_groups_cloudwatch(context):
data = app.current_request.json_body
return {"data": log_tool_cloudwatch.list_log_groups(aws_access_key_id=data["awsAccessKeyId"],
aws_secret_access_key=data["awsSecretAccessKey"],
region=data["region"])}
@app.route('/integrations/cloudwatch', methods=['GET'])
def get_all_cloudwatch(context):
return {"data": log_tool_cloudwatch.get_all(tenant_id=context["tenantId"])}
@app.route('/{projectId}/integrations/cloudwatch', methods=['GET'])
def get_cloudwatch(projectId, context):
return {"data": log_tool_cloudwatch.get(project_id=projectId)}
@app.route('/{projectId}/integrations/cloudwatch', methods=['POST', 'PUT'])
def add_edit_cloudwatch(projectId, context):
data = app.current_request.json_body
return {"data": log_tool_cloudwatch.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}
@app.route('/{projectId}/integrations/cloudwatch', methods=['DELETE'])
def delete_cloudwatch(projectId, context):
return {"data": log_tool_cloudwatch.delete(tenant_id=context["tenantId"], project_id=projectId)}
@app.route('/integrations/elasticsearch', methods=['GET'])
def get_all_elasticsearch(context):
return {"data": log_tool_elasticsearch.get_all(tenant_id=context["tenantId"])}
@app.route('/{projectId}/integrations/elasticsearch', methods=['GET'])
def get_elasticsearch(projectId, context):
return {"data": log_tool_elasticsearch.get(project_id=projectId)}
@app.route('/integrations/elasticsearch/test', methods=['POST'])
def test_elasticsearch_connection(context):
data = app.current_request.json_body
return {"data": log_tool_elasticsearch.ping(tenant_id=context["tenantId"], **data)}
@app.route('/{projectId}/integrations/elasticsearch', methods=['POST', 'PUT'])
def add_edit_elasticsearch(projectId, context):
data = app.current_request.json_body
return {"data": log_tool_elasticsearch.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}
@app.route('/{projectId}/integrations/elasticsearch', methods=['DELETE'])
def delete_elasticsearch(projectId, context):
return {"data": log_tool_elasticsearch.delete(tenant_id=context["tenantId"], project_id=projectId)}
@app.route('/integrations/sumologic', methods=['GET'])
def get_all_sumologic(context):
return {"data": log_tool_sumologic.get_all(tenant_id=context["tenantId"])}
@app.route('/{projectId}/integrations/sumologic', methods=['GET'])
def get_sumologic(projectId, context):
return {"data": log_tool_sumologic.get(project_id=projectId)}
@app.route('/{projectId}/integrations/sumologic', methods=['POST', 'PUT'])
def add_edit_sumologic(projectId, context):
data = app.current_request.json_body
return {"data": log_tool_sumologic.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}
@app.route('/{projectId}/integrations/sumologic', methods=['DELETE'])
def delete_sumologic(projectId, context):
return {"data": log_tool_sumologic.delete(tenant_id=context["tenantId"], project_id=projectId)}
@app.route('/integrations/issues', methods=['GET'])
def get_integration_status(context):
error, integration = integrations_manager.get_integration(tenant_id=context["tenantId"],
user_id=context["userId"])
if error is not None:
return {"data": {}}
return {"data": integration.get_obfuscated()}
@app.route('/integrations/jira', methods=['POST', 'PUT'])
def add_edit_jira_cloud(context):
data = app.current_request.json_body
error, integration = integrations_manager.get_integration(tool=integration_jira_cloud.PROVIDER,
tenant_id=context["tenantId"],
user_id=context["userId"])
if error is not None:
return error
return {"data": integration.add_edit(data=data)}
@app.route('/integrations/github', methods=['POST', 'PUT'])
def add_edit_github(context):
data = app.current_request.json_body
error, integration = integrations_manager.get_integration(tool=integration_github.PROVIDER,
tenant_id=context["tenantId"],
user_id=context["userId"])
if error is not None:
return error
return {"data": integration.add_edit(data=data)}
@app.route('/integrations/issues', methods=['DELETE'])
def delete_default_issue_tracking_tool(context):
error, integration = integrations_manager.get_integration(tenant_id=context["tenantId"],
user_id=context["userId"])
if error is not None:
return error
return {"data": integration.delete()}
@app.route('/integrations/jira', methods=['DELETE'])
def delete_jira_cloud(context):
error, integration = integrations_manager.get_integration(tool=integration_jira_cloud.PROVIDER,
tenant_id=context["tenantId"],
user_id=context["userId"])
if error is not None:
return error
return {"data": integration.delete()}
@app.route('/integrations/github', methods=['DELETE'])
def delete_github(context):
error, integration = integrations_manager.get_integration(tool=integration_github.PROVIDER,
tenant_id=context["tenantId"],
user_id=context["userId"])
if error is not None:
return error
return {"data": integration.delete()}
@app.route('/integrations/issues/list_projects', methods=['GET'])
def get_all_issue_tracking_projects(context):
error, integration = integrations_manager.get_integration(tenant_id=context["tenantId"],
user_id=context["userId"])
if error is not None:
return error
data = integration.issue_handler.get_projects()
if "errors" in data:
return data
return {"data": data}
@app.route('/integrations/issues/{integrationProjectId}', methods=['GET'])
def get_integration_metadata(integrationProjectId, context):
error, integration = integrations_manager.get_integration(tenant_id=context["tenantId"],
user_id=context["userId"])
if error is not None:
return error
data = integration.issue_handler.get_metas(integrationProjectId)
if "errors" in data.keys():
return data
return {"data": data}
@app.route('/{projectId}/assignments', methods=['GET'])
def get_all_assignments(projectId, context):
data = sessions_assignments.get_all(project_id=projectId, user_id=context["userId"])
return {
'data': data
}
@app.route('/{projectId}/sessions2/{sessionId}/assign/projects/{integrationProjectId}', methods=['POST', 'PUT'])
def create_issue_assignment(projectId, sessionId, integrationProjectId, context):
data = app.current_request.json_body
data = sessions_assignments.create_new_assignment(tenant_id=context['tenantId'], project_id=projectId,
session_id=sessionId,
creator_id=context["userId"], assignee=data["assignee"],
description=data["description"], title=data["title"],
issue_type=data["issueType"],
integration_project_id=integrationProjectId)
if "errors" in data.keys():
return data
return {
'data': data
}
@app.route('/{projectId}/gdpr', methods=['GET'])
def get_gdpr(projectId, context):
return {"data": projects.get_gdpr(project_id=projectId)}
@app.route('/{projectId}/gdpr', methods=['POST', 'PUT'])
def edit_gdpr(projectId, context):
data = app.current_request.json_body
return {"data": projects.edit_gdpr(project_id=projectId, gdpr=data)}
@app.route('/password/reset-link', methods=['PUT', 'POST'], authorizer=None)
def reset_password_handler():
data = app.current_request.json_body
if "email" not in data or len(data["email"]) < 5:
return {"errors": ["please provide a valid email address"]}
return reset_password.reset(data)
@app.route('/{projectId}/metadata', methods=['GET'])
def get_metadata(projectId, context):
return {"data": metadata.get(project_id=projectId)}
@app.route('/{projectId}/metadata/list', methods=['POST', 'PUT'])
def add_edit_delete_metadata(projectId, context):
data = app.current_request.json_body
return metadata.add_edit_delete(tenant_id=context["tenantId"], project_id=projectId, new_metas=data["list"])
@app.route('/{projectId}/metadata', methods=['POST', 'PUT'])
def add_metadata(projectId, context):
data = app.current_request.json_body
return metadata.add(tenant_id=context["tenantId"], project_id=projectId, new_name=data["key"])
@app.route('/{projectId}/metadata/{index}', methods=['POST', 'PUT'])
def edit_metadata(projectId, index, context):
data = app.current_request.json_body
return metadata.edit(tenant_id=context["tenantId"], project_id=projectId, index=int(index),
new_name=data["key"])
@app.route('/{projectId}/metadata/{index}', methods=['DELETE'])
def delete_metadata(projectId, index, context):
return metadata.delete(tenant_id=context["tenantId"], project_id=projectId, index=index)
@app.route('/{projectId}/metadata/search', methods=['GET'])
def search_metadata(projectId, context):
params = app.current_request.query_params
q = params.get('q', '')
key = params.get('key', '')
if len(q) == 0 and len(key) == 0:
return {"data": []}
if len(q) == 0:
return {"errors": ["please provide a value for search"]}
if len(key) == 0:
return {"errors": ["please provide a key for search"]}
return metadata.search(tenant_id=context["tenantId"], project_id=projectId, value=q, key=key)
@app.route('/{projectId}/integration/sources', methods=['GET'])
def search_integrations(projectId, context):
return log_tools.search(project_id=projectId)
@app.route('/async/email_assignment', methods=['POST', 'PUT'], authorizer=None)
def async_send_signup_emails():
data = app.current_request.json_body
if data.pop("auth") != environ["async_Token"]:
return {}
email_helper.send_assign_session(recipient=data["email"], link=data["link"], message=data["message"])
@app.route('/async/funnel/weekly_report2', methods=['POST', 'PUT'], authorizer=None)
def async_weekly_report():
print("=========================> Sending weekly report")
data = app.current_request.json_body
if data.pop("auth") != environ["async_Token"]:
return {}
email_helper.weekly_report2(recipients=data["email"], data=data.get("data", None))
@app.route('/async/basic/{step}', methods=['POST', 'PUT'], authorizer=None)
def async_basic_emails(step):
data = app.current_request.json_body
if data.pop("auth") != environ["async_Token"]:
return {}
if step.lower() == "member_invitation":
email_helper.send_team_invitation(recipient=data["email"], invitation_link=data["invitationLink"],
client_id=data["clientId"], sender_name=data["senderName"])
@app.route('/{projectId}/sample_rate', methods=['GET'])
def get_capture_status(projectId, context):
return {"data": projects.get_capture_status(project_id=projectId)}
@app.route('/{projectId}/sample_rate', methods=['POST', 'PUT'])
def update_capture_status(projectId, context):
data = app.current_request.json_body
return {"data": projects.update_capture_status(project_id=projectId, changes=data)}
@app.route('/announcements', methods=['GET'])
def get_all_announcements(context):
return {"data": announcements.get_all(context["userId"])}
@app.route('/announcements/view', methods=['GET'])
def get_all_announcements(context):
return {"data": announcements.view(user_id=context["userId"])}
@app.route('/{projectId}/errors/{errorId}/{action}', methods=['GET'])
def add_remove_favorite_error(projectId, errorId, action, context):
if action == "favorite":
return errors_favorite_viewed.favorite_error(project_id=projectId, user_id=context['userId'], error_id=errorId)
elif action == "sessions":
params = app.current_request.query_params
if params is None:
params = {}
start_date = params.get("startDate")
end_date = params.get("endDate")
return {
"data": errors.get_sessions(project_id=projectId, user_id=context['userId'], error_id=errorId,
start_date=start_date, end_date=end_date)}
elif action in list(errors.ACTION_STATE.keys()):
return errors.change_state(project_id=projectId, user_id=context['userId'], error_id=errorId, action=action)
else:
return {"errors": ["undefined action"]}
@app.route('/{projectId}/errors/merge', methods=['POST'])
def errors_merge(projectId, context):
data = app.current_request.json_body
data = errors.merge(error_ids=data.get("errors", []))
return data
@app.route('/show_banner', methods=['GET'])
def errors_merge(context):
return {"data": False}
@app.route('/{projectId}/alerts', methods=['POST', 'PUT'])
def create_alert(projectId, context):
data = app.current_request.json_body
return alerts.create(projectId, data)
@app.route('/{projectId}/alerts', methods=['GET'])
def get_all_alerts(projectId, context):
return {"data": alerts.get_all(projectId)}
@app.route('/{projectId}/alerts/{alertId}', methods=['GET'])
def get_alert(projectId, alertId, context):
return {"data": alerts.get(alertId)}
@app.route('/{projectId}/alerts/{alertId}', methods=['POST', 'PUT'])
def update_alert(projectId, alertId, context):
data = app.current_request.json_body
return alerts.update(alertId, data)
@app.route('/{projectId}/alerts/{alertId}', methods=['DELETE'])
def delete_alert(projectId, alertId, context):
return alerts.delete(projectId, alertId)
@app.route('/{projectId}/funnels', methods=['POST', 'PUT'])
def add_funnel(projectId, context):
data = app.current_request.json_body
return funnels.create(project_id=projectId,
user_id=context['userId'],
name=data["name"],
filter=data["filter"],
is_public=data.get("isPublic", False))
@app.route('/{projectId}/funnels', methods=['GET'])
def get_funnels(projectId, context):
params = app.current_request.query_params
if params is None:
params = {}
return {"data": funnels.get_by_user(project_id=projectId,
user_id=context['userId'],
range_value=None,
start_date=None,
end_date=None,
details=False)}
@app.route('/{projectId}/funnels/details', methods=['GET'])
def get_funnels_with_details(projectId, context):
params = app.current_request.query_params
if params is None:
params = {}
return {"data": funnels.get_by_user(project_id=projectId,
user_id=context['userId'],
range_value=params.get("rangeValue", None),
start_date=params.get('startDate', None),
end_date=params.get('endDate', None),
details=True)}
@app.route('/{projectId}/funnels/issue_types', methods=['GET'])
def get_possible_issue_types(projectId, context):
params = app.current_request.query_params
if params is None:
params = {}
return {"data": funnels.get_possible_issue_types(project_id=projectId)}
@app.route('/{projectId}/funnels/{funnelId}/insights', methods=['GET'])
def get_funnel_insights(projectId, funnelId, context):
params = app.current_request.query_params
if params is None:
params = {}
return funnels.get_top_insights(funnel_id=funnelId, project_id=projectId,
range_value=params.get("range_value", None),
start_date=params.get('startDate', None),
end_date=params.get('endDate', None))
@app.route('/{projectId}/funnels/{funnelId}/insights', methods=['POST', 'PUT'])
def get_funnel_insights_on_the_fly(projectId, funnelId, context):
params = app.current_request.query_params
if params is None:
params = {}
data = app.current_request.json_body
if data is None:
data = {}
return funnels.get_top_insights_on_the_fly(funnel_id=funnelId, project_id=projectId, data={**params, **data})
@app.route('/{projectId}/funnels/{funnelId}/issues', methods=['GET'])
def get_funnel_issues(projectId, funnelId, context):
params = app.current_request.query_params
if params is None:
params = {}
return funnels.get_issues(funnel_id=funnelId, project_id=projectId,
range_value=params.get("range_value", None),
start_date=params.get('startDate', None), end_date=params.get('endDate', None))
@app.route('/{projectId}/funnels/{funnelId}/issues', methods=['POST', 'PUT'])
def get_funnel_issues_on_the_fly(projectId, funnelId, context):
params = app.current_request.query_params
if params is None:
params = {}
data = app.current_request.json_body
if data is None:
data = {}
return {"data": funnels.get_issues_on_the_fly(funnel_id=funnelId, project_id=projectId, data={**params, **data})}
@app.route('/{projectId}/funnels/{funnelId}/sessions', methods=['GET'])
def get_funnel_sessions(projectId, funnelId, context):
params = app.current_request.query_params
if params is None:
params = {}
return {"data": funnels.get_sessions(funnel_id=funnelId, user_id=context['userId'], project_id=projectId,
range_value=params.get("range_value", None),
start_date=params.get('startDate', None),
end_date=params.get('endDate', None))}
@app.route('/{projectId}/funnels/{funnelId}/sessions', methods=['POST', 'PUT'])
def get_funnel_sessions_on_the_fly(projectId, funnelId, context):
params = app.current_request.query_params
if params is None:
params = {}
data = app.current_request.json_body
if data is None:
data = {}
return {"data": funnels.get_sessions_on_the_fly(funnel_id=funnelId, user_id=context['userId'], project_id=projectId,
data={**params, **data})}
@app.route('/{projectId}/funnels/issues/{issueId}/sessions', methods=['GET'])
def get_issue_sessions(projectId, issueId, context):
params = app.current_request.query_params
if params is None:
params = {}
issue = issues.get(project_id=projectId, issue_id=issueId)
return {
"data": {"sessions": sessions.search_by_issue(user_id=context["userId"], project_id=projectId, issue=issue,
start_date=params.get('startDate', None),
end_date=params.get('endDate', None)),
"issue": issue}}
@app.route('/{projectId}/funnels/{funnelId}/issues/{issueId}/sessions', methods=['POST', 'PUT'])
def get_funnel_issue_sessions(projectId, funnelId, issueId, context):
data = app.current_request.json_body
data = funnels.search_by_issue(project_id=projectId, user_id=context["userId"], issue_id=issueId,
funnel_id=funnelId, data=data)
if "errors" in data:
return data
if data.get("issue") is None:
data["issue"] = issues.get(project_id=projectId, issue_id=issueId)
return {
"data": data
}
@app.route('/{projectId}/funnels/{funnelId}', methods=['GET'])
def get_funnel(projectId, funnelId, context):
data = funnels.get(funnel_id=funnelId,
project_id=projectId)
if data is None:
return {"errors": ["funnel not found"]}
return {"data": data}
@app.route('/{projectId}/funnels/{funnelId}', methods=['POST', 'PUT'])
def edit_funnel(projectId, funnelId, context):
data = app.current_request.json_body
return funnels.update(funnel_id=funnelId,
user_id=context['userId'],
name=data.get("name"),
filter=data.get("filter"),
is_public=data.get("isPublic"))
@app.route('/{projectId}/funnels/{funnelId}', methods=['DELETE'])
def delete_filter(projectId, funnelId, context):
return funnels.delete(user_id=context['userId'], funnel_id=funnelId, project_id=projectId)
@app.route('/{projectId}/sourcemaps', methods=['PUT'], authorizer=bp_authorizers.api_key_authorizer)
def sign_sourcemap_for_upload(projectId, context):
data = app.current_request.json_body
project_id = projects.get_internal_project_id(projectId)
if project_id is None:
return Response(status_code=400, body='invalid projectId')
return {"data": sourcemaps.presign_upload_urls(project_id=project_id, urls=data["URL"])}
@app.route('/config/weekly_report', methods=['GET'])
def get_weekly_report_config(context):
return {"data": weekly_report.get_config(user_id=context['userId'])}
@app.route('/config/weekly_report', methods=['POST', 'PUT'])
def get_weekly_report_config(context):
data = app.current_request.json_body
return {"data": weekly_report.edit_config(user_id=context['userId'], weekly_report=data.get("weeklyReport", True))}
@app.route('/{projectId}/issue_types', methods=['GET'])
def issue_types(projectId, context):
# return {"data": issues.get_types_by_project(project_id=projectId)}
return {"data": issues.get_all_types()}
@app.route('/issue_types', methods=['GET'])
def all_issue_types(context):
return {"data": issues.get_all_types()}
@app.route('/flows', methods=['GET', 'PUT', 'POST', 'DELETE'])
@app.route('/{projectId}/flows', methods=['GET', 'PUT', 'POST', 'DELETE'])
def removed_endpoints(projectId=None, context=None):
return Response(body={"errors": ["Endpoint no longer available"]}, status_code=410)
@app.route('/{projectId}/assist/sessions', methods=['GET'])
def sessions_live(projectId, context):
data = assist.get_live_sessions(projectId)
return {'data': data}
@app.route('/{projectId}/assist/sessions', methods=['POST'])
def sessions_live_search(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
data = assist.get_live_sessions(projectId, filters=data.get("filters"))
return {'data': data}
@app.route('/{projectId}/heatmaps/url', methods=['POST'])
def get_heatmaps_by_url(projectId, context):
data = app.current_request.json_body
return {"data": heatmaps.get_by_url(project_id=projectId, data=data)}
@app.route('/general_stats', methods=['GET'], authorizer=None)
def get_general_stats():
return {"data": {"sessions:": sessions.count_all()}}
@app.route('/{projectId}/mobile/{sessionId}/urls', methods=['POST'])
def mobile_sign(projectId, sessionId, context):
data = app.current_request.json_body
return {"data": mobile.sign_keys(project_id=projectId, session_id=sessionId, keys=data["keys"])}

View file

@ -1,18 +0,0 @@
from chalice import Blueprint
from chalice import Cron
from chalicelib import _overrides
from chalicelib.core import reset_password, weekly_report, jobs
app = Blueprint(__name__)
_overrides.chalice_app(app)
@app.schedule(Cron('0', '*', '?', '*', '*', '*'))
def run_scheduled_jobs(event):
jobs.execute_jobs()
# Run every Monday.
@app.schedule(Cron('5', '0', '?', '*', 'MON', '*'))
def weekly_report2(event):
weekly_report.cron()
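# Note on the Cron expressions above: Chalice follows the AWS six-field order
# (minutes, hours, day-of-month, month, day-of-week, year), so
# Cron('0', '*', '?', '*', '*', '*') fires at the top of every hour and
# Cron('5', '0', '?', '*', 'MON', '*') fires at 00:05 UTC every Monday.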

View file

@ -1,460 +0,0 @@
from chalice import Blueprint, Response
from chalicelib import _overrides
from chalicelib.core import assist
from chalicelib.core import boarding
from chalicelib.core import errors
from chalicelib.core import license
from chalicelib.core import metadata, errors_favorite_viewed, slack, alerts, sessions, integrations_manager
from chalicelib.core import notifications
from chalicelib.core import projects
from chalicelib.core import signup
from chalicelib.core import tenants
from chalicelib.core import users
from chalicelib.core import webhook
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import captcha
from chalicelib.utils import helper
from chalicelib.utils.helper import environ
app = Blueprint(__name__)
_overrides.chalice_app(app)
@app.route('/login', methods=['POST'], authorizer=None)
def login():
data = app.current_request.json_body
if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]):
return {"errors": ["Invalid captcha."]}
r = users.authenticate(data['email'], data['password'],
for_plugin=False
)
if r is None:
return Response(status_code=401, body={
'errors': ["You've entered an invalid email or password."]
})
tenant_id = r.pop("tenantId")
r["limits"] = {
"teamMember": -1,
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(tenant_id)}
c = tenants.get_by_tenant_id(tenant_id)
c.pop("createdAt")
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
stack_integrations=True, version=True)
c["smtp"] = helper.has_smtp()
c["iceServers"] = assist.get_ice_servers()
return {
'jwt': r.pop('jwt'),
'data': {
"user": r,
"client": c
}
}
@app.route('/account', methods=['GET'])
def get_account(context):
r = users.get(tenant_id=context['tenantId'], user_id=context['userId'])
return {
'data': {
**r,
"limits": {
"teamMember": -1,
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(context['tenantId'])
},
**license.get_status(context["tenantId"]),
"smtp": helper.has_smtp(),
"iceServers": assist.get_ice_servers()
}
}
@app.route('/projects', methods=['GET'])
def get_projects(context):
return {"data": projects.get_projects(tenant_id=context["tenantId"], recording_state=True, gdpr=True, recorded=True,
stack_integrations=True, version=True)}
@app.route('/projects', methods=['POST', 'PUT'])
def create_project(context):
data = app.current_request.json_body
return projects.create(tenant_id=context["tenantId"], user_id=context["userId"], data=data)
@app.route('/projects/{projectId}', methods=['POST', 'PUT'])
def create_edit_project(projectId, context):
data = app.current_request.json_body
return projects.edit(tenant_id=context["tenantId"], user_id=context["userId"], data=data, project_id=projectId)
@app.route('/projects/{projectId}', methods=['GET'])
def get_project(projectId, context):
data = projects.get_project(tenant_id=context["tenantId"], project_id=projectId, include_last_session=True,
include_gdpr=True)
if data is None:
return {"errors": ["project not found"]}
return {"data": data}
@app.route('/projects/{projectId}', methods=['DELETE'])
def delete_project(projectId, context):
return projects.delete(tenant_id=context["tenantId"], user_id=context["userId"], project_id=projectId)
@app.route('/projects/limit', methods=['GET'])
def get_projects_limit(context):
return {"data": {
"current": projects.count_by_tenant(tenant_id=context["tenantId"]),
"remaining": -1
}}
@app.route('/client', methods=['GET'])
def get_client(context):
r = tenants.get_by_tenant_id(context['tenantId'])
if r is not None:
r.pop("createdAt")
r["projects"] = projects.get_projects(tenant_id=context['tenantId'], recording_state=True, recorded=True,
stack_integrations=True, version=True)
return {
'data': r
}
@app.route('/client/new_api_key', methods=['GET'])
def generate_new_tenant_token(context):
return {
'data': tenants.generate_new_api_key(context['tenantId'])
}
@app.route('/client', methods=['PUT', 'POST'])
def put_client(context):
data = app.current_request.json_body
return tenants.update(tenant_id=context["tenantId"], user_id=context["userId"], data=data)
@app.route('/signup', methods=['GET'], authorizer=None)
def get_all_signup():
return {"data": {"tenants": tenants.tenants_exists(),
"sso": None,
"ssoProvider": None,
"edition": helper.get_edition()}}
@app.route('/signup', methods=['POST', 'PUT'], authorizer=None)
def signup_handler():
data = app.current_request.json_body
return signup.create_step1(data)
@app.route('/integrations/slack', methods=['POST', 'PUT'])
def add_slack_client(context):
data = app.current_request.json_body
if "url" not in data or "name" not in data:
return {"errors": ["please provide a url and a name"]}
n = Slack.add_channel(tenant_id=context["tenantId"], url=data["url"], name=data["name"])
if n is None:
return {
"errors": ["We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
}
return {"data": n}
@app.route('/integrations/slack/{integrationId}', methods=['POST', 'PUT'])
def edit_slack_integration(integrationId, context):
data = app.current_request.json_body
if data.get("url") and len(data["url"]) > 0:
old = webhook.get(tenant_id=context["tenantId"], webhook_id=integrationId)
if old["endpoint"] != data["url"]:
if not Slack.say_hello(data["url"]):
return {
"errors": [
"We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
}
return {"data": webhook.update(tenant_id=context["tenantId"], webhook_id=integrationId,
changes={"name": data.get("name", ""), "endpoint": data["url"]})}
@app.route('/{projectId}/errors/search', methods=['POST'])
def errors_search(projectId, context):
data = app.current_request.json_body
params = app.current_request.query_params
if params is None:
params = {}
return errors.search(data, projectId, user_id=context["userId"], status=params.get("status", "ALL"),
favorite_only="favorite" in params)
@app.route('/{projectId}/errors/stats', methods=['GET'])
def errors_stats(projectId, context):
params = app.current_request.query_params
if params is None:
params = {}
return errors.stats(projectId, user_id=context["userId"], **params)
@app.route('/{projectId}/errors/{errorId}', methods=['GET'])
def errors_get_details(projectId, errorId, context):
params = app.current_request.query_params
if params is None:
params = {}
data = errors.get_details(project_id=projectId, user_id=context["userId"], error_id=errorId, **params)
if data.get("data") is not None:
errors_favorite_viewed.viewed_error(project_id=projectId, user_id=context['userId'], error_id=errorId)
return data
@app.route('/{projectId}/errors/{errorId}/stats', methods=['GET'])
def errors_get_details_right_column(projectId, errorId, context):
params = app.current_request.query_params
if params is None:
params = {}
data = errors.get_details_chart(project_id=projectId, user_id=context["userId"], error_id=errorId, **params)
return data
@app.route('/{projectId}/errors/{errorId}/sourcemaps', methods=['GET'])
def errors_get_details_sourcemaps(projectId, errorId, context):
data = errors.get_trace(project_id=projectId, error_id=errorId)
if "errors" in data:
return data
return {
'data': data
}
@app.route('/async/alerts/notifications/{step}', methods=['POST', 'PUT'], authorizer=None)
def send_alerts_notification_async(step):
data = app.current_request.json_body
if data.pop("auth") != environ["async_Token"]:
return {"errors": ["missing auth"]}
if step == "slack":
slack.send_batch(notifications_list=data.get("notifications"))
elif step == "email":
alerts.send_by_email_batch(notifications_list=data.get("notifications"))
elif step == "webhook":
webhook.trigger_batch(data_list=data.get("notifications"))
@app.route('/notifications', methods=['GET'])
def get_notifications(context):
return {"data": notifications.get_all(tenant_id=context['tenantId'], user_id=context['userId'])}
@app.route('/notifications/{notificationId}/view', methods=['GET'])
def view_notifications(notificationId, context):
return {"data": notifications.view_notification(notification_ids=[notificationId], user_id=context['userId'])}
@app.route('/notifications/view', methods=['POST', 'PUT'])
def batch_view_notifications(context):
data = app.current_request.json_body
return {"data": notifications.view_notification(notification_ids=data.get("ids", []),
startTimestamp=data.get("startTimestamp"),
endTimestamp=data.get("endTimestamp"),
user_id=context['userId'],
tenant_id=context["tenantId"])}
@app.route('/notifications', methods=['POST', 'PUT'], authorizer=None)
def create_notifications():
data = app.current_request.json_body
if data.get("token", "") != "nF46JdQqAM5v9KI9lPMpcu8o9xiJGvNNWOGL7TJP":
return {"errors": ["missing token"]}
return notifications.create(data.get("notifications", []))
@app.route('/boarding', methods=['GET'])
def get_boarding_state(context):
return {"data": boarding.get_state(tenant_id=context["tenantId"])}
@app.route('/boarding/installing', methods=['GET'])
def get_boarding_state_installing(context):
return {"data": boarding.get_state_installing(tenant_id=context["tenantId"])}
@app.route('/boarding/identify-users', methods=['GET'])
def get_boarding_state_identify_users(context):
return {"data": boarding.get_state_identify_users(tenant_id=context["tenantId"])}
@app.route('/boarding/manage-users', methods=['GET'])
def get_boarding_state_manage_users(context):
return {"data": boarding.get_state_manage_users(tenant_id=context["tenantId"])}
@app.route('/boarding/integrations', methods=['GET'])
def get_boarding_state_integrations(context):
return {"data": boarding.get_state_integrations(tenant_id=context["tenantId"])}
# this endpoint supports both jira & github based on `provider` attribute
@app.route('/integrations/issues', methods=['POST', 'PUT'])
def add_edit_jira_cloud_github(context):
data = app.current_request.json_body
provider = data.get("provider", "").upper()
error, integration = integrations_manager.get_integration(tool=provider, tenant_id=context["tenantId"],
user_id=context["userId"])
if error is not None:
return error
return {"data": integration.add_edit(data=data)}
@app.route('/integrations/slack/{integrationId}', methods=['GET'])
def get_slack_webhook(integrationId, context):
return {"data": webhook.get(tenant_id=context["tenantId"], webhook_id=integrationId)}
@app.route('/integrations/slack/channels', methods=['GET'])
def get_slack_integration(context):
return {"data": webhook.get_by_type(tenant_id=context["tenantId"], webhook_type='slack')}
@app.route('/integrations/slack/{integrationId}', methods=['DELETE'])
def delete_slack_integration(integrationId, context):
return webhook.delete(context["tenantId"], integrationId)
@app.route('/webhooks', methods=['POST', 'PUT'])
def add_edit_webhook(context):
data = app.current_request.json_body
return {"data": webhook.add_edit(tenant_id=context["tenantId"], data=data, replace_none=True)}
@app.route('/webhooks', methods=['GET'])
def get_webhooks(context):
return {"data": webhook.get_by_tenant(tenant_id=context["tenantId"], replace_none=True)}
@app.route('/webhooks/{webhookId}', methods=['DELETE'])
def delete_webhook(webhookId, context):
return {"data": webhook.delete(tenant_id=context["tenantId"], webhook_id=webhookId)}
@app.route('/client/members', methods=['GET'])
def get_members(context):
return {"data": users.get_members(tenant_id=context['tenantId'])}
@app.route('/client/members', methods=['PUT', 'POST'])
def add_member(context):
data = app.current_request.json_body
return users.create_member(tenant_id=context['tenantId'], user_id=context['userId'], data=data)
@app.route('/users/invitation', methods=['GET'], authorizer=None)
def process_invitation_link():
params = app.current_request.query_params
if params is None or len(params.get("token", "")) < 64:
return {"errors": ["please provide a valid invitation"]}
user = users.get_by_invitation_token(params["token"])
if user is None:
return {"errors": ["invitation not found"]}
if user["expiredInvitation"]:
return {"errors": ["expired invitation, please ask your admin to send a new one"]}
if user["expiredChange"] is not None and not user["expiredChange"] \
and user["changePwdToken"] is not None and user["changePwdAge"] < -5 * 60:
pass_token = user["changePwdToken"]
else:
pass_token = users.allow_password_change(user_id=user["userId"])
return Response(
status_code=307,
body='',
headers={'Location': environ["SITE_URL"] + environ["change_password_link"] % (params["token"], pass_token),
'Content-Type': 'text/plain'})
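# Illustrative outcome of the redirect above (values are placeholders): the user
# lands on SITE_URL + change_password_link with the invitation token and the
# freshly issued (or still-valid) pass_token substituted into the link template.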
@app.route('/password/reset', methods=['POST', 'PUT'], authorizer=None)
def change_password_by_invitation():
data = app.current_request.json_body
if data is None or len(data.get("invitation", "")) < 64 or len(data.get("pass", "")) < 8:
return {"errors": ["please provide a valid invitation & pass"]}
user = users.get_by_invitation_token(token=data["invitation"], pass_token=data["pass"])
if user is None:
return {"errors": ["invitation not found"]}
if user["expiredChange"]:
return {"errors": ["expired change, please re-use the invitation link"]}
return users.set_password_invitation(new_password=data["pass"], user_id=user["userId"])
@app.route('/client/members/{memberId}', methods=['PUT', 'POST'])
def edit_member(memberId, context):
data = app.current_request.json_body
return users.edit(tenant_id=context['tenantId'], editor_id=context['userId'], changes=data,
user_id_to_update=memberId)
@app.route('/client/members/{memberId}/reset', methods=['GET'])
def reset_reinvite_member(memberId, context):
return users.reset_member(tenant_id=context['tenantId'], editor_id=context['userId'], user_id_to_update=memberId)
@app.route('/client/members/{memberId}', methods=['DELETE'])
def delete_member(memberId, context):
return users.delete_member(tenant_id=context["tenantId"], user_id=context['userId'], id_to_delete=memberId)
@app.route('/account/new_api_key', methods=['GET'])
def generate_new_user_token(context):
return {"data": users.generate_new_api_key(user_id=context['userId'])}
@app.route('/account', methods=['POST', 'PUT'])
def edit_account(context):
data = app.current_request.json_body
return users.edit(tenant_id=context['tenantId'], user_id_to_update=context['userId'], changes=data,
editor_id=context['userId'])
@app.route('/account/password', methods=['PUT', 'POST'])
def change_client_password(context):
data = app.current_request.json_body
return users.change_password(email=context['email'], old_password=data["oldPassword"],
new_password=data["newPassword"], tenant_id=context["tenantId"],
user_id=context["userId"])
@app.route('/metadata/session_search', methods=['GET'])
def search_sessions_by_metadata(context):
params = app.current_request.query_params
if params is None:
return {"errors": ["please provide a key&value for search"]}
value = params.get('value', '')
key = params.get('key', '')
project_id = params.get('projectId')
if len(value) == 0 and len(key) == 0:
return {"errors": ["please provide a key&value for search"]}
if len(value) == 0:
return {"errors": ["please provide a value for search"]}
if len(key) == 0:
return {"errors": ["please provide a key for search"]}
return {
"data": sessions.search_by_metadata(tenant_id=context["tenantId"], user_id=context["userId"], m_value=value,
m_key=key,
project_id=project_id)}
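# Illustrative call for the metadata search above (key/value are placeholders):
#   GET /metadata/session_search?key=plan&value=premium&projectId=1
# Both key and value are required; projectId is optional and scopes the search.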
@app.route('/plans', methods=['GET'])
def get_current_plan(context):
return {
"data": license.get_status(context["tenantId"])
}
@app.route('/alerts/notifications', methods=['POST', 'PUT'], authorizer=None)
def send_alerts_notifications():
data = app.current_request.json_body
return {"data": alerts.process_notifications(data.get("notifications", []))}

View file

@ -1,13 +0,0 @@
from chalice import Blueprint, Cron
from chalicelib import _overrides
app = Blueprint(__name__)
_overrides.chalice_app(app)
from chalicelib.core import telemetry
# Run every day.
@app.schedule(Cron('0', '0', '?', '*', '*', '*'))
def telemetry_cron(event):
telemetry.compute()

View file

@ -1,550 +0,0 @@
from chalice import Blueprint
from chalicelib.utils import helper
from chalicelib import _overrides
from chalicelib.core import dashboard
from chalicelib.core import metadata
app = Blueprint(__name__)
_overrides.chalice_app(app)
@app.route('/{projectId}/dashboard/metadata', methods=['GET'])
def get_metadata_map(projectId, context):
metamap = []
for m in metadata.get(project_id=projectId):
metamap.append({"name": m["key"], "key": f"metadata{m['index']}"})
return {"data": metamap}
@app.route('/{projectId}/dashboard/sessions', methods=['GET', 'POST'])
def get_dashboard_processed_sessions(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_processed_sessions(project_id=projectId, **{**data, **args})}
@app.route('/{projectId}/dashboard/errors', methods=['GET', 'POST'])
def get_dashboard_errors(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_errors(project_id=projectId, **{**data, **args})}
@app.route('/{projectId}/dashboard/errors_trend', methods=['GET', 'POST'])
def get_dashboard_errors_trend(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_errors_trend(project_id=projectId, **{**data, **args})}
@app.route('/{projectId}/dashboard/application_activity', methods=['GET', 'POST'])
def get_dashboard_application_activity(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_application_activity(project_id=projectId, **{**data, **args})}
@app.route('/{projectId}/dashboard/page_metrics', methods=['GET', 'POST'])
def get_dashboard_page_metrics(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_page_metrics(project_id=projectId, **{**data, **args})}
@app.route('/{projectId}/dashboard/user_activity', methods=['GET', 'POST'])
def get_dashboard_user_activity(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_user_activity(project_id=projectId, **{**data, **args})}
@app.route('/{projectId}/dashboard/performance', methods=['GET', 'POST'])
def get_dashboard_performance(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_performance(project_id=projectId, **{**data, **args})}
@app.route('/{projectId}/dashboard/slowest_images', methods=['GET', 'POST'])
def get_dashboard_slowest_images(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_slowest_images(project_id=projectId, **{**data, **args})}
@app.route('/{projectId}/dashboard/missing_resources', methods=['GET', 'POST'])
def get_performance_sessions(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_missing_resources_trend(project_id=projectId, **{**data, **args})}
@app.route('/{projectId}/dashboard/network', methods=['GET', 'POST'])
def get_network_widget(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_network(project_id=projectId, **{**data, **args})}
@app.route('/{projectId}/dashboard/{widget}/search', methods=['GET'])
def get_dashboard_autocomplete(projectId, widget, context):
params = app.current_request.query_params
if params is None or params.get('q') is None or len(params.get('q')) == 0:
return {"data": []}
params['q'] = '^' + params['q']
if widget in ['performance']:
data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
platform=params.get('platform', None), performance=True)
elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render',
'impacted_sessions_by_slow_pages', 'pages_response_time']:
data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
platform=params.get('platform', None), pages_only=True)
elif widget in ['resources_loading_time']:
data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
platform=params.get('platform', None), performance=False)
elif widget in ['time_between_events', 'events']:
data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
platform=params.get('platform', None), performance=False, events_only=True)
elif widget in ['metadata']:
data = dashboard.search(params.get('q', ''), None, project_id=projectId,
platform=params.get('platform', None), metadata=True, key=params.get("key"))
else:
return {"errors": [f"unsupported widget: {widget}"]}
return {'data': data}
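# Note: the leading '^' added to q above anchors the autocomplete match to the
# beginning of the value (the search helper presumably treats it as a
# prefix/regex marker).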
# 1
@app.route('/{projectId}/dashboard/slowest_resources', methods=['GET', 'POST'])
def get_dashboard_slowest_resources(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_slowest_resources(project_id=projectId, **{**data, **args})}
# 2
@app.route('/{projectId}/dashboard/resources_loading_time', methods=['GET', 'POST'])
def get_dashboard_resources(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_resources_loading_time(project_id=projectId, **{**data, **args})}
# 3
@app.route('/{projectId}/dashboard/pages_dom_buildtime', methods=['GET', 'POST'])
def get_dashboard_pages_dom(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_pages_dom_build_time(project_id=projectId, **{**data, **args})}
# 4
@app.route('/{projectId}/dashboard/busiest_time_of_day', methods=['GET', 'POST'])
def get_dashboard_busiest_time_of_day(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_busiest_time_of_day(project_id=projectId, **{**data, **args})}
# 5
@app.route('/{projectId}/dashboard/sessions_location', methods=['GET', 'POST'])
def get_dashboard_sessions_location(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_sessions_location(project_id=projectId, **{**data, **args})}
# 6
@app.route('/{projectId}/dashboard/speed_location', methods=['GET', 'POST'])
def get_dashboard_speed_location(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_speed_index_location(project_id=projectId, **{**data, **args})}
# 7
@app.route('/{projectId}/dashboard/pages_response_time', methods=['GET', 'POST'])
def get_dashboard_pages_response_time(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_pages_response_time(project_id=projectId, **{**data, **args})}
# 8
@app.route('/{projectId}/dashboard/pages_response_time_distribution', methods=['GET', 'POST'])
def get_dashboard_pages_response_time_distribution(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_pages_response_time_distribution(project_id=projectId, **{**data, **args})}
# 9
@app.route('/{projectId}/dashboard/top_metrics', methods=['GET', 'POST'])
def get_dashboard_top_metrics(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_top_metrics(project_id=projectId, **{**data, **args})}
# 10
@app.route('/{projectId}/dashboard/time_to_render', methods=['GET', 'POST'])
def get_dashboard_time_to_render(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_time_to_render(project_id=projectId, **{**data, **args})}
# 11
@app.route('/{projectId}/dashboard/impacted_sessions_by_slow_pages', methods=['GET', 'POST'])
def get_dashboard_impacted_sessions_by_slow_pages(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_impacted_sessions_by_slow_pages(project_id=projectId, **{**data, **args})}
# 12
@app.route('/{projectId}/dashboard/memory_consumption', methods=['GET', 'POST'])
def get_dashboard_memory_consumption(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_memory_consumption(project_id=projectId, **{**data, **args})}
# 12.1
@app.route('/{projectId}/dashboard/fps', methods=['GET', 'POST'])
def get_dashboard_avg_fps(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_avg_fps(project_id=projectId, **{**data, **args})}
# 12.2
@app.route('/{projectId}/dashboard/cpu', methods=['GET', 'POST'])
def get_dashboard_avg_cpu(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_avg_cpu(project_id=projectId, **{**data, **args})}
# 13
@app.route('/{projectId}/dashboard/crashes', methods=['GET', 'POST'])
def get_dashboard_crashes(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_crashes(project_id=projectId, **{**data, **args})}
# 14
@app.route('/{projectId}/dashboard/domains_errors', methods=['GET', 'POST'])
def get_dashboard_domains_errors(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_domains_errors(project_id=projectId, **{**data, **args})}
# 14.1
@app.route('/{projectId}/dashboard/domains_errors_4xx', methods=['GET', 'POST'])
def get_dashboard_domains_errors_4xx(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_domains_errors_4xx(project_id=projectId, **{**data, **args})}
# 14.2
@app.route('/{projectId}/dashboard/domains_errors_5xx', methods=['GET', 'POST'])
def get_dashboard_domains_errors_5xx(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_domains_errors_5xx(project_id=projectId, **{**data, **args})}
# 15
@app.route('/{projectId}/dashboard/slowest_domains', methods=['GET', 'POST'])
def get_dashboard_slowest_domains(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_slowest_domains(project_id=projectId, **{**data, **args})}
# 16
@app.route('/{projectId}/dashboard/errors_per_domains', methods=['GET', 'POST'])
def get_dashboard_errors_per_domains(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_errors_per_domains(project_id=projectId, **{**data, **args})}
# 17
@app.route('/{projectId}/dashboard/sessions_per_browser', methods=['GET', 'POST'])
def get_dashboard_sessions_per_browser(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_sessions_per_browser(project_id=projectId, **{**data, **args})}
# 18
@app.route('/{projectId}/dashboard/calls_errors', methods=['GET', 'POST'])
def get_dashboard_calls_errors(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_calls_errors(project_id=projectId, **{**data, **args})}
# 18.1
@app.route('/{projectId}/dashboard/calls_errors_4xx', methods=['GET', 'POST'])
def get_dashboard_calls_errors_4xx(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_calls_errors_4xx(project_id=projectId, **{**data, **args})}
# 18.2
@app.route('/{projectId}/dashboard/calls_errors_5xx', methods=['GET', 'POST'])
def get_dashboard_calls_errors_5xx(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_calls_errors_5xx(project_id=projectId, **{**data, **args})}
# 19
@app.route('/{projectId}/dashboard/errors_per_type', methods=['GET', 'POST'])
def get_dashboard_errors_per_type(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_errors_per_type(project_id=projectId, **{**data, **args})}
# 20
@app.route('/{projectId}/dashboard/resources_by_party', methods=['GET', 'POST'])
def get_dashboard_resources_by_party(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_resources_by_party(project_id=projectId, **{**data, **args})}
# 21
@app.route('/{projectId}/dashboard/resource_type_vs_response_end', methods=['GET', 'POST'])
def get_dashboard_errors_per_resource_type(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.resource_type_vs_response_end(project_id=projectId, **{**data, **args})}
# 22
@app.route('/{projectId}/dashboard/resources_vs_visually_complete', methods=['GET', 'POST'])
def get_dashboard_resources_vs_visually_complete(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_resources_vs_visually_complete(project_id=projectId, **{**data, **args})}
# 23
@app.route('/{projectId}/dashboard/impacted_sessions_by_js_errors', methods=['GET', 'POST'])
def get_dashboard_impacted_sessions_by_js_errors(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_impacted_sessions_by_js_errors(project_id=projectId, **{**data, **args})}
# 24
@app.route('/{projectId}/dashboard/resources_count_by_type', methods=['GET', 'POST'])
def get_dashboard_resources_count_by_type(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": dashboard.get_resources_count_by_type(project_id=projectId, **{**data, **args})}
# 25
@app.route('/{projectId}/dashboard/time_between_events', methods=['GET'])
def get_dashboard_time_between_events(projectId, context):
return {"errors": ["please choose 2 events"]}
@app.route('/{projectId}/dashboard/overview', methods=['GET', 'POST'])
def get_dashboard_group(projectId, context):
data = app.current_request.json_body
if data is None:
data = {}
params = app.current_request.query_params
args = dashboard.dashboard_args(params)
return {"data": [
*helper.explode_widget(key="count_sessions",
data=dashboard.get_processed_sessions(project_id=projectId, **{**data, **args})),
*helper.explode_widget(data={**dashboard.get_application_activity(project_id=projectId, **{**data, **args}),
"chart": dashboard.get_performance(project_id=projectId, **{**data, **args})
.get("chart", [])}),
*helper.explode_widget(data=dashboard.get_page_metrics(project_id=projectId, **{**data, **args})),
*helper.explode_widget(data=dashboard.get_user_activity(project_id=projectId, **{**data, **args})),
*helper.explode_widget(data=dashboard.get_pages_dom_build_time(project_id=projectId, **{**data, **args}),
key="avg_pages_dom_buildtime"),
*helper.explode_widget(data=dashboard.get_pages_response_time(project_id=projectId, **{**data, **args}),
key="avg_pages_response_time"),
*helper.explode_widget(dashboard.get_top_metrics(project_id=projectId, **{**data, **args})),
*helper.explode_widget(data=dashboard.get_time_to_render(project_id=projectId, **{**data, **args}),
key="avg_time_to_render"),
*helper.explode_widget(dashboard.get_memory_consumption(project_id=projectId, **{**data, **args})),
*helper.explode_widget(dashboard.get_avg_cpu(project_id=projectId, **{**data, **args})),
*helper.explode_widget(dashboard.get_avg_fps(project_id=projectId, **{**data, **args})),
]}

View file

@ -1,13 +1,11 @@
import json
import time
from chalicelib.utils.helper import environ
from chalicelib.core import notifications
import schemas
from chalicelib.core import notifications, slack, webhook
from chalicelib.utils import pg_client, helper, email_helper
from chalicelib.utils.TimeUTC import TimeUTC
ALLOW_UPDATE = ["name", "description", "active", "detectionMethod", "query", "options"]
import logging
def get(id):
with pg_client.PostgresClient() as cur:
@ -37,34 +35,6 @@ def get_all(project_id):
return all
SUPPORTED_THRESHOLD = [15, 30, 60, 120, 240, 1440]
def __transform_structure(data):
if data.get("options") is None:
return f"Missing 'options'", None
if data["options"].get("currentPeriod") not in SUPPORTED_THRESHOLD:
return f"Unsupported currentPeriod, please provide one of these values {SUPPORTED_THRESHOLD}", None
if data["options"].get("previousPeriod", 15) not in SUPPORTED_THRESHOLD:
return f"Unsupported previousPeriod, please provide one of these values {SUPPORTED_THRESHOLD}", None
if data["options"].get("renotifyInterval") is None:
data["options"]["renotifyInterval"] = 720
data["query"]["right"] = float(data["query"]["right"])
data["query"] = json.dumps(data["query"])
data["description"] = data["description"] if data.get("description") is not None and len(
data["description"]) > 0 else None
if data.get("options"):
messages = []
for m in data["options"].get("message", []):
if m.get("value") is None:
continue
m["value"] = str(m["value"])
messages.append(m)
data["options"]["message"] = messages
data["options"] = json.dumps(data["options"])
return None, data
def __process_circular(alert):
if alert is None:
return None
@ -73,15 +43,16 @@ def __process_circular(alert):
return alert
def create(project_id, data):
err, data = __transform_structure(data)
if err is not None:
return {"errors": [err]}
def create(project_id, data: schemas.AlertSchema):
data = data.dict()
data["query"] = json.dumps(data["query"])
data["options"] = json.dumps(data["options"])
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
INSERT INTO public.alerts(project_id, name, description, detection_method, query, options)
VALUES (%(project_id)s, %(name)s, %(description)s, %(detectionMethod)s, %(query)s, %(options)s::jsonb)
INSERT INTO public.alerts(project_id, name, description, detection_method, query, options, series_id)
VALUES (%(project_id)s, %(name)s, %(description)s, %(detection_method)s, %(query)s, %(options)s::jsonb, %(series_id)s)
RETURNING *;""",
{"project_id": project_id, **data})
)
@ -89,23 +60,24 @@ def create(project_id, data):
return {"data": helper.dict_to_camel_case(__process_circular(a))}
def update(id, changes):
changes = {k: changes[k] for k in changes.keys() if k in ALLOW_UPDATE}
err, changes = __transform_structure(changes)
if err is not None:
return {"errors": [err]}
updateq = []
for k in changes.keys():
updateq.append(f"{helper.key_to_snake_case(k)} = %({k})s")
if len(updateq) == 0:
return {"errors": ["nothing to update"]}
def update(id, data: schemas.AlertSchema):
data = data.dict()
data["query"] = json.dumps(data["query"])
data["options"] = json.dumps(data["options"])
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
query = cur.mogrify("""\
UPDATE public.alerts
SET {", ".join(updateq)}
SET name = %(name)s,
description = %(description)s,
active = TRUE,
detection_method = %(detection_method)s,
query = %(query)s,
options = %(options)s,
series_id = %(series_id)s
WHERE alert_id =%(id)s AND deleted_at ISNULL
RETURNING *;""",
{"id": id, **changes})
{"id": id, **data})
cur.execute(query=query)
a = helper.dict_to_camel_case(cur.fetchone())
return {"data": __process_circular(a)}
@ -132,7 +104,26 @@ def process_notifications(data):
BATCH_SIZE = 200
for t in full.keys():
for i in range(0, len(full[t]), BATCH_SIZE):
helper.async_post(environ['alert_ntf'] % t, {"notifications": full[t][i:i + BATCH_SIZE]})
notifications_list = full[t][i:i + BATCH_SIZE]
if t == "slack":
try:
slack.send_batch(notifications_list=notifications_list)
except Exception as e:
logging.error("!!!Error while sending slack notifications batch")
logging.error(str(e))
elif t == "email":
try:
send_by_email_batch(notifications_list=notifications_list)
except Exception as e:
logging.error("!!!Error while sending email notifications batch")
logging.error(str(e))
elif t == "webhook":
try:
webhook.trigger_batch(data_list=notifications_list)
except Exception as e:
logging.error("!!!Error while sending webhook notifications batch")
logging.error(str(e))
def send_by_email(notification, destination):

View file

@ -0,0 +1,27 @@
from chalicelib.utils import pg_client, helper
def get_all_alerts():
with pg_client.PostgresClient(long_query=True) as cur:
query = """SELECT -1 AS tenant_id,
alert_id,
project_id,
detection_method,
query,
options,
(EXTRACT(EPOCH FROM alerts.created_at) * 1000)::BIGINT AS created_at,
alerts.name,
alerts.series_id,
filter
FROM public.alerts
LEFT JOIN metric_series USING (series_id)
INNER JOIN projects USING (project_id)
WHERE alerts.deleted_at ISNULL
AND alerts.active
AND projects.active
AND projects.deleted_at ISNULL
AND (alerts.series_id ISNULL OR metric_series.deleted_at ISNULL)
ORDER BY alerts.created_at;"""
cur.execute(query=query)
all_alerts = helper.list_to_camel_case(cur.fetchall())
return all_alerts

View file

@ -0,0 +1,250 @@
import decimal
import logging
import schemas
from chalicelib.core import alerts_listener
from chalicelib.core import sessions, alerts
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
LeftToDb = {
schemas.AlertColumn.performance__dom_content_loaded__average: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "COALESCE(AVG(NULLIF(dom_content_loaded_time ,0)),0)"},
schemas.AlertColumn.performance__first_meaningful_paint__average: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)"},
schemas.AlertColumn.performance__page_load_time__average: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(load_time ,0))"},
schemas.AlertColumn.performance__dom_build_time__average: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(dom_building_time,0))"},
schemas.AlertColumn.performance__speed_index__average: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(speed_index,0))"},
schemas.AlertColumn.performance__page_response_time__average: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(response_time,0))"},
schemas.AlertColumn.performance__ttfb__average: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(first_paint_time,0))"},
schemas.AlertColumn.performance__time_to_render__average: {
"table": "events.pages INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(visually_complete,0))"},
schemas.AlertColumn.performance__image_load_time__average: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='img'"},
schemas.AlertColumn.performance__request_load_time__average: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='fetch'"},
schemas.AlertColumn.resources__load_time__average: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "AVG(NULLIF(resources.duration,0))"},
schemas.AlertColumn.resources__missing__count: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(DISTINCT url_hostpath)", "condition": "success= FALSE"},
schemas.AlertColumn.errors__4xx_5xx__count: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "COUNT(session_id)",
"condition": "status/100!=2"},
schemas.AlertColumn.errors__4xx__count: {"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(session_id)", "condition": "status/100=4"},
schemas.AlertColumn.errors__5xx__count: {"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(session_id)", "condition": "status/100=5"},
schemas.AlertColumn.errors__javascript__impacted_sessions__count: {
"table": "events.resources INNER JOIN public.sessions USING(session_id)",
"formula": "COUNT(DISTINCT session_id)", "condition": "success= FALSE AND type='script'"},
schemas.AlertColumn.performance__crashes__count: {
"table": "(SELECT *, start_ts AS timestamp FROM public.sessions WHERE errors_count > 0) AS sessions",
"formula": "COUNT(DISTINCT session_id)", "condition": "errors_count > 0"},
schemas.AlertColumn.errors__javascript__count: {
"table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
"formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'", "joinSessions": False},
schemas.AlertColumn.errors__backend__count: {
"table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
"formula": "COUNT(DISTINCT session_id)", "condition": "source!='js_exception'", "joinSessions": False},
}
# This is the frequency of execution for each threshold
TimeInterval = {
15: 3,
30: 5,
60: 10,
120: 20,
240: 30,
1440: 60,
}
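# Read together with can_check below: an alert whose widest window is 15 minutes
# is evaluated every 3 minutes, a 60-minute window every 10 minutes, and a
# 24-hour (1440) window once per hour.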
def can_check(a) -> bool:
now = TimeUTC.now()
repetitionBase = a["options"]["currentPeriod"] \
if a["detectionMethod"] == schemas.AlertDetectionMethod.change \
and a["options"]["currentPeriod"] > a["options"]["previousPeriod"] \
else a["options"]["previousPeriod"]
if TimeInterval.get(repetitionBase) is None:
logging.error(f"repetitionBase: {repetitionBase} NOT FOUND")
return False
return (a["options"]["renotifyInterval"] <= 0 or
a["options"].get("lastNotification") is None or
a["options"]["lastNotification"] <= 0 or
((now - a["options"]["lastNotification"]) > a["options"]["renotifyInterval"] * 60 * 1000)) \
and ((now - a["createdAt"]) % (TimeInterval[repetitionBase] * 60 * 1000)) < 60 * 1000
def Build(a):
params = {"project_id": a["projectId"]}
full_args = {}
j_s = True
if a["seriesId"] is not None:
a["filter"]["sort"] = "session_id"
a["filter"]["order"] = "DESC"
a["filter"]["startDate"] = -1
a["filter"]["endDate"] = TimeUTC.now()
full_args, query_part, sort = sessions.search_query_parts(
data=schemas.SessionsSearchPayloadSchema.parse_obj(a["filter"]),
error_status=None, errors_only=False,
favorite_only=False, issue=None, project_id=a["projectId"],
user_id=None)
subQ = f"""SELECT COUNT(session_id) AS value
{query_part}"""
else:
colDef = LeftToDb[a["query"]["left"]]
subQ = f"""SELECT {colDef["formula"]} AS value
FROM {colDef["table"]}
WHERE project_id = %(project_id)s
{"AND " + colDef["condition"] if colDef.get("condition") is not None else ""}"""
j_s = colDef.get("joinSessions", True)
q = f"""SELECT coalesce(value,0) AS value, coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid"""
# if len(colDef.group) > 0 {
# subQ = subQ.Column(colDef.group + " AS group_value")
# subQ = subQ.GroupBy(colDef.group)
# q = q.Column("group_value")
# }
if a["detectionMethod"] == schemas.AlertDetectionMethod.threshold:
if a["seriesId"] is not None:
q += f""" FROM ({subQ}) AS stat"""
else:
q += f""" FROM ({subQ} AND timestamp>=%(startDate)s
{"AND sessions.start_ts >= %(startDate)s" if j_s else ""}) AS stat"""
params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000}
else:
if a["options"]["change"] == schemas.AlertDetectionChangeType.change:
# if len(colDef.group) > 0:
# subq1 := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod * 60))
# sub2, args2, _ := subQ.Where(
# sq.And{
# sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod * 60),
# sq.Expr("timestamp>=$4 ", time.Now().Unix()-2 * a.Options.CurrentPeriod * 60),
# }).ToSql()
# sub1 := sq.Select("group_value", "(stat1.value-stat2.value) AS value").FromSelect(subq1, "stat1").JoinClause("INNER JOIN ("+sub2+") AS stat2 USING(group_value)", args2...)
# q = q.FromSelect(sub1, "stat")
# else:
if a["seriesId"] is not None:
sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
sub1 = f"SELECT (({subQ})-({sub2})) AS value"
q += f" FROM ( {sub1} ) AS stat"
params = {**params, **full_args,
"startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
"timestamp_sub2": TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000}
else:
sub1 = f"""{subQ} AND timestamp>=%(startDate)s
{"AND sessions.start_ts >= %(startDate)s" if j_s else ""}"""
params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
sub2 = f"""{subQ} AND timestamp<%(startDate)s
AND timestamp>=%(timestamp_sub2)s
{"AND sessions.start_ts < %(startDate)s AND sessions.start_ts >= %(timestamp_sub2)s" if j_s else ""}"""
params["timestamp_sub2"] = TimeUTC.now() - 2 * a["options"]["currentPeriod"] * 60 * 1000
sub1 = f"SELECT (( {sub1} )-( {sub2} )) AS value"
q += f" FROM ( {sub1} ) AS stat"
else:
# if len(colDef.group) >0 {
# subq1 := subQ.Where(sq.Expr("timestamp>=$2 ", time.Now().Unix()-a.Options.CurrentPeriod * 60))
# sub2, args2, _ := subQ.Where(
# sq.And{
# sq.Expr("timestamp<$3 ", time.Now().Unix()-a.Options.CurrentPeriod * 60),
# sq.Expr("timestamp>=$4 ", time.Now().Unix()-a.Options.PreviousPeriod * 60-a.Options.CurrentPeriod * 60),
# }).ToSql()
# sub1 := sq.Select("group_value", "(stat1.value/stat2.value-1)*100 AS value").FromSelect(subq1, "stat1").JoinClause("INNER JOIN ("+sub2+") AS stat2 USING(group_value)", args2...)
# q = q.FromSelect(sub1, "stat")
# } else {
if a["seriesId"] is not None:
sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
sub1 = f"SELECT (({subQ})/NULLIF(({sub2}),0)-1)*100 AS value"
q += f" FROM ({sub1}) AS stat"
params = {**params, **full_args,
"startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000,
"timestamp_sub2": TimeUTC.now() \
- (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) \
* 60 * 1000}
else:
sub1 = f"""{subQ} AND timestamp>=%(startDate)s
{"AND sessions.start_ts >= %(startDate)s" if j_s else ""}"""
params["startDate"] = TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000
sub2 = f"""{subQ} AND timestamp<%(startDate)s
AND timestamp>=%(timestamp_sub2)s
{"AND sessions.start_ts < %(startDate)s AND sessions.start_ts >= %(timestamp_sub2)s" if j_s else ""}"""
params["timestamp_sub2"] = TimeUTC.now() \
- (a["options"]["currentPeriod"] + a["options"]["currentPeriod"]) * 60 * 1000
sub1 = f"SELECT (({sub1})/NULLIF(({sub2}),0)-1)*100 AS value"
q += f" FROM ({sub1}) AS stat"
return q, params
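# Illustrative SQL produced by Build() for a simple threshold alert (no series;
# the ">" operator and the 3000 bound are placeholder values):
#   SELECT coalesce(value,0) AS value, coalesce(value,0) > 3000 AS valid
#   FROM (SELECT AVG(NULLIF(load_time ,0)) AS value
#         FROM events.pages INNER JOIN public.sessions USING(session_id)
#         WHERE project_id = %(project_id)s AND timestamp>=%(startDate)s
#         AND sessions.start_ts >= %(startDate)s) AS stat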
def process():
notifications = []
all_alerts = alerts_listener.get_all_alerts()
with pg_client.PostgresClient() as cur:
for alert in all_alerts:
if can_check(alert):
logging.info(f"Querying alertId:{alert['alertId']} name: {alert['name']}")
query, params = Build(alert)
query = cur.mogrify(query, params)
logging.debug(alert)
logging.debug(query)
try:
cur.execute(query)
result = cur.fetchone()
if result["valid"]:
logging.info("Valid alert, notifying users")
notifications.append({
"alertId": alert["alertId"],
"tenantId": alert["tenantId"],
"title": alert["name"],
"description": f"has been triggered, {alert['query']['left']} = {round(result['value'], 2)} ({alert['query']['operator']} {alert['query']['right']}).",
"buttonText": "Check metrics for more details",
"buttonUrl": f"/{alert['projectId']}/metrics",
"imageUrl": None,
"options": {"source": "ALERT", "sourceId": alert["alertId"],
"sourceMeta": alert["detectionMethod"],
"message": alert["options"]["message"], "projectId": alert["projectId"],
"data": {"title": alert["name"],
"limitValue": alert["query"]["right"],
"actualValue": float(result["value"]) \
if isinstance(result["value"], decimal.Decimal) \
else result["value"],
"operator": alert["query"]["operator"],
"trigger": alert["query"]["left"],
"alertId": alert["alertId"],
"detectionMethod": alert["detectionMethod"],
"currentPeriod": alert["options"]["currentPeriod"],
"previousPeriod": alert["options"]["previousPeriod"],
"createdAt": TimeUTC.now()}},
})
except Exception as e:
logging.error(f"!!!Error while running alert query for alertId:{alert['alertId']}")
logging.error(str(e))
logging.error(query)
if len(notifications) > 0:
cur.execute(
cur.mogrify(f"""UPDATE public.Alerts
SET options = options||'{{"lastNotification":{TimeUTC.now()}}}'::jsonb
WHERE alert_id IN %(ids)s;""", {"ids": tuple([n["alertId"] for n in notifications])}))
if len(notifications) > 0:
alerts.process_notifications(notifications)

View file

@ -1,6 +1,6 @@
from chalicelib.utils import pg_client
from chalicelib.utils import helper
from chalicelib.utils.helper import environ
from decouple import config
from chalicelib.utils.TimeUTC import TimeUTC
@ -22,7 +22,7 @@ def get_all(user_id):
for a in announcements:
a["createdAt"] = TimeUTC.datetime_to_timestamp(a["createdAt"])
if a["imageUrl"] is not None and len(a["imageUrl"]) > 0:
a["imageUrl"] = environ["announcement_url"] + a["imageUrl"]
a["imageUrl"] = config("announcement_url") + a["imageUrl"]
return announcements

View file

@ -1,9 +1,11 @@
import schemas
from chalicelib.utils import pg_client, helper
from chalicelib.core import projects, sessions, sessions_metas
import requests
from decouple import config
from chalicelib.utils.helper import environ
SESSION_PROJECTION_COLS = """s.project_id,
s.session_id::text AS session_id,
@ -23,7 +25,7 @@ SESSION_PROJECTION_COLS = """s.project_id,
def get_live_sessions(project_id, filters=None):
project_key = projects.get_project_key(project_id)
connected_peers = requests.get(environ["peers"] % environ["S3_KEY"] + f"/{project_key}")
connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}")
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
@ -43,7 +45,7 @@ def get_live_sessions(project_id, filters=None):
continue
filter_type = f["type"].upper()
f["value"] = sessions.__get_sql_value_multiple(f["value"])
if filter_type == sessions_metas.meta_type.USERID:
if filter_type == schemas.FilterType.user_id:
op = sessions.__get_sql_operator(f["operator"])
extra_constraints.append(f"user_id {op} %(value_{i})s")
extra_params[f"value_{i}"] = helper.string_to_sql_like_with_op(f["value"][0], op)
@ -67,7 +69,7 @@ def get_live_sessions(project_id, filters=None):
def is_live(project_id, session_id, project_key=None):
if project_key is None:
project_key = projects.get_project_key(project_id)
connected_peers = requests.get(environ["peers"] % environ["S3_KEY"] + f"/{project_key}")
connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}")
if connected_peers.status_code != 200:
print("!! issue with the peer-server")
print(connected_peers.text)
@ -77,5 +79,5 @@ def is_live(project_id, session_id, project_key=None):
def get_ice_servers():
return environ.get("iceServers") if environ.get("iceServers") is not None \
and len(environ["iceServers"]) > 0 else None
return config("iceServers") if config("iceServers", default=None) is not None \
and len(config("iceServers")) > 0 else None

View file

@ -1,8 +1,7 @@
from chalicelib.utils.helper import environ
import jwt
from chalicelib.utils import helper
from chalicelib.utils.TimeUTC import TimeUTC
from decouple import config
from chalicelib.core import tenants
from chalicelib.core import users
@ -14,8 +13,8 @@ def jwt_authorizer(token):
try:
payload = jwt.decode(
token[1],
environ["jwt_secret"],
algorithms=environ["jwt_algorithm"],
config("jwt_secret"),
algorithms=config("jwt_algorithm"),
audience=[f"plugin:{helper.get_stage_name()}", f"front:{helper.get_stage_name()}"]
)
except jwt.ExpiredSignatureError:
@ -43,15 +42,15 @@ def generate_jwt(id, tenant_id, iat, aud):
payload={
"userId": id,
"tenantId": tenant_id,
"exp": iat // 1000 + int(environ["jwt_exp_delta_seconds"]) + TimeUTC.get_utc_offset() // 1000,
"iss": environ["jwt_issuer"],
"exp": iat // 1000 + config("jwt_exp_delta_seconds",cast=int) + TimeUTC.get_utc_offset() // 1000,
"iss": config("jwt_issuer"),
"iat": iat // 1000,
"aud": aud
},
key=environ["jwt_secret"],
algorithm=environ["jwt_algorithm"]
key=config("jwt_secret"),
algorithm=config("jwt_algorithm")
)
return token.decode("utf-8")
return token
def api_key_authorizer(token):

View file

@ -1,5 +1,5 @@
import requests
from chalicelib.utils.helper import environ
from decouple import config
from datetime import datetime
from chalicelib.core import webhook
@ -95,8 +95,8 @@ class Slack:
def share_session(cls, tenant_id, project_id, session_id, user, comment, integration_id=None):
args = {"fallback": f"{user} has shared the below session!",
"pretext": f"{user} has shared the below session!",
"title": f"{environ['SITE_URL']}/{project_id}/session/{session_id}",
"title_link": f"{environ['SITE_URL']}/{project_id}/session/{session_id}",
"title": f"{config('SITE_URL')}/{project_id}/session/{session_id}",
"title_link": f"{config('SITE_URL')}/{project_id}/session/{session_id}",
"text": comment}
return {"data": cls.__share_to_slack(tenant_id, integration_id, **args)}
@ -104,8 +104,8 @@ class Slack:
def share_error(cls, tenant_id, project_id, error_id, user, comment, integration_id=None):
args = {"fallback": f"{user} has shared the below error!",
"pretext": f"{user} has shared the below error!",
"title": f"{environ['SITE_URL']}/{project_id}/errors/{error_id}",
"title_link": f"{environ['SITE_URL']}/{project_id}/errors/{error_id}",
"title": f"{config('SITE_URL')}/{project_id}/errors/{error_id}",
"title_link": f"{config('SITE_URL')}/{project_id}/errors/{error_id}",
"text": comment}
return {"data": cls.__share_to_slack(tenant_id, integration_id, **args)}

View file

@ -0,0 +1,225 @@
import json
import schemas
from chalicelib.core import sessions
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
def try_live(project_id, data: schemas.TryCustomMetricsSchema):
results = []
for s in data.series:
s.filter.startDate = data.startDate
s.filter.endDate = data.endDate
results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
view_type=data.viewType))
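# For the "progress" view, the same series is also computed over the immediately preceding window of equal length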
if data.viewType == schemas.MetricViewType.progress:
r = {"count": results[-1]}
diff = s.filter.endDate - s.filter.startDate
s.filter.endDate = data.startDate
s.filter.startDate = data.startDate - diff
r["previousCount"] = sessions.search2_series(data=s.filter, project_id=project_id, density=data.density,
view_type=data.viewType)
r["countProgress"] = helper.__progress(old_val=r["previousCount"], new_val=r["count"])
results[-1] = r
return results
def make_chart(project_id, user_id, metric_id, data: schemas.CustomMetricChartPayloadSchema):
metric = get(metric_id=metric_id, project_id=project_id, user_id=user_id)
metric: schemas.TryCustomMetricsSchema = schemas.TryCustomMetricsSchema.parse_obj({**data.dict(), **metric})
return try_live(project_id=project_id, data=metric)
def create(project_id, user_id, data: schemas.CreateCustomMetricsSchema):
with pg_client.PostgresClient() as cur:
_data = {}
for i, s in enumerate(data.series):
for k in s.dict().keys():
_data[f"{k}_{i}"] = s.__getattribute__(k)
_data[f"index_{i}"] = i
_data[f"filter_{i}"] = s.filter.json()
series_len = len(data.series)
data.series = None
params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data}
query = cur.mogrify(f"""\
WITH m AS (INSERT INTO metrics (project_id, user_id, name)
VALUES (%(project_id)s, %(user_id)s, %(name)s)
RETURNING *)
INSERT
INTO metric_series(metric_id, index, name, filter)
VALUES {",".join([f"((SELECT metric_id FROM m), %(index_{i})s, %(name_{i})s, %(filter_{i})s::jsonb)"
for i in range(series_len)])}
RETURNING metric_id;""", params)
cur.execute(
query
)
r = cur.fetchone()
r = helper.dict_to_camel_case(r)
return {"data": r}
def __get_series_id(metric_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT series_id
FROM metric_series
WHERE metric_series.metric_id = %(metric_id)s
AND metric_series.deleted_at ISNULL;""",
{"metric_id": metric_id}
)
)
rows = cur.fetchall()
return [r["series_id"] for r in rows]
def update(metric_id, user_id, project_id, data: schemas.UpdateCustomMetricsSchema):
series_ids = __get_series_id(metric_id)
n_series = []
d_series_ids = []
u_series = []
u_series_ids = []
params = {"metric_id": metric_id, "is_public": data.is_public, "name": data.name,
"user_id": user_id, "project_id": project_id}
for i, s in enumerate(data.series):
prefix = "u_"
if s.series_id is None:
n_series.append({"i": i, "s": s})
prefix = "n_"
else:
u_series.append({"i": i, "s": s})
u_series_ids.append(s.series_id)
ns = s.dict()
for k in ns.keys():
if k == "filter":
ns[k] = json.dumps(ns[k])
params[f"{prefix}{k}_{i}"] = ns[k]
for i in series_ids:
if i not in u_series_ids:
d_series_ids.append(i)
params["d_series_ids"] = tuple(d_series_ids)
with pg_client.PostgresClient() as cur:
sub_queries = []
if len(n_series) > 0:
sub_queries.append(f"""\
n AS (INSERT INTO metric_series (metric_id, index, name, filter)
VALUES {",".join([f"(%(metric_id)s, %(n_index_{s['i']})s, %(n_name_{s['i']})s, %(n_filter_{s['i']})s::jsonb)"
for s in n_series])}
RETURNING 1)""")
if len(u_series) > 0:
sub_queries.append(f"""\
u AS (UPDATE metric_series
SET name=series.name,
filter=series.filter,
index=series.index
FROM (VALUES {",".join([f"(%(u_series_id_{s['i']})s,%(u_index_{s['i']})s,%(u_name_{s['i']})s,%(u_filter_{s['i']})s::jsonb)"
for s in u_series])}) AS series(series_id, index, name, filter)
WHERE metric_id =%(metric_id)s AND series_id=series.series_id
RETURNING 1)""")
if len(d_series_ids) > 0:
sub_queries.append("""\
d AS (DELETE FROM metric_series WHERE metric_id =%(metric_id)s AND series_id IN %(d_series_ids)s
RETURNING 1)""")
query = cur.mogrify(f"""\
{"WITH " if len(sub_queries) > 0 else ""}{",".join(sub_queries)}
UPDATE metrics
SET name = %(name)s, is_public= %(is_public)s
WHERE metric_id = %(metric_id)s
AND project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
RETURNING metric_id;""", params)
cur.execute(
query
)
r = cur.fetchone()
r = helper.dict_to_camel_case(r)
return r
def get_all(project_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT *
FROM metrics
LEFT JOIN LATERAL (SELECT jsonb_agg(metric_series.* ORDER BY index) AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)
WHERE metrics.project_id = %(project_id)s
AND metrics.deleted_at ISNULL
AND (user_id = %(user_id)s OR is_public)
ORDER BY created_at;""",
{"project_id": project_id, "user_id": user_id}
)
)
rows = cur.fetchall()
for r in rows:
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
rows = helper.list_to_camel_case(rows)
return rows
def delete(project_id, metric_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.metrics
SET deleted_at = timezone('utc'::text, now())
WHERE project_id = %(project_id)s
AND metric_id = %(metric_id)s
AND (user_id = %(user_id)s OR is_public);""",
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id})
)
return {"state": "success"}
def get(metric_id, project_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT *
FROM metrics
LEFT JOIN LATERAL (SELECT jsonb_agg(metric_series.* ORDER BY index) AS series
FROM metric_series
WHERE metric_series.metric_id = metrics.metric_id
AND metric_series.deleted_at ISNULL
) AS metric_series ON (TRUE)
WHERE metrics.project_id = %(project_id)s
AND metrics.deleted_at ISNULL
AND (metrics.user_id = %(user_id)s OR metrics.is_public)
AND metrics.metric_id = %(metric_id)s
ORDER BY created_at;""",
{"metric_id": metric_id, "project_id": project_id, "user_id": user_id}
)
)
row = cur.fetchone()
row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"])
return helper.dict_to_camel_case(row)
def get_series_for_alert(project_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT metric_id,
series_id,
metrics.name AS metric_name,
metric_series.name AS series_name,
index AS series_index
FROM metric_series
INNER JOIN metrics USING (metric_id)
WHERE metrics.deleted_at ISNULL
AND metrics.project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
ORDER BY metric_name, series_index, series_name;""",
{"project_id": project_id, "user_id": user_id}
)
)
rows = cur.fetchall()
return helper.list_to_camel_case(rows)

View file

@ -1,3 +1,4 @@
import schemas
from chalicelib.core import metadata
from chalicelib.utils import args_transformer
from chalicelib.utils import helper, dev
@ -94,25 +95,25 @@ def __get_meta_constraint(project_id, data):
else:
filter_type = f["key"].upper()
filter_type = [filter_type, "USER" + filter_type, filter_type[4:]]
if any(item in [sessions_metas.meta_type.USERBROWSER] \
if any(item in [schemas.FilterType.user_browser] \
for item in filter_type):
constraints.append(f"sessions.user_browser = %({f['key']}_{i})s")
elif any(item in [sessions_metas.meta_type.USEROS, sessions_metas.meta_type.USEROS_IOS] \
elif any(item in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios] \
for item in filter_type):
constraints.append(f"sessions.user_os = %({f['key']}_{i})s")
elif any(item in [sessions_metas.meta_type.USERDEVICE, sessions_metas.meta_type.USERDEVICE_IOS] \
elif any(item in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios] \
for item in filter_type):
constraints.append(f"sessions.user_device = %({f['key']}_{i})s")
elif any(item in [sessions_metas.meta_type.USERCOUNTRY, sessions_metas.meta_type.USERCOUNTRY_IOS] \
elif any(item in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios] \
for item in filter_type):
constraints.append(f"sessions.user_country = %({f['key']}_{i})s")
elif any(item in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS] \
elif any(item in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios] \
for item in filter_type):
constraints.append(f"sessions.user_id = %({f['key']}_{i})s")
elif any(item in [sessions_metas.meta_type.USERANONYMOUSID, sessions_metas.meta_type.USERANONYMOUSID_IOS] \
elif any(item in [schemas.FilterType.user_anonymous_id, schemas.FilterType.user_anonymous_id_ios] \
for item in filter_type):
constraints.append(f"sessions.user_anonymous_id = %({f['key']}_{i})s")
elif any(item in [sessions_metas.meta_type.REVID, sessions_metas.meta_type.REVID_IOS] \
elif any(item in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios] \
for item in filter_type):
constraints.append(f"sessions.rev_id = %({f['key']}_{i})s")
return constraints

View file

@ -777,4 +777,4 @@ def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTim
return {
"data": helper.dict_to_camel_case(row)
}
}

View file

@ -1,6 +1,7 @@
from chalicelib.utils import pg_client, helper
from chalicelib.core import sessions_metas, metadata
import schemas
from chalicelib.core import issues
from chalicelib.core import sessions_metas, metadata
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.event_filter_definition import SupportedFilter, Event
@ -235,23 +236,23 @@ def __generic_autocomplete(event: Event):
class event_type:
CLICK = Event(ui_type="CLICK", table="events.clicks", column="label")
INPUT = Event(ui_type="INPUT", table="events.inputs", column="label")
LOCATION = Event(ui_type="LOCATION", table="events.pages", column="base_path")
CUSTOM = Event(ui_type="CUSTOM", table="events_common.customs", column="name")
REQUEST = Event(ui_type="REQUEST", table="events_common.requests", column="url")
GRAPHQL = Event(ui_type="GRAPHQL", table="events.graphql", column="name")
STATEACTION = Event(ui_type="STATEACTION", table="events.state_actions", column="name")
ERROR = Event(ui_type="ERROR", table="events.errors",
CLICK = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label")
INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label")
LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", column="base_path")
CUSTOM = Event(ui_type=schemas.EventType.custom, table="events_common.customs", column="name")
REQUEST = Event(ui_type=schemas.EventType.request, table="events_common.requests", column="url")
GRAPHQL = Event(ui_type=schemas.EventType.graphql, table="events.graphql", column="name")
STATEACTION = Event(ui_type=schemas.EventType.state_action, table="events.state_actions", column="name")
ERROR = Event(ui_type=schemas.EventType.error, table="events.errors",
column=None) # column=None because errors are searched by name or message
METADATA = Event(ui_type="METADATA", table="public.sessions", column=None)
METADATA = Event(ui_type=schemas.EventType.metadata, table="public.sessions", column=None)
# IOS
CLICK_IOS = Event(ui_type="CLICK_IOS", table="events_ios.clicks", column="label")
INPUT_IOS = Event(ui_type="INPUT_IOS", table="events_ios.inputs", column="label")
VIEW_IOS = Event(ui_type="VIEW_IOS", table="events_ios.views", column="name")
CUSTOM_IOS = Event(ui_type="CUSTOM_IOS", table="events_common.customs", column="name")
REQUEST_IOS = Event(ui_type="REQUEST_IOS", table="events_common.requests", column="url")
ERROR_IOS = Event(ui_type="ERROR_IOS", table="events_ios.crashes",
CLICK_IOS = Event(ui_type=schemas.EventType.click_ios, table="events_ios.clicks", column="label")
INPUT_IOS = Event(ui_type=schemas.EventType.input_ios, table="events_ios.inputs", column="label")
VIEW_IOS = Event(ui_type=schemas.EventType.view_ios, table="events_ios.views", column="name")
CUSTOM_IOS = Event(ui_type=schemas.EventType.custom_ios, table="events_common.customs", column="name")
REQUEST_IOS = Event(ui_type=schemas.EventType.request_ios, table="events_common.requests", column="url")
ERROR_IOS = Event(ui_type=schemas.EventType.error_ios, table="events_ios.crashes",
column=None) # column=None because errors are searched by name or message
@ -389,18 +390,18 @@ def search_pg2(text, event_type, project_id, source, key):
if not event_type:
return {"data": __get_autocomplete_table(text, project_id)}
if event_type.upper() in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type.upper()].get(project_id=project_id, value=text, key=key, source=source)
if event_type.upper() + "_IOS" in SUPPORTED_TYPES.keys():
rows += SUPPORTED_TYPES[event_type.upper() + "_IOS"].get(project_id=project_id, value=text, key=key,
source=source)
elif event_type.upper() + "_IOS" in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type.upper() + "_IOS"].get(project_id=project_id, value=text, key=key,
source=source)
elif event_type.upper() in sessions_metas.SUPPORTED_TYPES.keys():
if event_type in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
if event_type + "_IOS" in SUPPORTED_TYPES.keys():
rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
source=source)
elif event_type + "_IOS" in SUPPORTED_TYPES.keys():
rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,
source=source)
elif event_type in sessions_metas.SUPPORTED_TYPES.keys():
return sessions_metas.search(text, event_type, project_id)
elif event_type.upper().endswith("_IOS") \
and event_type.upper()[:-len("_IOS")] in sessions_metas.SUPPORTED_TYPES.keys():
elif event_type.endswith("_IOS") \
and event_type[:-len("_IOS")] in sessions_metas.SUPPORTED_TYPES.keys():
return sessions_metas.search(text, event_type, project_id)
else:
return {"errors": ["unsupported event"]}

View file

@ -1,10 +1,10 @@
import json
import chalicelib.utils.helper
from chalicelib.core import events, significance, sessions
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils import helper, pg_client
from chalicelib.utils import dev
import json
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]
@ -40,7 +40,7 @@ def create(project_id, user_id, name, filter, is_public):
return {"data": r}
def update(funnel_id, user_id, name=None, filter=None, is_public=None):
def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=None):
s_query = []
if filter is not None:
helper.delete_keys_from_dict(filter, REMOVE_KEYS)
@ -56,9 +56,10 @@ def update(funnel_id, user_id, name=None, filter=None, is_public=None):
UPDATE public.funnels
SET {" , ".join(s_query)}
WHERE funnel_id=%(funnel_id)s
RETURNING *;""",
{"user_id": user_id, "funnel_id": funnel_id, "name": name,
"filter": json.dumps(filter) if filter is not None else None, "is_public": is_public})
AND project_id = %(project_id)s
AND (user_id = %(user_id)s OR is_public)
RETURNING *;""", {"user_id": user_id, "funnel_id": funnel_id, "name": name,
"filter": json.dumps(filter) if filter is not None else None, "is_public": is_public})
# print("--------------------")
# print(query)
# print("--------------------")
@ -74,13 +75,12 @@ def update(funnel_id, user_id, name=None, filter=None, is_public=None):
def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date=None, details=False):
with pg_client.PostgresClient() as cur:
team_query = ""
cur.execute(
cur.mogrify(
f"""\
SELECT DISTINCT ON (funnels.funnel_id) funnel_id,project_id, user_id, name, created_at, deleted_at, is_public
SELECT funnel_id, project_id, user_id, name, created_at, deleted_at, is_public
{",filter" if details else ""}
FROM public.funnels {team_query}
FROM public.funnels
WHERE project_id = %(project_id)s
AND funnels.deleted_at IS NULL
AND (funnels.user_id = %(user_id)s OR funnels.is_public);""",
@ -135,7 +135,8 @@ def delete(project_id, funnel_id, user_id):
UPDATE public.funnels
SET deleted_at = timezone('utc'::text, now())
WHERE project_id = %(project_id)s
AND funnel_id = %(funnel_id)s;""",
AND funnel_id = %(funnel_id)s
AND (user_id = %(user_id)s OR is_public);""",
{"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id})
)
@ -220,7 +221,7 @@ def get_issues_on_the_fly(funnel_id, project_id, data):
last_stage=last_stage))}
def get(funnel_id, project_id):
def get(funnel_id, project_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
@ -230,8 +231,9 @@ def get(funnel_id, project_id):
FROM public.funnels
WHERE project_id = %(project_id)s
AND deleted_at IS NULL
AND funnel_id = %(funnel_id)s;""",
{"funnel_id": funnel_id, "project_id": project_id}
AND funnel_id = %(funnel_id)s
AND (user_id = %(user_id)s OR is_public);""",
{"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id}
)
)
@ -247,7 +249,7 @@ def get(funnel_id, project_id):
@dev.timed
def search_by_issue(user_id, project_id, funnel_id, issue_id, data, range_value=None, start_date=None, end_date=None):
if len(data.get("events", [])) == 0:
f = get(funnel_id=funnel_id, project_id=project_id)
f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id)
if f is None:
return {"errors": ["funnel not found"]}
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=data.get('startDate', start_date),

View file

@ -0,0 +1,933 @@
import schemas
from chalicelib.core import sessions_metas
from chalicelib.utils import helper, dev
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size
import math
from chalicelib.core.dashboard import __get_constraints, __get_constraint_values
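# Build a Sankey-style structure: unique event values become nodes, and each
# (source, target) transition becomes a weighted link between node indices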
def __transform_journey(rows):
nodes = []
links = []
for r in rows:
source = r["source_event"][r["source_event"].index("_") + 1:]
target = r["target_event"][r["target_event"].index("_") + 1:]
if source not in nodes:
nodes.append(source)
if target not in nodes:
nodes.append(target)
links.append({"source": nodes.index(source), "target": nodes.index(target), "value": r["value"]})
return {"nodes": nodes, "links": sorted(links, key=lambda x: x["value"], reverse=True)}
JOURNEY_DEPTH = 5
JOURNEY_TYPES = {
"PAGES": {"table": "events.pages", "column": "base_path", "table_id": "message_id"},
"CLICK": {"table": "events.clicks", "column": "label", "table_id": "message_id"},
# "VIEW": {"table": "events_ios.views", "column": "name", "table_id": "seq_index"}, TODO: enable this for SAAS only
"EVENT": {"table": "events_common.customs", "column": "name", "table_id": "seq_index"}
}
@dev.timed
def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args):
pg_sub_query_subset = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
time_constraint=True)
event_start = None
event_table = JOURNEY_TYPES["PAGES"]["table"]
event_column = JOURNEY_TYPES["PAGES"]["column"]
event_table_id = JOURNEY_TYPES["PAGES"]["table_id"]
extra_values = {}
for f in filters:
if f["type"] == "START_POINT":
event_start = f["value"]
elif f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
event_table = JOURNEY_TYPES[f["value"]]["table"]
event_column = JOURNEY_TYPES[f["value"]]["column"]
elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
pg_sub_query_subset.append(f"sessions.user_id = %(user_id)s")
extra_values["user_id"] = f["value"]
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT source_event,
target_event,
count(*) AS value
FROM (SELECT event_number || '_' || value as target_event,
LAG(event_number || '_' || value, 1) OVER ( PARTITION BY session_rank ) AS source_event
FROM (SELECT value,
session_rank,
message_id,
ROW_NUMBER() OVER ( PARTITION BY session_rank ORDER BY timestamp ) AS event_number
{f"FROM (SELECT * FROM (SELECT *, MIN(mark) OVER ( PARTITION BY session_id , session_rank ORDER BY timestamp ) AS max FROM (SELECT *, CASE WHEN value = %(event_start)s THEN timestamp ELSE NULL END as mark"
if event_start else ""}
FROM (SELECT session_id,
message_id,
timestamp,
value,
SUM(new_session) OVER (ORDER BY session_id, timestamp) AS session_rank
FROM (SELECT *,
CASE
WHEN source_timestamp IS NULL THEN 1
ELSE 0 END AS new_session
FROM (SELECT session_id,
{event_table_id} AS message_id,
timestamp,
{event_column} AS value,
LAG(timestamp)
OVER (PARTITION BY session_id ORDER BY timestamp) AS source_timestamp
FROM {event_table} INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_subset)}
) AS related_events) AS ranked_events) AS processed
{") AS marked) AS maxed WHERE timestamp >= max) AS filtered" if event_start else ""}
) AS sorted_events
WHERE event_number <= %(JOURNEY_DEPTH)s) AS final
WHERE source_event IS NOT NULL
and target_event IS NOT NULL
GROUP BY source_event, target_event
ORDER BY value DESC
LIMIT 20;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH,
**__get_constraint_values(args), **extra_values}
# print(cur.mogrify(pg_query, params))
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
return __transform_journey(rows)
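# Normalize each week's user count by the week-0 cohort size to get a retention percentage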
def __compute_weekly_percentage(rows):
if rows is None or len(rows) == 0:
return rows
t = -1
for r in rows:
if r["week"] == 0:
t = r["usersCount"]
r["percentage"] = r["usersCount"] / t
return rows
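# Pad the chart with zero-valued entries so every week up to the horizon is present, even without data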
def __complete_retention(rows, start_date, end_date=None):
if rows is None:
return []
max_week = 10
for i in range(max_week):
if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date:
break
neutral = {
"firstConnexionWeek": start_date,
"week": i,
"usersCount": 0,
"connectedUsers": [],
"percentage": 0
}
if i < len(rows) \
and i != rows[i]["week"]:
rows.insert(i, neutral)
elif i >= len(rows):
rows.append(neutral)
return rows
def __complete_acquisition(rows, start_date, end_date=None):
if rows is None:
return []
max_week = 10
week = 0
delta_date = 0
while max_week > 0:
start_date += TimeUTC.MS_WEEK
if end_date is not None and start_date >= end_date:
break
delta = 0
if delta_date + week >= len(rows) \
or delta_date + week < len(rows) and rows[delta_date + week]["firstConnexionWeek"] > start_date:
for i in range(max_week):
if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date:
break
neutral = {
"firstConnexionWeek": start_date,
"week": i,
"usersCount": 0,
"connectedUsers": [],
"percentage": 0
}
rows.insert(delta_date + week + i, neutral)
delta = i
else:
for i in range(max_week):
if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date:
break
neutral = {
"firstConnexionWeek": start_date,
"week": i,
"usersCount": 0,
"connectedUsers": [],
"percentage": 0
}
if delta_date + week + i < len(rows) \
and i != rows[delta_date + week + i]["week"]:
rows.insert(delta_date + week + i, neutral)
elif delta_date + week + i >= len(rows):
rows.append(neutral)
delta = i
week += delta
max_week -= 1
delta_date += 1
return rows
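# Retention cohort: users whose very first session falls in the start week (no session before startTimestamp),
# tracked over the following 10 weeks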
@dev.timed
def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[],
**args):
startTimestamp = TimeUTC.trunc_week(startTimestamp)
endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK
pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
time_constraint=True)
pg_sub_query.append("user_id IS NOT NULL")
pg_sub_query.append("DATE_TRUNC('week', to_timestamp(start_ts / 1000)) = to_timestamp(%(startTimestamp)s / 1000)")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - DATE_TRUNC('week', to_timestamp(%(startTimestamp)s / 1000)::timestamp)) / 7)::integer AS week,
COUNT(DISTINCT connexions_list.user_id) AS users_count,
ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users
FROM (SELECT DISTINCT user_id
FROM sessions
WHERE {" AND ".join(pg_sub_query)}
AND DATE_PART('week', to_timestamp((sessions.start_ts - %(startTimestamp)s)/1000)) = 1
AND NOT EXISTS((SELECT 1
FROM sessions AS bsess
WHERE bsess.start_ts < %(startTimestamp)s
AND project_id = %(project_id)s
AND bsess.user_id = sessions.user_id
LIMIT 1))
) AS users_list
LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week,
user_id
FROM sessions
WHERE users_list.user_id = sessions.user_id
AND %(startTimestamp)s <=sessions.start_ts
AND sessions.project_id = %(project_id)s
AND sessions.start_ts < (%(endTimestamp)s - 1)
GROUP BY connexion_week, user_id
) AS connexions_list ON (TRUE)
GROUP BY week
ORDER BY week;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
print(cur.mogrify(pg_query, params))
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
rows = __compute_weekly_percentage(helper.list_to_camel_case(rows))
return {
"startTimestamp": startTimestamp,
"chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now())
}
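# Acquisition groups users by the week of their first session, then tracks each weekly cohort over time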
@dev.timed
def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
startTimestamp = TimeUTC.trunc_week(startTimestamp)
endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK
pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
time_constraint=True)
pg_sub_query.append("user_id IS NOT NULL")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week,
FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week,
COUNT(DISTINCT connexions_list.user_id) AS users_count,
ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users
FROM (SELECT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week
FROM sessions
WHERE {" AND ".join(pg_sub_query)}
AND NOT EXISTS((SELECT 1
FROM sessions AS bsess
WHERE bsess.start_ts<%(startTimestamp)s
AND project_id = %(project_id)s
AND bsess.user_id = sessions.user_id
LIMIT 1))
GROUP BY user_id) AS users_list
LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week,
user_id
FROM sessions
WHERE users_list.user_id = sessions.user_id
AND first_connexion_week <=
DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp)
AND sessions.project_id = %(project_id)s
AND sessions.start_ts < (%(endTimestamp)s - 1)
GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE)
GROUP BY first_connexion_week, week
ORDER BY first_connexion_week, week;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
print(cur.mogrify(pg_query, params))
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
rows = __compute_weekly_percentage(helper.list_to_camel_case(rows))
return {
"startTimestamp": startTimestamp,
"chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now())
}
@dev.timed
def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
startTimestamp = TimeUTC.trunc_week(startTimestamp)
endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK
pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
time_constraint=True)
pg_sub_query.append("user_id IS NOT NULL")
pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
event_type = "PAGES"
event_value = "/"
extra_values = {}
default = True
for f in filters:
if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
event_type = f["value"]
elif f["type"] == "EVENT_VALUE":
event_value = f["value"]
default = False
elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
pg_sub_query.append(f"sessions.user_id = %(user_id)s")
extra_values["user_id"] = f["value"]
event_table = JOURNEY_TYPES[event_type]["table"]
event_column = JOURNEY_TYPES[event_type]["column"]
pg_sub_query.append(f"feature.{event_column} = %(value)s")
with pg_client.PostgresClient() as cur:
if default:
# get most used value
pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query[:-1])}
AND length({event_column}) > 2
GROUP BY value
ORDER BY count DESC
LIMIT 1;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
if row is not None:
event_value = row["value"]
extra_values["value"] = event_value
if len(event_value) > 2:
pg_sub_query.append(f"length({event_column})>2")
pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - to_timestamp(%(startTimestamp)s/1000)) / 7)::integer AS week,
COUNT(DISTINCT connexions_list.user_id) AS users_count,
ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users
FROM (SELECT DISTINCT user_id
FROM sessions INNER JOIN {event_table} AS feature USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
AND DATE_PART('week', to_timestamp((sessions.start_ts - %(startTimestamp)s)/1000)) = 1
AND NOT EXISTS((SELECT 1
FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id)
WHERE bsess.start_ts<%(startTimestamp)s
AND project_id = %(project_id)s
AND bsess.user_id = sessions.user_id
AND bfeature.timestamp<%(startTimestamp)s
AND bfeature.{event_column}=%(value)s
LIMIT 1))
GROUP BY user_id) AS users_list
LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week,
user_id
FROM sessions INNER JOIN {event_table} AS feature USING (session_id)
WHERE users_list.user_id = sessions.user_id
AND %(startTimestamp)s <= sessions.start_ts
AND sessions.project_id = %(project_id)s
AND sessions.start_ts < (%(endTimestamp)s - 1)
AND feature.timestamp >= %(startTimestamp)s
AND feature.timestamp < %(endTimestamp)s
AND feature.{event_column} = %(value)s
GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE)
GROUP BY week
ORDER BY week;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
print(cur.mogrify(pg_query, params))
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
rows = __compute_weekly_percentage(helper.list_to_camel_case(rows))
return {
"startTimestamp": startTimestamp,
"filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}],
"chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now())
}
@dev.timed
def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
startTimestamp = TimeUTC.trunc_week(startTimestamp)
endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK
pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
time_constraint=True)
pg_sub_query.append("user_id IS NOT NULL")
pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
event_type = "PAGES"
event_value = "/"
extra_values = {}
default = True
for f in filters:
if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
event_type = f["value"]
elif f["type"] == "EVENT_VALUE":
event_value = f["value"]
default = False
elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
pg_sub_query.append(f"sessions.user_id = %(user_id)s")
extra_values["user_id"] = f["value"]
event_table = JOURNEY_TYPES[event_type]["table"]
event_column = JOURNEY_TYPES[event_type]["column"]
pg_sub_query.append(f"feature.{event_column} = %(value)s")
with pg_client.PostgresClient() as cur:
if default:
# get most used value
pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query[:-1])}
AND length({event_column}) > 2
GROUP BY value
ORDER BY count DESC
LIMIT 1;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
if row is not None:
event_value = row["value"]
extra_values["value"] = event_value
if len(event_value) > 2:
pg_sub_query.append(f"length({event_column})>2")
pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week,
FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week,
COUNT(DISTINCT connexions_list.user_id) AS users_count,
ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users
FROM (SELECT user_id, DATE_TRUNC('week', to_timestamp(first_connexion_week / 1000)) AS first_connexion_week
FROM(SELECT DISTINCT user_id, MIN(start_ts) AS first_connexion_week
FROM sessions INNER JOIN {event_table} AS feature USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
AND NOT EXISTS((SELECT 1
FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id)
WHERE bsess.start_ts<%(startTimestamp)s
AND project_id = %(project_id)s
AND bsess.user_id = sessions.user_id
AND bfeature.timestamp<%(startTimestamp)s
AND bfeature.{event_column}=%(value)s
LIMIT 1))
GROUP BY user_id) AS raw_users_list) AS users_list
LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week,
user_id
FROM sessions INNER JOIN {event_table} AS feature USING(session_id)
WHERE users_list.user_id = sessions.user_id
AND first_connexion_week <=
DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp)
AND sessions.project_id = %(project_id)s
AND sessions.start_ts < (%(endTimestamp)s - 1)
AND feature.timestamp >= %(startTimestamp)s
AND feature.timestamp < %(endTimestamp)s
AND feature.{event_column} = %(value)s
GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE)
GROUP BY first_connexion_week, week
ORDER BY first_connexion_week, week;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
print(cur.mogrify(pg_query, params))
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
rows = __compute_weekly_percentage(helper.list_to_camel_case(rows))
return {
"startTimestamp": startTimestamp,
"filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}],
"chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now())
}
@dev.timed
def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
startTimestamp = TimeUTC.trunc_week(startTimestamp)
endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK
pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
time_constraint=True)
event_table = JOURNEY_TYPES["CLICK"]["table"]
event_column = JOURNEY_TYPES["CLICK"]["column"]
extra_values = {}
for f in filters:
if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
event_table = JOURNEY_TYPES[f["value"]]["table"]
event_column = JOURNEY_TYPES[f["value"]]["column"]
elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
pg_sub_query.append(f"sessions.user_id = %(user_id)s")
extra_values["user_id"] = f["value"]
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count
FROM sessions
WHERE {" AND ".join(pg_sub_query)}
AND user_id IS NOT NULL;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
# print(cur.mogrify(pg_query, params))
# print("---------------------")
cur.execute(cur.mogrify(pg_query, params))
all_user_count = cur.fetchone()["count"]
if all_user_count == 0:
return []
pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
pg_sub_query.append(f"length({event_column})>2")
pg_query = f"""SELECT {event_column} AS value, COUNT(DISTINCT user_id) AS count
FROM {event_table} AS feature INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
AND user_id IS NOT NULL
GROUP BY value
ORDER BY count DESC
LIMIT 7;"""
# TODO: solve full scan
print(cur.mogrify(pg_query, params))
print("---------------------")
cur.execute(cur.mogrify(pg_query, params))
popularity = cur.fetchall()
pg_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count
FROM {event_table} AS feature INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY value;"""
# TODO: solve full scan
print(cur.mogrify(pg_query, params))
print("---------------------")
cur.execute(cur.mogrify(pg_query, params))
frequencies = cur.fetchall()
total_usage = sum([f["count"] for f in frequencies])
frequencies = {f["value"]: f["count"] for f in frequencies}
for p in popularity:
p["popularity"] = p.pop("count") / all_user_count
p["frequency"] = frequencies[p["value"]] / total_usage
return popularity
@dev.timed
def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
time_constraint=True)
event_type = "CLICK"
event_value = '/'
extra_values = {}
default = True
for f in filters:
if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
event_type = f["value"]
elif f["type"] == "EVENT_VALUE":
event_value = f["value"]
default = False
elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
pg_sub_query.append(f"sessions.user_id = %(user_id)s")
extra_values["user_id"] = f["value"]
event_table = JOURNEY_TYPES[event_type]["table"]
event_column = JOURNEY_TYPES[event_type]["column"]
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count
FROM sessions
WHERE {" AND ".join(pg_sub_query)}
AND user_id IS NOT NULL;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
# print(cur.mogrify(pg_query, params))
# print("---------------------")
cur.execute(cur.mogrify(pg_query, params))
all_user_count = cur.fetchone()["count"]
if all_user_count == 0:
return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type},
{"type": "EVENT_VALUE", "value": event_value}], }
pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
if default:
# get most used value
pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query[:-1])}
AND length({event_column}) > 2
GROUP BY value
ORDER BY count DESC
LIMIT 1;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
if row is not None:
event_value = row["value"]
extra_values["value"] = event_value
if len(event_value) > 2:
pg_sub_query.append(f"length({event_column})>2")
pg_sub_query.append(f"feature.{event_column} = %(value)s")
pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count
FROM {event_table} AS feature INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
AND user_id IS NOT NULL;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
# print(cur.mogrify(pg_query, params))
# print("---------------------")
cur.execute(cur.mogrify(pg_query, params))
adoption = cur.fetchone()["count"] / all_user_count
return {"target": all_user_count, "adoption": adoption,
"filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]}
@dev.timed
def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
time_constraint=True)
pg_sub_query.append("user_id IS NOT NULL")
event_type = "CLICK"
event_value = '/'
extra_values = {}
default = True
for f in filters:
if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
event_type = f["value"]
elif f["type"] == "EVENT_VALUE":
event_value = f["value"]
default = False
elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
pg_sub_query.append(f"sessions.user_id = %(user_id)s")
extra_values["user_id"] = f["value"]
event_table = JOURNEY_TYPES[event_type]["table"]
event_column = JOURNEY_TYPES[event_type]["column"]
with pg_client.PostgresClient() as cur:
pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
if default:
# get most used value
pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query[:-1])}
AND length({event_column}) > 2
GROUP BY value
ORDER BY count DESC
LIMIT 1;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
if row is not None:
event_value = row["value"]
extra_values["value"] = event_value
if len(event_value) > 2:
pg_sub_query.append(f"length({event_column})>2")
pg_sub_query.append(f"feature.{event_column} = %(value)s")
pg_query = f"""SELECT user_id, COUNT(DISTINCT session_id) AS count
FROM {event_table} AS feature
INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY 1
ORDER BY 2 DESC
LIMIT 10;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
# print(cur.mogrify(pg_query, params))
# print("---------------------")
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
return {"users": helper.list_to_camel_case(rows),
"filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]}
@dev.timed
def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
time_constraint=True)
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
chart=True, data=args)
event_type = "CLICK"
event_value = '/'
extra_values = {}
default = True
for f in filters:
if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
event_type = f["value"]
elif f["type"] == "EVENT_VALUE":
event_value = f["value"]
default = False
elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s")
extra_values["user_id"] = f["value"]
event_table = JOURNEY_TYPES[event_type]["table"]
event_column = JOURNEY_TYPES[event_type]["column"]
with pg_client.PostgresClient() as cur:
pg_sub_query_chart.append("feature.timestamp >= %(startTimestamp)s")
pg_sub_query_chart.append("feature.timestamp < %(endTimestamp)s")
pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
if default:
# get most used value
pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
AND length({event_column})>2
GROUP BY value
ORDER BY count DESC
LIMIT 1;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
if row is not None:
event_value = row["value"]
extra_values["value"] = event_value
if len(event_value) > 2:
pg_sub_query.append(f"length({event_column})>2")
pg_sub_query_chart.append(f"feature.{event_column} = %(value)s")
pg_query = f"""SELECT generated_timestamp AS timestamp,
COALESCE(COUNT(session_id), 0) AS count
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT DISTINCT session_id
FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query_chart)}
) AS users ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp;"""
params = {"step_size": TimeUTC.MS_DAY, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
print(cur.mogrify(pg_query, params))
print("---------------------")
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
return {"chart": helper.list_to_camel_case(rows),
"filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]}
@dev.timed
def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
time_constraint=True)
pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
event_table = JOURNEY_TYPES["CLICK"]["table"]
event_column = JOURNEY_TYPES["CLICK"]["column"]
extra_values = {}
for f in filters:
if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
event_table = JOURNEY_TYPES[f["value"]]["table"]
event_column = JOURNEY_TYPES[f["value"]]["column"]
elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
pg_sub_query.append(f"sessions.user_id = %(user_id)s")
extra_values["user_id"] = f["value"]
pg_sub_query.append(f"length({event_column})>2")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg
FROM {event_table} AS feature INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY value
ORDER BY avg DESC
LIMIT 7;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
# TODO: solve full scan issue
print(cur.mogrify(pg_query, params))
print("---------------------")
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
return rows
@dev.timed
def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[],
**args):
pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True,
chart=True, data=args)
pg_sub_query_chart.append("user_id IS NOT NULL")
period = "DAY"
extra_values = {}
for f in filters:
if f["type"] == "PERIOD" and f["value"] in ["DAY", "WEEK"]:
period = f["value"]
elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s")
extra_values["user_id"] = f["value"]
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(chart) AS chart
FROM (SELECT generated_timestamp AS timestamp,
COALESCE(COUNT(users), 0) AS count
FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp
LEFT JOIN LATERAL ( SELECT DISTINCT user_id
FROM public.sessions
WHERE {" AND ".join(pg_sub_query_chart)}
) AS users ON (TRUE)
GROUP BY generated_timestamp
ORDER BY generated_timestamp) AS chart;"""
params = {"step_size": TimeUTC.MS_DAY if period == "DAY" else TimeUTC.MS_WEEK,
"project_id": project_id,
"startTimestamp": TimeUTC.trunc_day(startTimestamp) if period == "DAY" else TimeUTC.trunc_week(
startTimestamp),
"endTimestamp": endTimestamp, **__get_constraint_values(args),
**extra_values}
# print(cur.mogrify(pg_query, params))
# print("---------------------")
cur.execute(cur.mogrify(pg_query, params))
row_users = cur.fetchone()
return row_users
@dev.timed
def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args)
pg_sub_query.append("user_id IS NOT NULL")
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(day_users_partition) AS partition
FROM (SELECT number_of_days, COUNT(user_id) AS count
FROM (SELECT user_id, COUNT(DISTINCT DATE_TRUNC('day', to_timestamp(start_ts / 1000))) AS number_of_days
FROM sessions
WHERE {" AND ".join(pg_sub_query)}
GROUP BY 1) AS users_connexions
GROUP BY number_of_days
ORDER BY number_of_days) AS day_users_partition;"""
params = {"project_id": project_id,
"startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)}
# print(cur.mogrify(pg_query, params))
# print("---------------------")
cur.execute(cur.mogrify(pg_query, params))
row_users = cur.fetchone()
return helper.dict_to_camel_case(row_users)
@dev.timed
def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(),
filters=[], **args):
pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions",
time_constraint=True)
pg_sub_query.append("user_id IS NOT NULL")
pg_sub_query.append("feature.timestamp >= %(startTimestamp)s")
pg_sub_query.append("feature.timestamp < %(endTimestamp)s")
event_type = "PAGES"
event_value = "/"
extra_values = {}
default = True
for f in filters:
if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
event_type = f["value"]
elif f["type"] == "EVENT_VALUE":
event_value = f["value"]
default = False
elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
pg_sub_query.append(f"sessions.user_id = %(user_id)s")
extra_values["user_id"] = f["value"]
event_table = JOURNEY_TYPES[event_type]["table"]
event_column = JOURNEY_TYPES[event_type]["column"]
pg_sub_query.append(f"feature.{event_column} = %(value)s")
with pg_client.PostgresClient() as cur:
if default:
# get most used value
pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query[:-1])}
AND length({event_column}) > 2
GROUP BY value
ORDER BY count DESC
LIMIT 1;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
cur.execute(cur.mogrify(pg_query, params))
row = cur.fetchone()
if row is not None:
event_value = row["value"]
extra_values["value"] = event_value
if len(event_value) > 2:
pg_sub_query.append(f"length({event_column})>2")
pg_query = f"""SELECT user_id, last_time, interactions_count, MIN(start_ts) AS first_seen, MAX(start_ts) AS last_seen
FROM (SELECT user_id, MAX(timestamp) AS last_time, COUNT(DISTINCT session_id) AS interactions_count
FROM {event_table} AS feature INNER JOIN sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
GROUP BY user_id) AS user_last_usage
INNER JOIN sessions USING (user_id)
WHERE EXTRACT(EPOCH FROM now()) * 1000 - last_time > 7 * 24 * 60 * 60 * 1000
GROUP BY user_id, last_time,interactions_count;"""
params = {"project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
# print(cur.mogrify(pg_query, params))
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
return {
"startTimestamp": startTimestamp,
"filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}],
"list": helper.list_to_camel_case(rows)
}
@dev.timed
def search(text, feature_type, project_id, platform=None):
if not feature_type:
resource_type = "ALL"
data = search(text=text, feature_type=resource_type, project_id=project_id, platform=platform)
return data
pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, duration=True,
data={} if platform is None else {"platform": platform})
params = {"startTimestamp": TimeUTC.now() - 2 * TimeUTC.MS_MONTH,
"endTimestamp": TimeUTC.now(),
"project_id": project_id,
"value": helper.string_to_sql_like(text.lower()),
"platform_0": platform}
if feature_type == "ALL":
with pg_client.PostgresClient() as cur:
sub_queries = []
for e in JOURNEY_TYPES:
sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type"
FROM {JOURNEY_TYPES[e]["table"]} INNER JOIN public.sessions USING(session_id)
WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[e]["column"]} ILIKE %(value)s
LIMIT 10)""")
pg_query = "UNION ALL".join(sub_queries)
# print(cur.mogrify(pg_query, params))
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
elif JOURNEY_TYPES.get(feature_type) is not None:
with pg_client.PostgresClient() as cur:
pg_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type"
FROM {JOURNEY_TYPES[feature_type]["table"]} INNER JOIN public.sessions USING(session_id)
WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[feature_type]["column"]} ILIKE %(value)s
LIMIT 10;"""
# print(cur.mogrify(pg_query, params))
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
else:
return []
return [helper.dict_to_camel_case(row) for row in rows]

View file

@ -39,7 +39,7 @@ class GithubIntegrationIssue(BaseIntegrationIssue):
for a in assignees:
for u in metas["users"]:
if a == str(u["id"]):
real_assignees.append(u["login"])
real_assignees.append(u["name"])
break
real_labels = ["OpenReplay"]
for l in labels:

View file

@ -132,7 +132,7 @@ def get_scheduled_jobs():
def execute_jobs():
jobs = get_scheduled_jobs()
if len(jobs) == 0:
print('No jobs to execute.')
# No jobs to execute
return
for job in jobs:

View file

@ -1,7 +1,7 @@
from chalicelib.utils import pg_client, helper, dev
import re
from chalicelib.core import projects
import re
from chalicelib.utils import pg_client, dev
MAX_INDEXES = 10
@ -30,6 +30,30 @@ def get(project_id):
return results
def get_batch(project_ids):
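# Batch variant of get(): fetch the metadata columns of several projects in a single query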
if project_ids is None or len(project_ids) == 0:
return []
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""\
SELECT
project_id, {",".join(_get_column_names())}
FROM public.projects
WHERE project_id IN %(project_ids)s
AND deleted_at ISNULL;""", {"project_ids": tuple(project_ids)})
)
full_metas = cur.fetchall()
results = {}
if full_metas is not None and len(full_metas) > 0:
for metas in full_metas:
results[str(metas["project_id"])] = []
for i, k in enumerate(metas.keys()):
if metas[k] is not None and k != "project_id":
results[str(metas["project_id"])].append({"key": metas[k], "index": i + 1})
return results
regex = re.compile(r'^[a-z0-9_-]+$', re.IGNORECASE)
@ -90,7 +114,9 @@ def delete(tenant_id, project_id, index: int):
cur.execute(query=query)
query = cur.mogrify(f"""UPDATE public.sessions
SET {colname}= NULL
WHERE project_id = %(project_id)s""",
WHERE project_id = %(project_id)s
AND {colname} IS NOT NULL
""",
{"project_id": project_id})
cur.execute(query=query)
@ -251,12 +277,13 @@ def add_edit_delete(tenant_id, project_id, new_metas):
def get_remaining_metadata_with_count(tenant_id):
all_projects = projects.get_projects(tenant_id=tenant_id)
results = []
used_metas = get_batch([p["projectId"] for p in all_projects])
for p in all_projects:
used_metas = get(p["projectId"])
if MAX_INDEXES < 0:
remaining = -1
else:
remaining = MAX_INDEXES - len(used_metas)
results.append({**p, "limit": MAX_INDEXES, "remaining": remaining, "count": len(used_metas)})
remaining = MAX_INDEXES - len(used_metas[str(p["projectId"])])
results.append(
{**p, "limit": MAX_INDEXES, "remaining": remaining, "count": len(used_metas[str(p["projectId"])])})
return results
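The get_batch() change above removes an N+1 query: one round trip fetches metadata for every project, and the loop then works off the returned dict. A sketch of the consuming side, assuming get_batch() returns {str(project_id): [metas]} as in the hunk:
def remaining_for(all_projects, used_metas, max_indexes=MAX_INDEXES):
    # used_metas comes from a single get_batch() call instead of one
    # get() query per project.
    results = []
    for p in all_projects:
        used = used_metas.get(str(p["projectId"]), [])
        remaining = -1 if max_indexes < 0 else max_indexes - len(used)
        results.append({**p, "limit": max_indexes,
                        "remaining": remaining, "count": len(used)})
    return results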

View file

@ -1,13 +1,13 @@
from chalicelib.core import projects
from chalicelib.utils import s3
from chalicelib.utils.helper import environ
from decouple import config
def sign_keys(project_id, session_id, keys):
result = []
project_key = projects.get_project_key(project_id)
for k in keys:
result.append(s3.get_presigned_url_for_sharing(bucket=environ["iosBucket"],
result.append(s3.get_presigned_url_for_sharing(bucket=config("iosBucket"),
key=f"{project_key}/{session_id}/{k}",
expires_in=60 * 60))
return result

View file

@ -0,0 +1,15 @@
import schemas
def get_col(perf: schemas.PerformanceEventType):
return {
schemas.PerformanceEventType.location_dom_complete: {"column": "dom_building_time", "extraJoin": None},
schemas.PerformanceEventType.location_ttfb: {"column": "ttfb", "extraJoin": None},
schemas.PerformanceEventType.location_avg_cpu_load: {"column": "avg_cpu", "extraJoin": "events.performance"},
schemas.PerformanceEventType.location_avg_memory_usage: {"column": "avg_used_js_heap_size",
"extraJoin": "events.performance"},
schemas.PerformanceEventType.fetch_failed: {"column": "success", "extraJoin": None},
# schemas.PerformanceEventType.fetch_duration: {"column": "duration", "extraJoin": None},
schemas.PerformanceEventType.location_largest_contentful_paint_time: {"column": "first_contentful_paint_time",
"extraJoin": None}
}.get(perf)
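A usage sketch for the new get_col() helper (event choice illustrative; assumes get_col and schemas from the hunk are in scope):
col = get_col(schemas.PerformanceEventType.location_ttfb)
if col is not None:
    column, extra_join = col["column"], col["extraJoin"]
    # extra_join is non-None only for metrics stored in events.performance,
    # which need an extra JOIN before the column can be filtered on.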

View file

@ -1,5 +1,6 @@
import json
import schemas
from chalicelib.core import users
from chalicelib.utils import pg_client, helper, dev
from chalicelib.utils.TimeUTC import TimeUTC
@ -41,18 +42,29 @@ def __create(tenant_id, name):
@dev.timed
def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False,version=False):
def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False, version=False,
last_tracker_version=None):
with pg_client.PostgresClient() as cur:
tracker_query = ""
if last_tracker_version is not None and len(last_tracker_version) > 0:
tracker_query = cur.mogrify(
""",(SELECT tracker_version FROM public.sessions
WHERE sessions.project_id = s.project_id
AND tracker_version=%(version)s AND tracker_version IS NOT NULL LIMIT 1) AS tracker_version""",
{"version": last_tracker_version}).decode('UTF-8')
elif version:
tracker_query = ",(SELECT tracker_version FROM public.sessions WHERE sessions.project_id = s.project_id ORDER BY start_ts DESC LIMIT 1) AS tracker_version"
cur.execute(f"""\
SELECT
s.project_id, s.name, s.project_key
{',s.gdpr' if gdpr else ''}
{',COALESCE((SELECT TRUE FROM public.sessions WHERE sessions.project_id = s.project_id LIMIT 1), FALSE) AS recorded' if recorded else ''}
{',stack_integrations.count>0 AS stack_integrations' if stack_integrations else ''}
{',(SELECT tracker_version FROM public.sessions WHERE sessions.project_id = s.project_id ORDER BY start_ts DESC LIMIT 1) AS tracker_version' if version else ''}
{tracker_query}
FROM public.projects AS s
{'LEFT JOIN LATERAL (SELECT COUNT(*) AS count FROM public.integrations WHERE s.project_id = integrations.project_id LIMIT 1) AS stack_integrations ON TRUE' if stack_integrations else ''}
where s.deleted_at IS NULL
WHERE s.deleted_at IS NULL
ORDER BY s.project_id;"""
)
rows = cur.fetchall()
@ -75,8 +87,19 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
return helper.list_to_camel_case(rows)
def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=None):
def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=None, version=False,
last_tracker_version=None):
with pg_client.PostgresClient() as cur:
tracker_query = ""
if last_tracker_version is not None and len(last_tracker_version) > 0:
tracker_query = cur.mogrify(
""",(SELECT tracker_version FROM public.sessions
WHERE sessions.project_id = s.project_id
AND tracker_version=%(version)s AND tracker_version IS NOT NULL LIMIT 1) AS tracker_version""",
{"version": last_tracker_version}).decode('UTF-8')
elif version:
tracker_query = ",(SELECT tracker_version FROM public.sessions WHERE sessions.project_id = s.project_id ORDER BY start_ts DESC LIMIT 1) AS tracker_version"
query = cur.mogrify(f"""\
SELECT
s.project_id,
@ -84,6 +107,7 @@ def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=
s.name
{",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_id = %(project_id)s) AS last_recorded_session_at" if include_last_session else ""}
{',s.gdpr' if include_gdpr else ''}
{tracker_query}
FROM public.projects AS s
where s.project_id =%(project_id)s
AND s.deleted_at IS NULL
@ -96,6 +120,7 @@ def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=
row = cur.fetchone()
return helper.dict_to_camel_case(row)
def get_project_by_key(tenant_id, project_key, include_last_session=False, include_gdpr=None):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
@ -117,20 +142,20 @@ def get_project_by_key(tenant_id, project_key, include_last_session=False, inclu
return helper.dict_to_camel_case(row)
def create(tenant_id, user_id, data, skip_authorization=False):
def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authorization=False):
if not skip_authorization:
admin = users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
return {"data": __create(tenant_id=tenant_id, name=data.get("name", "my first project"))}
return {"data": __create(tenant_id=tenant_id, name=data.name)}
def edit(tenant_id, user_id, project_id, data):
def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema):
admin = users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
return {"data": __update(tenant_id=tenant_id, project_id=project_id,
changes={"name": data.get("name", "my first project")})}
changes={"name": data.name})}
def delete(tenant_id, user_id, project_id):

View file

@ -1,26 +1,25 @@
import schemas
from chalicelib.core import users
from chalicelib.utils import email_helper, captcha, helper
def reset(data):
def reset(data: schemas.ForgetPasswordPayloadSchema):
print("====================== reset password ===============")
print(data)
if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]):
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
print("error: Invalid captcha.")
return {"errors": ["Invalid captcha."]}
if "email" not in data:
return {"errors": ["email not found in body"]}
if not helper.has_smtp():
return {"errors": ["no SMTP configuration found, you can ask your admin to reset your password"]}
a_users = users.get_by_email_only(data["email"])
a_users = users.get_by_email_only(data.email)
if len(a_users) > 1:
print(f"multiple users found for [{data['email']}] please contact our support")
print(f"multiple users found for [{data.email}] please contact our support")
return {"errors": ["multiple users, please contact our support"]}
elif len(a_users) == 1:
a_users = a_users[0]
invitation_link = users.generate_new_invitation(user_id=a_users["id"])
email_helper.send_forgot_password(recipient=data["email"], invitation_link=invitation_link)
email_helper.send_forgot_password(recipient=data.email, invitation_link=invitation_link)
else:
print(f"invalid email address [{data['email']}]")
print(f"invalid email address [{data.email}]")
return {"errors": ["invalid email address"]}
return {"data": {"state": "success"}}

View file

@ -0,0 +1,115 @@
import json
import schemas
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
def create(project_id, user_id, data: schemas.SavedSearchSchema):
with pg_client.PostgresClient() as cur:
data = data.dict()
data["filter"] = json.dumps(data["filter"])
query = cur.mogrify("""\
INSERT INTO public.searches (project_id, user_id, name, filter,is_public)
VALUES (%(project_id)s, %(user_id)s, %(name)s, %(filter)s::jsonb,%(is_public)s)
RETURNING *;""", {"user_id": user_id, "project_id": project_id, **data})
cur.execute(
query
)
r = cur.fetchone()
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r = helper.dict_to_camel_case(r)
return {"data": r}
def update(search_id, project_id, user_id, data: schemas.SavedSearchSchema):
with pg_client.PostgresClient() as cur:
data = data.dict()
data["filter"] = json.dumps(data["filter"])
query = cur.mogrify(f"""\
UPDATE public.searches
SET name = %(name)s,
filter = %(filter)s,
is_public = %(is_public)s
WHERE search_id=%(search_id)s
AND project_id= %(project_id)s
AND (user_id = %(user_id)s OR is_public)
RETURNING *;""", {"search_id": search_id, "project_id": project_id, "user_id": user_id, **data})
cur.execute(
query
)
r = cur.fetchone()
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
r = helper.dict_to_camel_case(r)
# r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
return r
def get_all(project_id, user_id, details=False):
with pg_client.PostgresClient() as cur:
print(cur.mogrify(
f"""\
SELECT search_id, project_id, user_id, name, created_at, deleted_at, is_public
{",filter" if details else ""}
FROM public.searches
WHERE project_id = %(project_id)s
AND deleted_at IS NULL
AND (user_id = %(user_id)s OR is_public);""",
{"project_id": project_id, "user_id": user_id}
))
cur.execute(
cur.mogrify(
f"""\
SELECT search_id, project_id, user_id, name, created_at, deleted_at, is_public
{",filter" if details else ""}
FROM public.searches
WHERE project_id = %(project_id)s
AND deleted_at IS NULL
AND (user_id = %(user_id)s OR is_public);""",
{"project_id": project_id, "user_id": user_id}
)
)
rows = cur.fetchall()
rows = helper.list_to_camel_case(rows)
for row in rows:
row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
return rows
def delete(project_id, search_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify("""\
UPDATE public.searches
SET deleted_at = timezone('utc'::text, now())
WHERE project_id = %(project_id)s
AND search_id = %(search_id)s
AND (user_id = %(user_id)s OR is_public);""",
{"search_id": search_id, "project_id": project_id, "user_id": user_id})
)
return {"state": "success"}
def get(search_id, project_id, user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
"""SELECT
*
FROM public.searches
WHERE project_id = %(project_id)s
AND deleted_at IS NULL
AND search_id = %(search_id)s
AND (user_id = %(user_id)s OR is_public);""",
{"search_id": search_id, "project_id": project_id, "user_id": user_id}
)
)
f = helper.dict_to_camel_case(cur.fetchone())
if f is None:
return None
f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
return f
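A usage sketch for the new saved-searches module; the payload field names follow the SQL above (name, filter, is_public), the IDs are hypothetical, and the camelCase result keys assume dict_to_camel_case as used in the hunk:
import schemas  # repo-local module

payload = schemas.SavedSearchSchema.parse_obj(
    {"name": "rage clicks last week", "is_public": True,
     "filter": {"events": [], "filters": []}})
new_search = create(project_id=1, user_id=7, data=payload)
all_searches = get_all(project_id=1, user_id=7, details=True)
one = get(search_id=new_search["data"]["searchId"], project_id=1, user_id=7)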

File diff suppressed because it is too large

View file

@ -1,4 +1,4 @@
from chalicelib.utils.helper import environ as env
from decouple import config
from chalicelib.utils import helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils import pg_client
@ -32,7 +32,7 @@ def create_new_assignment(tenant_id, project_id, session_id, creator_id, assigne
if i is None:
return {"errors": [f"integration not found"]}
link = env["SITE_URL"] + f"/{project_id}/session/{session_id}"
link = config("SITE_URL") + f"/{project_id}/session/{session_id}"
description += f"\n> {link}"
try:
issue = integration.issue_handler.create_new_assignment(title=title, assignee=assignee, description=description,

View file

@ -1,3 +1,4 @@
import schemas
from chalicelib.utils import pg_client, helper
from chalicelib.utils.event_filter_definition import SupportedFilter
@ -8,40 +9,47 @@ def get_key_values(project_id):
cur.mogrify(
f"""\
SELECT ARRAY_AGG(DISTINCT s.user_os
ORDER BY s.user_os) FILTER ( WHERE s.user_os IS NOT NULL AND s.platform='web') AS {meta_type.USEROS},
ORDER BY s.user_os) FILTER ( WHERE s.user_os IS NOT NULL AND s.platform='web') AS {schemas.FilterType.user_os},
ARRAY_AGG(DISTINCT s.user_browser
ORDER BY s.user_browser)
FILTER ( WHERE s.user_browser IS NOT NULL AND s.platform='web') AS {meta_type.USERBROWSER},
FILTER ( WHERE s.user_browser IS NOT NULL AND s.platform='web') AS {schemas.FilterType.user_browser},
ARRAY_AGG(DISTINCT s.user_device
ORDER BY s.user_device)
FILTER ( WHERE s.user_device IS NOT NULL AND s.user_device != '' AND s.platform='web') AS {meta_type.USERDEVICE},
FILTER ( WHERE s.user_device IS NOT NULL AND s.user_device != '' AND s.platform='web') AS {schemas.FilterType.user_device},
ARRAY_AGG(DISTINCT s.user_country
ORDER BY s.user_country)
FILTER ( WHERE s.user_country IS NOT NULL AND s.platform='web')::text[] AS {meta_type.USERCOUNTRY},
FILTER ( WHERE s.user_country IS NOT NULL AND s.platform='web')::text[] AS {schemas.FilterType.user_country},
ARRAY_AGG(DISTINCT s.user_id
ORDER BY s.user_id) FILTER ( WHERE s.user_id IS NOT NULL AND s.user_id != 'none' AND s.user_id != '' AND s.platform='web') AS {meta_type.USERID},
ORDER BY s.user_id) FILTER ( WHERE s.user_id IS NOT NULL AND s.user_id != 'none' AND s.user_id != '' AND s.platform='web') AS {schemas.FilterType.user_id},
ARRAY_AGG(DISTINCT s.user_anonymous_id
ORDER BY s.user_anonymous_id) FILTER ( WHERE s.user_anonymous_id IS NOT NULL AND s.user_anonymous_id != 'none' AND s.user_anonymous_id != '' AND s.platform='web') AS {meta_type.USERANONYMOUSID},
ORDER BY s.user_anonymous_id) FILTER ( WHERE s.user_anonymous_id IS NOT NULL AND s.user_anonymous_id != 'none' AND s.user_anonymous_id != '' AND s.platform='web') AS {schemas.FilterType.user_anonymous_id},
ARRAY_AGG(DISTINCT s.rev_id
ORDER BY s.rev_id) FILTER ( WHERE s.rev_id IS NOT NULL AND s.platform='web') AS {meta_type.REVID},
ORDER BY s.rev_id) FILTER ( WHERE s.rev_id IS NOT NULL AND s.platform='web') AS {schemas.FilterType.rev_id},
ARRAY_AGG(DISTINCT p.referrer
ORDER BY p.referrer)
FILTER ( WHERE p.referrer != '' ) AS {meta_type.REFERRER},
FILTER ( WHERE p.referrer != '' ) AS {schemas.FilterType.referrer},
ARRAY_AGG(DISTINCT s.utm_source
ORDER BY s.utm_source) FILTER ( WHERE s.utm_source IS NOT NULL AND s.utm_source != 'none' AND s.utm_source != '') AS {schemas.FilterType.utm_source},
ARRAY_AGG(DISTINCT s.utm_medium
ORDER BY s.utm_medium) FILTER ( WHERE s.utm_medium IS NOT NULL AND s.utm_medium != 'none' AND s.utm_medium != '') AS {schemas.FilterType.utm_medium},
ARRAY_AGG(DISTINCT s.utm_campaign
ORDER BY s.utm_campaign) FILTER ( WHERE s.utm_campaign IS NOT NULL AND s.utm_campaign != 'none' AND s.utm_campaign != '') AS {schemas.FilterType.utm_campaign},
ARRAY_AGG(DISTINCT s.user_os
ORDER BY s.user_os) FILTER ( WHERE s.user_os IS NOT NULL AND s.platform='ios' ) AS {meta_type.USEROS_IOS},
ORDER BY s.user_os) FILTER ( WHERE s.user_os IS NOT NULL AND s.platform='ios' ) AS {schemas.FilterType.user_os_ios},
ARRAY_AGG(DISTINCT s.user_device
ORDER BY s.user_device)
FILTER ( WHERE s.user_device IS NOT NULL AND s.user_device != '' AND s.platform='ios') AS {meta_type.USERDEVICE},
FILTER ( WHERE s.user_device IS NOT NULL AND s.user_device != '' AND s.platform='ios') AS {schemas.FilterType.user_device_ios},
ARRAY_AGG(DISTINCT s.user_country
ORDER BY s.user_country)
FILTER ( WHERE s.user_country IS NOT NULL AND s.platform='ios')::text[] AS {meta_type.USERCOUNTRY_IOS},
FILTER ( WHERE s.user_country IS NOT NULL AND s.platform='ios')::text[] AS {schemas.FilterType.user_country_ios},
ARRAY_AGG(DISTINCT s.user_id
ORDER BY s.user_id) FILTER ( WHERE s.user_id IS NOT NULL AND s.user_id != 'none' AND s.user_id != '' AND s.platform='ios') AS {meta_type.USERID_IOS},
ORDER BY s.user_id) FILTER ( WHERE s.user_id IS NOT NULL AND s.user_id != 'none' AND s.user_id != '' AND s.platform='ios') AS {schemas.FilterType.user_id_ios},
ARRAY_AGG(DISTINCT s.user_anonymous_id
ORDER BY s.user_anonymous_id) FILTER ( WHERE s.user_anonymous_id IS NOT NULL AND s.user_anonymous_id != 'none' AND s.user_anonymous_id != '' AND s.platform='ios') AS {meta_type.USERANONYMOUSID_IOS},
ORDER BY s.user_anonymous_id) FILTER ( WHERE s.user_anonymous_id IS NOT NULL AND s.user_anonymous_id != 'none' AND s.user_anonymous_id != '' AND s.platform='ios') AS {schemas.FilterType.user_anonymous_id_ios},
ARRAY_AGG(DISTINCT s.rev_id
ORDER BY s.rev_id) FILTER ( WHERE s.rev_id IS NOT NULL AND s.platform='ios') AS {meta_type.REVID_IOS}
ORDER BY s.rev_id) FILTER ( WHERE s.rev_id IS NOT NULL AND s.platform='ios') AS {schemas.FilterType.rev_id_ios}
FROM public.sessions AS s
LEFT JOIN events.pages AS p USING (session_id)
WHERE s.project_id = %(site_id)s;""",
@ -108,119 +116,137 @@ def __generic_autocomplete(typename):
return f
class meta_type:
USEROS = "USEROS"
USERBROWSER = "USERBROWSER"
USERDEVICE = "USERDEVICE"
USERCOUNTRY = "USERCOUNTRY"
USERID = "USERID"
USERANONYMOUSID = "USERANONYMOUSID"
REFERRER = "REFERRER"
REVID = "REVID"
# IOS
USEROS_IOS = "USEROS_IOS"
USERDEVICE_IOS = "USERDEVICE_IOS"
USERCOUNTRY_IOS = "USERCOUNTRY_IOS"
USERID_IOS = "USERID_IOS"
USERANONYMOUSID_IOS = "USERANONYMOUSID_IOS"
REVID_IOS = "REVID_IOS"
SUPPORTED_TYPES = {
meta_type.USEROS: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USEROS),
query=__generic_query(typename=meta_type.USEROS),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
meta_type.USERBROWSER: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERBROWSER),
query=__generic_query(typename=meta_type.USERBROWSER),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
meta_type.USERDEVICE: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERDEVICE),
query=__generic_query(typename=meta_type.USERDEVICE),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
meta_type.USERCOUNTRY: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERCOUNTRY),
query=__generic_query(typename=meta_type.USERCOUNTRY),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
meta_type.USERID: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERID),
query=__generic_query(typename=meta_type.USERID),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
meta_type.USERANONYMOUSID: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERANONYMOUSID),
query=__generic_query(typename=meta_type.USERANONYMOUSID),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
meta_type.REVID: SupportedFilter(get=__generic_autocomplete(typename=meta_type.REVID),
query=__generic_query(typename=meta_type.REVID),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
meta_type.REFERRER: SupportedFilter(get=__generic_autocomplete(typename=meta_type.REFERRER),
query=__generic_query(typename=meta_type.REFERRER),
value_limit=5,
starts_with="/",
starts_limit=5,
ignore_if_starts_with=[]),
schemas.FilterType.user_os: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_os),
query=__generic_query(typename=schemas.FilterType.user_os),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
schemas.FilterType.user_browser: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_browser),
query=__generic_query(typename=schemas.FilterType.user_browser),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
schemas.FilterType.user_device: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_device),
query=__generic_query(typename=schemas.FilterType.user_device),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
schemas.FilterType.user_country: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_country),
query=__generic_query(typename=schemas.FilterType.user_country),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
schemas.FilterType.user_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_id),
query=__generic_query(typename=schemas.FilterType.user_id),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
schemas.FilterType.user_anonymous_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id),
query=__generic_query(typename=schemas.FilterType.user_anonymous_id),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
schemas.FilterType.rev_id: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.rev_id),
query=__generic_query(typename=schemas.FilterType.rev_id),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
schemas.FilterType.referrer: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.referrer),
query=__generic_query(typename=schemas.FilterType.referrer),
value_limit=5,
starts_with="/",
starts_limit=5,
ignore_if_starts_with=[]),
schemas.FilterType.utm_campaign: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_campaign),
query=__generic_query(typename=schemas.FilterType.utm_campaign),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
schemas.FilterType.utm_medium: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_medium),
query=__generic_query(typename=schemas.FilterType.utm_medium),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
schemas.FilterType.utm_source: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.utm_source),
query=__generic_query(typename=schemas.FilterType.utm_source),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
# IOS
meta_type.USEROS_IOS: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USEROS_IOS),
query=__generic_query(typename=meta_type.USEROS_IOS),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
meta_type.USERDEVICE_IOS: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERDEVICE_IOS),
query=__generic_query(typename=meta_type.USERDEVICE_IOS),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
meta_type.USERCOUNTRY_IOS: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERCOUNTRY_IOS),
query=__generic_query(typename=meta_type.USERCOUNTRY_IOS),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
meta_type.USERID_IOS: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERID_IOS),
query=__generic_query(typename=meta_type.USERID_IOS),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
meta_type.USERANONYMOUSID_IOS: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERANONYMOUSID_IOS),
query=__generic_query(typename=meta_type.USERANONYMOUSID_IOS),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
meta_type.REVID_IOS: SupportedFilter(get=__generic_autocomplete(typename=meta_type.REVID_IOS),
query=__generic_query(typename=meta_type.REVID_IOS),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
schemas.FilterType.user_os_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_os_ios),
query=__generic_query(typename=schemas.FilterType.user_os_ios),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
schemas.FilterType.user_device_ios: SupportedFilter(
get=__generic_autocomplete(
typename=schemas.FilterType.user_device_ios),
query=__generic_query(typename=schemas.FilterType.user_device_ios),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
schemas.FilterType.user_country_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_country_ios),
query=__generic_query(typename=schemas.FilterType.user_country_ios),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
schemas.FilterType.user_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_id_ios),
query=__generic_query(typename=schemas.FilterType.user_id_ios),
value_limit=2,
starts_with="",
starts_limit=2,
ignore_if_starts_with=["/"]),
schemas.FilterType.user_anonymous_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.user_anonymous_id_ios),
query=__generic_query(typename=schemas.FilterType.user_anonymous_id_ios),
value_limit=3,
starts_with="",
starts_limit=3,
ignore_if_starts_with=["/"]),
schemas.FilterType.rev_id_ios: SupportedFilter(
get=__generic_autocomplete(typename=schemas.FilterType.rev_id_ios),
query=__generic_query(typename=schemas.FilterType.rev_id_ios),
value_limit=0,
starts_with="",
starts_limit=0,
ignore_if_starts_with=["/"]),
}
def search(text, meta_type, project_id):
rows = []
if meta_type.upper() not in list(SUPPORTED_TYPES.keys()):
if meta_type not in list(SUPPORTED_TYPES.keys()):
return {"errors": ["unsupported type"]}
rows += SUPPORTED_TYPES[meta_type.upper()].get(project_id=project_id, text=text)
if meta_type.upper() + "_IOS" in list(SUPPORTED_TYPES.keys()):
rows += SUPPORTED_TYPES[meta_type.upper() + "_IOS"].get(project_id=project_id, text=text)
rows += SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text)
if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()):
rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text)
return {"data": rows}

View file

@ -1,14 +1,15 @@
from chalicelib.utils.helper import environ
from chalicelib.utils.s3 import client
from decouple import config
from chalicelib.utils import s3
from chalicelib.utils.s3 import client
def get_web(sessionId):
return client.generate_presigned_url(
'get_object',
Params={
'Bucket': environ["sessions_bucket"],
'Key': sessionId
'Bucket': config("sessions_bucket"),
'Key': str(sessionId)
},
ExpiresIn=100000
)
@ -18,8 +19,8 @@ def get_ios(sessionId):
return client.generate_presigned_url(
'get_object',
Params={
'Bucket': environ["ios_bucket"],
'Key': sessionId
'Bucket': config("ios_bucket"),
'Key': str(sessionId)
},
ExpiresIn=100000
)
@ -27,4 +28,4 @@ def get_ios(sessionId):
def delete_mobs(session_ids):
for session_id in session_ids:
s3.schedule_for_deletion(environ["sessions_bucket"], session_id)
s3.schedule_for_deletion(config("sessions_bucket"), session_id)
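The environ → decouple.config migration that runs through this commit, as a minimal before/after sketch (variable names taken from the stage config):
from os import environ
from decouple import config

bucket = environ["sessions_bucket"]               # before: KeyError when unset
bucket = config("sessions_bucket")                # after: env vars or .env file
ttl = config("put_S3_TTL", default=20, cast=int)  # with default and casting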

View file

@ -1,6 +1,7 @@
__author__ = "AZNAUROV David"
__maintainer__ = "KRAIEM Taha Yassine"
import schemas
from chalicelib.core import events, sessions_metas, metadata, sessions
from chalicelib.utils import dev
@ -30,87 +31,107 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
:param filter_d: dict that contains events, filters, ...
:return:
"""
stages = filter_d["events"]
filters = filter_d.get("filters", [])
stages: [dict] = filter_d["events"]
filters: [dict] = filter_d.get("filters", [])
filter_issues = filter_d.get("issueTypes")
if filter_issues is None or len(filter_issues) == 0:
filter_issues = []
stage_constraints = ["main.timestamp <= %(endTimestamp)s"]
first_stage_extra_constraints = ["s.project_id=%(project_id)s", "s.start_ts >= %(startTimestamp)s",
"s.start_ts <= %(endTimestamp)s"]
extra_from = ""
filter_extra_from = []
n_stages_query = []
values = {}
if len(filters) > 0:
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
meta_keys = None
for i, f in enumerate(filters):
if not isinstance(f.get("value"), list):
if isinstance(f.get("value"), tuple):
f["value"] = list(f.get("value"))
else:
f["value"] = [f.get("value")]
if len(f["value"]) == 0 or f["value"][0] is None:
if not isinstance(f["value"], list):
f.value = [f["value"]]
if len(f["value"]) == 0 or f["value"] is None:
continue
filter_type = f["type"].upper()
values[f"f_value_{i}"] = sessions.__get_sql_value_multiple(f["value"])
if filter_type == sessions_metas.meta_type.USERBROWSER:
op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(f's.user_browser {op} %({f"f_value_{i}"})s')
f["value"] = helper.values_for_operator(value=f["value"], op=f["operator"])
# filter_args = _multiple_values(f["value"])
op = sessions.__get_sql_operator(f["operator"])
elif filter_type in [sessions_metas.meta_type.USEROS, sessions_metas.meta_type.USEROS_IOS]:
op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(f's.user_os {op} %({f"f_value_{i}"})s')
filter_type = f["type"]
# values[f_k] = sessions.__get_sql_value_multiple(f["value"])
f_k = f"f_value{i}"
values = {**values,
**sessions._multiple_values(helper.values_for_operator(value=f["value"], op=f["operator"]),
value_key=f_k)}
if filter_type == schemas.FilterType.user_browser:
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_browser {op} %({f_k})s', f["value"], value_key=f_k))
elif filter_type in [sessions_metas.meta_type.USERDEVICE, sessions_metas.meta_type.USERDEVICE_IOS]:
op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(f's.user_device {op} %({f"f_value_{i}"})s')
elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]:
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_os {op} %({f_k})s', f["value"], value_key=f_k))
elif filter_type in [sessions_metas.meta_type.USERCOUNTRY, sessions_metas.meta_type.USERCOUNTRY_IOS]:
op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(f's.user_country {op} %({f"f_value_{i}"})s')
elif filter_type == "duration".upper():
elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]:
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_device {op} %({f_k})s', f["value"], value_key=f_k))
elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]:
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_country {op} %({f_k})s', f["value"], value_key=f_k))
elif filter_type == schemas.FilterType.duration:
if len(f["value"]) > 0 and f["value"][0] is not None:
first_stage_extra_constraints.append(f's.duration >= %({f"f_value_{i}"})s')
values[f"f_value_{i}"] = f["value"][0]
if len(f["value"]) > 1 and f["value"][1] is not None and f["value"][1] > 0:
first_stage_extra_constraints.append('s.duration <= %({f"f_value_{i}"})s')
values[f"f_value_{i}"] = f["value"][1]
elif filter_type == sessions_metas.meta_type.REFERRER:
first_stage_extra_constraints.append(f's.duration >= %(minDuration)s')
values["minDuration"] = f["value"][0]
if len(f["value"]) > 1 and f["value"][1] is not None and int(f["value"][1]) > 0:
first_stage_extra_constraints.append('s.duration <= %(maxDuration)s')
values["maxDuration"] = f["value"][1]
elif filter_type == schemas.FilterType.referrer:
# events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"
op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(f"p.base_referrer {op} %(referrer)s")
filter_extra_from = [f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"]
# op = sessions.__get_sql_operator_multiple(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f"p.base_referrer {op} %({f_k})s", f["value"], value_key=f_k))
elif filter_type == events.event_type.METADATA.ui_type:
op = sessions.__get_sql_operator(f["operator"])
if meta_keys is None:
meta_keys = metadata.get(project_id=project_id)
meta_keys = {m["key"]: m["index"] for m in meta_keys}
# op = sessions.__get_sql_operator(f["operator"])
if f.get("key") in meta_keys.keys():
first_stage_extra_constraints.append(
f's.{metadata.index_to_colname(meta_keys[f["key"]])} {op} %({f"f_value_{i}"})s')
values[f"f_value_{i}"] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(f's.user_id {op} %({f"f_value_{i}"})s')
values[f"f_value_{i}"] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [sessions_metas.meta_type.USERANONYMOUSID,
sessions_metas.meta_type.USERANONYMOUSID_IOS]:
op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(f's.user_anonymous_id {op} %({f"f_value_{i}"})s')
values[f"f_value_{i}"] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [sessions_metas.meta_type.REVID, sessions_metas.meta_type.REVID_IOS]:
op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(f's.rev_id {op} %({f"f_value_{i}"})s')
values[f"f_value_{i}"] = helper.string_to_sql_like_with_op(f["value"][0], op)
sessions._multiple_conditions(
f's.{metadata.index_to_colname(meta_keys[f["key"]])} {op} %({f_k})s', f["value"],
value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_id {op} %({f_k})s', f["value"], value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.user_anonymous_id,
schemas.FilterType.user_anonymous_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.user_anonymous_id {op} %({f_k})s', f["value"], value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]:
# op = sessions.__get_sql_operator(f["operator"])
first_stage_extra_constraints.append(
sessions._multiple_conditions(f's.rev_id {op} %({f_k})s', f["value"], value_key=f_k))
# values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op)
for i, s in enumerate(stages):
if i == 0:
extra_from = ["INNER JOIN public.sessions AS s USING (session_id)"]
extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"]
else:
extra_from = []
if s.get("operator") is None:
s["operator"] = "is"
if not isinstance(s["value"], list):
s["value"] = [s["value"]]
is_any = sessions._isAny_opreator(s["operator"])
op = sessions.__get_sql_operator(s["operator"])
event_type = s["type"].upper()
next_label = s["value"]
if event_type == events.event_type.CLICK.ui_type:
next_table = events.event_type.CLICK.table
next_col_name = events.event_type.CLICK.column
@ -140,7 +161,8 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
print("=================UNDEFINED")
continue
values[f"value{i + 1}"] = helper.string_to_sql_like_with_op(next_label, op)
values = {**values, **sessions._multiple_values(helper.values_for_operator(value=s["value"], op=s["operator"]),
value_key=f"value{i + 1}")}
if sessions.__is_negation_operator(op) and i > 0:
op = sessions.__reverse_sql_operator(op)
main_condition = "left_not.session_id ISNULL"
@ -150,7 +172,11 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
AND s_main.timestamp >= T{i}.stage{i}_timestamp
AND s_main.session_id = T1.session_id) AS left_not ON (TRUE)""")
else:
main_condition = f"""main.{next_col_name} {op} %(value{i + 1})s"""
if is_any:
main_condition = "TRUE"
else:
main_condition = sessions._multiple_conditions(f"main.{next_col_name} {op} %(value{i + 1})s",
values=s["value"], value_key=f"value{i + 1}")
n_stages_query.append(f"""
(SELECT main.session_id,
{"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp,
@ -197,9 +223,9 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
params = {"project_id": project_id, "startTimestamp": filter_d["startDate"], "endTimestamp": filter_d["endDate"],
"issueTypes": tuple(filter_issues), **values}
with pg_client.PostgresClient() as cur:
# print("---------------------------------------------------")
# print(cur.mogrify(n_stages_query, params))
# print("---------------------------------------------------")
print("---------------------------------------------------")
print(cur.mogrify(n_stages_query, params))
print("---------------------------------------------------")
cur.execute(cur.mogrify(n_stages_query, params))
rows = cur.fetchall()
return rows
@ -535,7 +561,8 @@ def get_top_insights(filter_d, project_id):
"dropDueToIssues": 0
}]
counts = sessions.search2_pg(data=filter_d, project_id=project_id, user_id=None, count_only=True)
counts = sessions.search2_pg(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d), project_id=project_id,
user_id=None, count_only=True)
output[0]["sessionsCount"] = counts["countSessions"]
output[0]["usersCount"] = counts["countUsers"]
return output, 0

View file

@ -1,21 +1,24 @@
from chalicelib.utils import helper
from chalicelib.utils import pg_client
import json
from decouple import config
import schemas
from chalicelib.core import users, telemetry, tenants
from chalicelib.utils import captcha
import json
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.helper import environ
def create_step1(data):
def create_step1(data: schemas.UserSignupSchema):
print(f"===================== SIGNUP STEP 1 AT {TimeUTC.to_human_readable(TimeUTC.now())} UTC")
errors = []
if tenants.tenants_exists():
return {"errors": ["tenants already registered"]}
email = data.get("email")
email = data.email
print(f"=====================> {email}")
password = data.get("password")
password = data.password
print("Verifying email validity")
if email is None or len(email) < 5 or not helper.is_valid_email(email):
@ -28,25 +31,25 @@ def create_step1(data):
errors.append("Email address previously deleted.")
print("Verifying captcha")
if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]):
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
errors.append("Invalid captcha.")
print("Verifying password validity")
if len(data["password"]) < 6:
if len(password) < 6:
errors.append("Password is too short, it must be at least 6 characters long.")
print("Verifying fullname validity")
fullname = data.get("fullname")
fullname = data.fullname
if fullname is None or len(fullname) < 1 or not helper.is_alphabet_space_dash(fullname):
errors.append("Invalid full name.")
print("Verifying company's name validity")
company_name = data.get("organizationName")
company_name = data.organizationName
if company_name is None or len(company_name) < 1 or not helper.is_alphanumeric_space(company_name):
errors.append("invalid organization's name")
print("Verifying project's name validity")
project_name = data.get("projectName")
project_name = data.projectName
if project_name is None or len(project_name) < 1:
project_name = "my first project"
@ -61,7 +64,7 @@ def create_step1(data):
"projectName": project_name,
"data": json.dumps({"lastAnnouncementView": TimeUTC.now()}),
"organizationName": company_name,
"versionNumber": environ["version_number"]
"versionNumber": config("version_number")
}
query = f"""\
WITH t AS (

View file

@ -1,5 +1,5 @@
from datetime import datetime
from chalicelib.utils.helper import environ
from decouple import config
from chalicelib.core.collaboration_slack import Slack
@ -10,7 +10,7 @@ def send(notification, destination):
return Slack.send_text(tenant_id=notification["tenantId"],
webhook_id=destination,
text=notification["description"] \
+ f"\n<{environ['SITE_URL']}{notification['buttonUrl']}|{notification['buttonText']}>",
+ f"\n<{config('SITE_URL')}{notification['buttonUrl']}|{notification['buttonText']}>",
title=notification["title"],
title_link=notification["buttonUrl"], )
@ -23,7 +23,7 @@ def send_batch(notifications_list):
if n.get("destination") not in webhookId_map:
webhookId_map[n.get("destination")] = {"tenantId": n["notification"]["tenantId"], "batch": []}
webhookId_map[n.get("destination")]["batch"].append({"text": n["notification"]["description"] \
+ f"\n<{environ['SITE_URL']}{n['notification']['buttonUrl']}|{n['notification']['buttonText']}>",
+ f"\n<{config('SITE_URL')}{n['notification']['buttonUrl']}|{n['notification']['buttonText']}>",
"title": n["notification"]["title"],
"title_link": n["notification"]["buttonUrl"],
"ts": datetime.now().timestamp()})

View file

@ -1,10 +1,10 @@
import requests
from chalicelib.utils.helper import environ
from decouple import config
from chalicelib.core import projects
def start_replay(project_id, session_id, device, os_version, mob_url):
r = requests.post(environ["IOS_MIDDLEWARE"] + "/replay", json={
r = requests.post(config("IOS_MIDDLEWARE") + "/replay", json={
"projectId": project_id,
"projectKey": projects.get_project_key(project_id),
"sessionId": session_id,
@ -18,5 +18,5 @@ def start_replay(project_id, session_id, device, os_version, mob_url):
print(r.text)
return r.text
result = r.json()
result["url"] = environ["IOS_MIDDLEWARE"]
result["url"] = config("IOS_MIDDLEWARE")
return result

View file

@ -1,4 +1,4 @@
from chalicelib.utils.helper import environ
from decouple import config
from chalicelib.utils import helper
from chalicelib.utils import s3
@ -17,7 +17,7 @@ def __get_key(project_id, url):
def presign_share_urls(project_id, urls):
results = []
for u in urls:
results.append(s3.get_presigned_url_for_sharing(bucket=environ['sourcemaps_bucket'], expires_in=120,
results.append(s3.get_presigned_url_for_sharing(bucket=config('sourcemaps_bucket'), expires_in=120,
key=__get_key(project_id, u),
check_exists=True))
return results
@ -26,7 +26,7 @@ def presign_share_urls(project_id, urls):
def presign_upload_urls(project_id, urls):
results = []
for u in urls:
results.append(s3.get_presigned_url_for_upload(bucket=environ['sourcemaps_bucket'],
results.append(s3.get_presigned_url_for_upload(bucket=config('sourcemaps_bucket'),
expires_in=1800,
key=__get_key(project_id, u)))
return results
@ -87,7 +87,7 @@ def get_traces_group(project_id, payload):
print(key)
print("===============================")
if key not in payloads:
file_exists = s3.exists(environ['sourcemaps_bucket'], key)
file_exists = s3.exists(config('sourcemaps_bucket'), key)
all_exists = all_exists and file_exists
if not file_exists:
print(f"{u['absPath']} sourcemap (key '{key}') doesn't exist in S3")
@ -130,10 +130,10 @@ def fetch_missed_contexts(frames):
if frames[i]["frame"]["absPath"] in source_cache:
file = source_cache[frames[i]["frame"]["absPath"]]
else:
file = s3.get_file(environ['js_cache_bucket'], get_js_cache_path(frames[i]["frame"]["absPath"]))
file = s3.get_file(config('js_cache_bucket'), get_js_cache_path(frames[i]["frame"]["absPath"]))
if file is None:
print(
f"File {get_js_cache_path(frames[i]['frame']['absPath'])} not found in {environ['js_cache_bucket']}")
f"File {get_js_cache_path(frames[i]['frame']['absPath'])} not found in {config('js_cache_bucket')}")
source_cache[frames[i]["frame"]["absPath"]] = file
if file is None:
continue

View file

@ -1,6 +1,6 @@
import requests
from chalicelib.utils.helper import environ
from decouple import config
def get_original_trace(key, positions):
@ -8,13 +8,13 @@ def get_original_trace(key, positions):
"key": key,
"positions": positions,
"padding": 5,
"bucket": environ['sourcemaps_bucket'],
"S3_HOST": environ['S3_HOST'],
"S3_KEY": environ['S3_KEY'],
"S3_SECRET": environ['S3_SECRET'],
"region": environ['sessions_region']
"bucket": config('sourcemaps_bucket'),
"S3_HOST": config('S3_HOST'),
"S3_KEY": config('S3_KEY'),
"S3_SECRET": config('S3_SECRET'),
"region": config('sessions_region')
}
r = requests.post(environ["sourcemaps_reader"], json=payload)
r = requests.post(config("sourcemaps_reader"), json=payload)
if r.status_code != 200:
return {}

View file

@ -1,3 +1,4 @@
import schemas
from chalicelib.utils import pg_client
from chalicelib.utils import helper
from chalicelib.core import users
@ -62,18 +63,18 @@ def edit_client(tenant_id, changes):
return helper.dict_to_camel_case(cur.fetchone())
def update(tenant_id, user_id, data):
def update(tenant_id, user_id, data: schemas.UpdateTenantSchema):
admin = users.get(user_id=user_id, tenant_id=tenant_id)
if not admin["admin"] and not admin["superAdmin"]:
return {"error": "unauthorized"}
if "name" not in data and "optOut" not in data:
if data.name is None and data.opt_out is None:
return {"errors": ["please provide 'name' of 'optOut' attribute for update"]}
changes = {}
if "name" in data:
changes["name"] = data["name"]
if "optOut" in data:
changes["optOut"] = data["optOut"]
if data.name is not None and len(data.name) > 0:
changes["name"] = data.name
if data.opt_out is not None:
changes["optOut"] = data.opt_out
return edit_client(tenant_id=tenant_id, changes=changes)

View file

@ -1,16 +1,15 @@
import json
import secrets
from chalicelib.core import authorizers, metadata, projects, assist
from chalicelib.core import tenants
from chalicelib.utils import dev
from decouple import config
from fastapi import BackgroundTasks
from chalicelib.core import authorizers, metadata, projects
from chalicelib.core import tenants, assist
from chalicelib.utils import dev, email_helper
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.helper import environ
from chalicelib.core import tenants, assist
import secrets
def __generate_invitation_token():
@ -182,7 +181,7 @@ def update(tenant_id, user_id, changes):
return helper.dict_to_camel_case(cur.fetchone())
def create_member(tenant_id, user_id, data):
def create_member(tenant_id, user_id, data, background_tasks: BackgroundTasks):
admin = get(tenant_id=tenant_id, user_id=user_id)
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
@ -205,18 +204,25 @@ def create_member(tenant_id, user_id, data):
new_member = create_new_member(email=data["email"], invitation_token=invitation_token,
admin=data.get("admin", False), name=name)
new_member["invitationLink"] = __get_invitation_link(new_member.pop("invitationToken"))
helper.async_post(environ['email_basic'] % 'member_invitation',
{
"email": data["email"],
"invitationLink": new_member["invitationLink"],
"clientId": tenants.get_by_tenant_id(tenant_id)["name"],
"senderName": admin["name"]
})
# helper.async_post(config('email_basic') % 'member_invitation',
# {
# "email": data["email"],
# "invitationLink": new_member["invitationLink"],
# "clientId": tenants.get_by_tenant_id(tenant_id)["name"],
# "senderName": admin["name"]
# })
background_tasks.add_task(email_helper.send_team_invitation, **{
"recipient": data["email"],
"invitation_link": new_member["invitationLink"],
"client_id": tenants.get_by_tenant_id(tenant_id)["name"],
"sender_name": admin["name"]
})
return {"data": new_member}
def __get_invitation_link(invitation_token):
return environ["SITE_URL"] + environ["invitation_link"] % invitation_token
return config("SITE_URL") + config("invitation_link") % invitation_token
def allow_password_change(user_id, delta_min=10):
@ -282,12 +288,15 @@ def edit(user_id_to_update, tenant_id, changes, editor_id):
admin = get(tenant_id=tenant_id, user_id=editor_id)
if not admin["superAdmin"] and not admin["admin"]:
return {"errors": ["unauthorized"]}
if user["superAdmin"]:
changes.pop("admin")
if editor_id == user_id_to_update:
if user["superAdmin"]:
changes.pop("admin")
elif user["admin"] != changes["admin"]:
return {"errors": ["cannot change your own role"]}
keys = list(changes.keys())
for k in keys:
if k not in ALLOW_EDIT:
if k not in ALLOW_EDIT or changes[k] is None:
changes.pop(k)
keys = list(changes.keys())
@ -441,7 +450,7 @@ def change_password(tenant_id, user_id, email, old_password, new_password):
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
stack_integrations=True)
c["smtp"] = helper.has_smtp()
c["iceServers"]= assist.get_ice_servers()
c["iceServers"] = assist.get_ice_servers()
return {
'jwt': r.pop('jwt'),
'data': {
@ -469,7 +478,7 @@ def set_password_invitation(user_id, new_password):
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
stack_integrations=True)
c["smtp"] = helper.has_smtp()
c["iceServers"]= assist.get_ice_servers()
c["iceServers"] = assist.get_ice_servers()
return {
'jwt': r.pop('jwt'),
'data': {

View file

@ -1,6 +1,9 @@
import logging
import requests
from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC
import requests
def get_by_id(webhook_id):
@ -114,7 +117,7 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="",
def add_edit(tenant_id, data, replace_none=None):
if "webhookId" in data:
if data.get("webhookId") is not None:
return update(tenant_id=tenant_id, webhook_id=data["webhookId"],
changes={"endpoint": data["endpoint"],
"authHeader": None if "authHeader" not in data else data["authHeader"],
@ -144,28 +147,24 @@ def trigger_batch(data_list):
for w in data_list:
if w["destination"] not in webhooks_map:
webhooks_map[w["destination"]] = get_by_id(webhook_id=w["destination"])
__trigger(hook=webhooks_map[w["destination"]], data=w["data"])
if webhooks_map[w["destination"]] is None:
logging.error(f"!!Error webhook not found: webhook_id={w['destination']}")
else:
__trigger(hook=webhooks_map[w["destination"]], data=w["data"])
def __trigger(hook, data):
if hook["type"] == 'webhook':
if hook is not None and hook["type"] == 'webhook':
headers = {}
if hook["authHeader"] is not None and len(hook["authHeader"]) > 0:
headers = {"Authorization": hook["authHeader"]}
# body = {
# "webhookId": hook["id"],
# "createdAt": TimeUTC.now(),
# "event": event,
# "data": data
# }
r = requests.post(url=hook["endpoint"], json=data, headers=headers)
if r.status_code != 200:
print("=======> webhook: something went wrong")
print(r)
print(r.status_code)
print(r.text)
logging.error("=======> webhook: something went wrong")
logging.error(r)
logging.error(r.status_code)
logging.error(r.text)
return
response = None
try:
@ -174,5 +173,5 @@ def __trigger(hook, data):
try:
response = r.text
except:
print("no response found")
logging.info("no response found")
return response

View file

@ -1,6 +1,5 @@
from chalicelib.utils import pg_client, helper
from chalicelib.utils import pg_client, helper, email_helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.helper import environ
from chalicelib.utils.helper import get_issue_title
LOWEST_BAR_VALUE = 3
@ -30,7 +29,7 @@ def edit_config(user_id, weekly_report):
def cron():
with pg_client.PostgresClient() as cur:
with pg_client.PostgresClient(long_query=True) as cur:
params = {"3_days_ago": TimeUTC.midnight(delta_days=-3),
"1_week_ago": TimeUTC.midnight(delta_days=-7),
"2_week_ago": TimeUTC.midnight(delta_days=-14),
@ -83,6 +82,7 @@ def cron():
) AS month_1_issues ON (TRUE)
WHERE projects.deleted_at ISNULL;"""), params)
projects_data = cur.fetchall()
emails_to_send = []
for p in projects_data:
params["project_id"] = p["project_id"]
print(f"checking {p['project_name']} : {p['project_id']}")
@ -227,13 +227,14 @@ def cron():
if j["type"] in keep_types:
keep.append(j)
i["partition"] = keep
helper.async_post(environ['email_funnel'] % "weekly_report2",
{"email": p.pop("emails"),
"data": {
**p,
"days_partition": days_partition,
"issues_by_type": issues_by_type,
"issues_breakdown_by_day": issues_breakdown_by_day,
"issues_breakdown_list": issues_breakdown_list
}
})
emails_to_send.append({"email": p.pop("emails"),
"data": {
**p,
"days_partition": days_partition,
"issues_by_type": issues_by_type,
"issues_breakdown_by_day": issues_breakdown_by_day,
"issues_breakdown_list": issues_breakdown_list
}})
print(f">>> Sending weekly report to {len(emails_to_send)} email-group")
for e in emails_to_send:
email_helper.weekly_report2(recipients=e["email"], data=e["data"])

View file

@ -1,6 +1,9 @@
from datetime import datetime, timedelta
from calendar import monthrange
import pytz
from datetime import datetime, timedelta
import zoneinfo
UTC_ZI = zoneinfo.ZoneInfo("UTC")
class TimeUTC:
@ -9,20 +12,20 @@ class TimeUTC:
MS_DAY = MS_HOUR * 24
MS_WEEK = MS_DAY * 7
MS_MONTH = MS_DAY * 30
MS_MONTH_TRUE = monthrange(datetime.now(pytz.utc).astimezone(pytz.utc).year,
datetime.now(pytz.utc).astimezone(pytz.utc).month)[1] * MS_DAY
MS_MONTH_TRUE = monthrange(datetime.now(UTC_ZI).astimezone(UTC_ZI).year,
datetime.now(UTC_ZI).astimezone(UTC_ZI).month)[1] * MS_DAY
RANGE_VALUE = None
@staticmethod
def midnight(delta_days=0):
return int((datetime.now(pytz.utc) + timedelta(delta_days)) \
return int((datetime.now(UTC_ZI) + timedelta(delta_days)) \
.replace(hour=0, minute=0, second=0, microsecond=0) \
.astimezone(pytz.utc).timestamp() * 1000)
.astimezone(UTC_ZI).timestamp() * 1000)
@staticmethod
def __now(delta_days=0, delta_minutes=0, delta_seconds=0):
return (datetime.now(pytz.utc) + timedelta(days=delta_days, minutes=delta_minutes, seconds=delta_seconds)) \
.astimezone(pytz.utc)
return (datetime.now(UTC_ZI) + timedelta(days=delta_days, minutes=delta_minutes, seconds=delta_seconds)) \
.astimezone(UTC_ZI)
@staticmethod
def now(delta_days=0, delta_minutes=0, delta_seconds=0):
@ -32,28 +35,28 @@ class TimeUTC:
@staticmethod
def month_start(delta_month=0):
month = TimeUTC.__now().month + delta_month
return int(datetime.now(pytz.utc) \
return int(datetime.now(UTC_ZI) \
.replace(year=TimeUTC.__now().year + ((-12 + month) // 12 if month % 12 <= 0 else month // 12),
month=12 + month % 12 if month % 12 <= 0 else month % 12 if month > 12 else month,
day=1,
hour=0, minute=0,
second=0,
microsecond=0) \
.astimezone(pytz.utc).timestamp() * 1000)
.astimezone(UTC_ZI).timestamp() * 1000)
@staticmethod
def year_start(delta_year=0):
return int(datetime.now(pytz.utc) \
return int(datetime.now(UTC_ZI) \
.replace(year=TimeUTC.__now().year + delta_year, month=1, day=1, hour=0, minute=0, second=0,
microsecond=0) \
.astimezone(pytz.utc).timestamp() * 1000)
.astimezone(UTC_ZI).timestamp() * 1000)
@staticmethod
def custom(year=None, month=None, day=None, hour=None, minute=None):
args = locals()
return int(datetime.now(pytz.utc) \
return int(datetime.now(UTC_ZI) \
.replace(**{key: args[key] for key in args if args[key] is not None}, second=0, microsecond=0) \
.astimezone(pytz.utc).timestamp() * 1000)
.astimezone(UTC_ZI).timestamp() * 1000)
@staticmethod
def future(delta_day, delta_hour, delta_minute, minutes_period=None, start=None):
@ -78,7 +81,7 @@ class TimeUTC:
@staticmethod
def from_ms_timestamp(ts):
return datetime.fromtimestamp(ts // 1000, pytz.utc)
return datetime.fromtimestamp(ts // 1000, UTC_ZI)
@staticmethod
def to_human_readable(ts, fmt='%Y-%m-%d %H:%M:%S UTC'):
@ -113,14 +116,14 @@ class TimeUTC:
@staticmethod
def get_utc_offset():
return int((datetime.now(pytz.utc).now() - datetime.now(pytz.utc).replace(tzinfo=None)).total_seconds() * 1000)
return int((datetime.now(UTC_ZI).now() - datetime.now(UTC_ZI).replace(tzinfo=None)).total_seconds() * 1000)
@staticmethod
def trunc_day(timestamp):
dt = TimeUTC.from_ms_timestamp(timestamp)
return TimeUTC.datetime_to_timestamp(dt
.replace(hour=0, minute=0, second=0, microsecond=0)
.astimezone(pytz.utc))
.astimezone(UTC_ZI))
@staticmethod
def trunc_week(timestamp):
@ -128,4 +131,4 @@ class TimeUTC:
start = dt - timedelta(days=dt.weekday())
return TimeUTC.datetime_to_timestamp(start
.replace(hour=0, minute=0, second=0, microsecond=0)
.astimezone(pytz.utc))
.astimezone(UTC_ZI))
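The pytz → zoneinfo swap above is behavior-preserving for plain UTC; a small equivalence sketch (zoneinfo needs Python 3.9+):
from datetime import datetime
import zoneinfo

UTC_ZI = zoneinfo.ZoneInfo("UTC")
# Same instant either way; only the tzinfo implementation changes.
now_ms = int(datetime.now(UTC_ZI).timestamp() * 1000)
midnight_ms = int(datetime.now(UTC_ZI)
                  .replace(hour=0, minute=0, second=0, microsecond=0)
                  .timestamp() * 1000)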

View file

@ -1,10 +1,10 @@
from chalicelib.utils.helper import environ as env
from decouple import config
import requests
from chalicelib.utils import helper
def __get_captcha_config():
return env["captcha_server"], env["captcha_key"]
return config("captcha_server"), config("captcha_key")
def is_valid(response):

View file

@ -6,7 +6,7 @@ from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from chalicelib.utils import helper, smtp
from chalicelib.utils.helper import environ
from decouple import config
def __get_subject(subject):
@ -16,7 +16,7 @@ def __get_subject(subject):
def __get_html_from_file(source, formatting_variables):
if formatting_variables is None:
formatting_variables = {}
formatting_variables["frontend_url"] = environ["SITE_URL"]
formatting_variables["frontend_url"] = config("SITE_URL")
with open(source, "r") as body:
BODY_HTML = body.read()
if formatting_variables is not None and len(formatting_variables.keys()) > 0:
@@ -50,7 +50,7 @@ def send_html(BODY_HTML, SUBJECT, recipient, bcc=None):
recipient = [recipient]
msg = MIMEMultipart()
msg['Subject'] = Header(__get_subject(SUBJECT), 'utf-8')
msg['From'] = environ["EMAIL_FROM"]
msg['From'] = config("EMAIL_FROM")
msg['To'] = ""
body = MIMEText(BODY_HTML.encode('utf-8'), 'html', "utf-8")
msg.attach(body)
@@ -75,7 +75,7 @@ def send_text(recipients, text, subject):
with smtp.SMTPClient() as s:
msg = MIMEMultipart()
msg['Subject'] = Header(__get_subject(subject), 'utf-8')
msg['From'] = environ["EMAIL_FROM"]
msg['From'] = config("EMAIL_FROM")
msg['To'] = ", ".join(recipients)
body = MIMEText(text)
msg.attach(body)

View file

@@ -1,22 +1,23 @@
import random
import re
import string
from typing import Union
import math
import requests
local_prefix = 'local-'
from os import environ, path
import schemas
import json
local_prefix = 'local-'
from decouple import config
def get_version_number():
return environ["version"]
return config("version")
def get_stage_name():
stage = environ["stage"]
stage = config("stage")
return stage[len(local_prefix):] if stage.startswith(local_prefix) else stage
@@ -33,7 +34,7 @@ def is_onprem():
def is_local():
return environ["stage"].startswith(local_prefix)
return config("stage").startswith(local_prefix)
def generate_salt():
@@ -135,16 +136,16 @@ def __sbool_to_bool(value):
def allow_captcha():
return environ.get("captcha_server") is not None and environ.get("captcha_key") is not None \
and len(environ["captcha_server"]) > 0 and len(environ["captcha_key"]) > 0
return config("captcha_server", default=None) is not None and config("captcha_key", default=None) is not None \
and len(config("captcha_server")) > 0 and len(config("captcha_key")) > 0
def allow_sentry():
return environ.get("sentryURL") is not None and len(environ["sentryURL"]) > 0
return config("sentryURL", default=None) is not None and len(config("sentryURL")) > 0
def async_post(endpoint, data):
data["auth"] = environ["async_Token"]
data["auth"] = config("async_Token")
try:
requests.post(endpoint, timeout=1, json=data)
except requests.exceptions.ReadTimeout:
@@ -168,25 +169,56 @@ def string_to_sql_like(value):
def string_to_sql_like_with_op(value, op):
if isinstance(value, list) and len(value) > 0:
_value = value[0]
if isinstance(value, list):
r = []
for v in value:
r.append(string_to_sql_like_with_op(v, op))
return r
else:
_value = value
if _value is None:
return _value
if op.lower() != 'ilike':
return _value.replace("%", "%%")
_value = _value.replace("*", "%")
if _value.startswith("^"):
_value = _value[1:]
elif not _value.startswith("%"):
_value = '%' + _value
if _value is None:
return _value
if op.upper() != 'ILIKE':
return _value.replace("%", "%%")
_value = _value.replace("*", "%")
if _value.startswith("^"):
_value = _value[1:]
elif not _value.startswith("%"):
_value = '%' + _value
if _value.endswith("$"):
_value = _value[:-1]
elif not _value.endswith("%"):
_value = _value + '%'
return _value.replace("%", "%%")
if _value.endswith("$"):
_value = _value[:-1]
elif not _value.endswith("%"):
_value = _value + '%'
return _value.replace("%", "%%")
likable_operators = [schemas.SearchEventOperator._starts_with, schemas.SearchEventOperator._ends_with,
schemas.SearchEventOperator._contains, schemas.SearchEventOperator._not_contains]
def is_likable(op: schemas.SearchEventOperator):
return op in likable_operators
def values_for_operator(value: Union[str, list], op: schemas.SearchEventOperator):
if not is_likable(op):
return value
if isinstance(value, list):
r = []
for v in value:
r.append(values_for_operator(v, op))
return r
else:
if value is None:
return value
if op == schemas.SearchEventOperator._starts_with:
return value + '%'
elif op == schemas.SearchEventOperator._ends_with:
return '%' + value
elif op == schemas.SearchEventOperator._contains:
return '%' + value + '%'
return value
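# Editorial note, not part of the commit: values_for_operator only rewrites
# values for the "likable" operators listed above, e.g.:
assert values_for_operator("sign", schemas.SearchEventOperator._starts_with) == "sign%"
assert values_for_operator(["a", "b"], schemas.SearchEventOperator._contains) == ["%a%", "%b%"]
assert values_for_operator("x", schemas.SearchEventOperator._ends_with) == "%x"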
def is_valid_email(email):
@@ -328,46 +360,9 @@ def __decimal_limit(value, limit):
return value / factor
def is_free_open_source_edition():
return __sbool_to_bool(environ.get("isFOS"))
def is_enterprise_edition():
return __sbool_to_bool(environ.get("isEE"))
stag_config_file = f"chalicelib/.configs/{environ['stage']}.json"
if not path.isfile(stag_config_file):
print("!! stage config file not found, using .chalice/config.json only")
else:
print("!! stage config file found, merging with priority to .chalice/config.json")
with open(stag_config_file) as json_file:
config = json.load(json_file)
environ = {**config, **environ}
if (is_free_open_source_edition() or is_enterprise_edition()) and environ.get("config_file"):
if not path.isfile(environ.get("config_file")):
print("!! config file not found, using default environment")
else:
with open(environ.get("config_file")) as json_file:
config = json.load(json_file)
environ = {**environ, **config}
def get_internal_project_id(project_id64):
if project_id64 < 0x10000000000000 or project_id64 >= 0x20000000000000:
return None
project_id64 = (project_id64 - 0x10000000000000) * 4212451012670231 & 0xfffffffffffff
if project_id64 > 0xffffffff:
return None
project_id = int(project_id64)
return project_id
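# Editorial sketch, not part of the commit: get_internal_project_id above
# inverts a 52-bit modular multiplication, so a matching encoder would
# multiply by the modular inverse of the same constant (an assumption; the
# forward encoder is not shown in this diff, and project_id_to_key is a
# hypothetical name):
DECODE_MULT = 4212451012670231
ENCODE_MULT = pow(DECODE_MULT, -1, 2 ** 52)  # modular inverse, Python 3.8+

def project_id_to_key(project_id):
    return (project_id * ENCODE_MULT & 0xfffffffffffff) + 0x10000000000000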
def has_smtp():
return environ["EMAIL_HOST"] is not None and len(environ["EMAIL_HOST"]) > 0
return config("EMAIL_HOST") is not None and len(config("EMAIL_HOST")) > 0
def get_edition():
return "foss" if is_free_open_source_edition() else "ee"
return "ee" if "ee" in config("ENTERPRISE_BUILD", default="").lower() else "foss"

View file

@@ -38,7 +38,7 @@
<div style="border-top:1px dotted rgba(0,0,0,0.2); display: block; margin-top: 20px"></div>
<center>
<p style="font-size: 12px; font-family: -apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,'Helvetica Neue',sans-serif; color: #6c757d">
Sent with &#9825; from OpenReplay &copy; 2021 - All rights reserved.<br><br>
Sent with &#9825; from OpenReplay &copy; 2022 - All rights reserved.<br><br>
<a href="https://openreplay.com" target="_blank"
style="text-decoration: none; color: #6c757d">https://openreplay.com/</a>
</p>

View file

@@ -1,15 +1,17 @@
from threading import Semaphore
import psycopg2
import psycopg2.extras
from chalicelib.utils.helper import environ
PG_CONFIG = {"host": environ["pg_host"],
"database": environ["pg_dbname"],
"user": environ["pg_user"],
"password": environ["pg_password"],
"port": int(environ["pg_port"])}
from decouple import config
from psycopg2 import pool
from threading import Semaphore
PG_CONFIG = {"host": config("pg_host"),
"database": config("pg_dbname"),
"user": config("pg_user"),
"password": config("pg_password"),
"port": config("pg_port", cast=int)}
if config("pg_timeout", cast=int, default=0) > 0:
PG_CONFIG["options"] = f"-c statement_timeout={config('pg_timeout', cast=int) * 1000}"
class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool):
@@ -19,28 +21,51 @@ class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool):
def getconn(self, *args, **kwargs):
self._semaphore.acquire()
return super().getconn(*args, **kwargs)
try:
return super().getconn(*args, **kwargs)
except psycopg2.pool.PoolError as e:
if str(e) == "connection pool is closed":
make_pool()
raise e
def putconn(self, *args, **kwargs):
super().putconn(*args, **kwargs)
self._semaphore.release()
try:
postgreSQL_pool = ORThreadedConnectionPool(50, 100, **PG_CONFIG)
if (postgreSQL_pool):
print("Connection pool created successfully")
except (Exception, psycopg2.DatabaseError) as error:
print("Error while connecting to PostgreSQL", error)
raise error
postgreSQL_pool: ORThreadedConnectionPool = None
def make_pool():
global postgreSQL_pool
if postgreSQL_pool is not None:
try:
postgreSQL_pool.closeall()
except (Exception, psycopg2.DatabaseError) as error:
print("Error while closing all connexions to PostgreSQL", error)
try:
postgreSQL_pool = ORThreadedConnectionPool(config("pg_minconn", cast=int, default=20), 100, **PG_CONFIG)
if (postgreSQL_pool):
print("Connection pool created successfully")
except (Exception, psycopg2.DatabaseError) as error:
print("Error while connecting to PostgreSQL", error)
raise error
make_pool()
class PostgresClient:
connection = None
cursor = None
long_query = False
def __init__(self):
self.connection = postgreSQL_pool.getconn()
def __init__(self, long_query=False):
self.long_query = long_query
if long_query:
self.connection = psycopg2.connect(**PG_CONFIG)
else:
self.connection = postgreSQL_pool.getconn()
def __enter__(self):
if self.cursor is None:
@@ -51,11 +76,18 @@ class PostgresClient:
try:
self.connection.commit()
self.cursor.close()
if self.long_query:
self.connection.close()
except Exception as error:
print("Error while committing/closing PG-connection", error)
raise error
if str(error) == "connection already closed":
print("Recreating the connexion pool")
make_pool()
else:
raise error
finally:
postgreSQL_pool.putconn(self.connection)
if not self.long_query:
postgreSQL_pool.putconn(self.connection)
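# Editorial usage sketch, not part of the commit (assumes __enter__ returns a
# psycopg2 cursor, as the surrounding class suggests):
#
#   with PostgresClient() as cur:                   # pooled connection
#       cur.execute("SELECT 1")
#   with PostgresClient(long_query=True) as cur:    # dedicated connection, closed on exit
#       cur.execute("SELECT pg_sleep(10)")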
def close():

View file

@ -1,24 +1,24 @@
from botocore.exceptions import ClientError
from chalicelib.utils.helper import environ
from decouple import config
from datetime import datetime, timedelta
import boto3
import botocore
from botocore.client import Config
client = boto3.client('s3', endpoint_url=environ["S3_HOST"],
aws_access_key_id=environ["S3_KEY"],
aws_secret_access_key=environ["S3_SECRET"],
client = boto3.client('s3', endpoint_url=config("S3_HOST"),
aws_access_key_id=config("S3_KEY"),
aws_secret_access_key=config("S3_SECRET"),
config=Config(signature_version='s3v4'),
region_name=environ["sessions_region"])
region_name=config("sessions_region"))
def exists(bucket, key):
try:
boto3.resource('s3', endpoint_url=environ["S3_HOST"],
aws_access_key_id=environ["S3_KEY"],
aws_secret_access_key=environ["S3_SECRET"],
boto3.resource('s3', endpoint_url=config("S3_HOST"),
aws_access_key_id=config("S3_KEY"),
aws_secret_access_key=config("S3_SECRET"),
config=Config(signature_version='s3v4'),
region_name=environ["sessions_region"]) \
region_name=config("sessions_region")) \
.Object(bucket, key).load()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] == "404":
@@ -73,21 +73,21 @@ def get_file(source_bucket, source_key):
def rename(source_bucket, source_key, target_bucket, target_key):
s3 = boto3.resource('s3', endpoint_url=environ["S3_HOST"],
aws_access_key_id=environ["S3_KEY"],
aws_secret_access_key=environ["S3_SECRET"],
s3 = boto3.resource('s3', endpoint_url=config("S3_HOST"),
aws_access_key_id=config("S3_KEY"),
aws_secret_access_key=config("S3_SECRET"),
config=Config(signature_version='s3v4'),
region_name=environ["sessions_region"])
region_name=config("sessions_region"))
s3.Object(target_bucket, target_key).copy_from(CopySource=f'{source_bucket}/{source_key}')
s3.Object(source_bucket, source_key).delete()
def schedule_for_deletion(bucket, key):
s3 = boto3.resource('s3', endpoint_url=environ["S3_HOST"],
aws_access_key_id=environ["S3_KEY"],
aws_secret_access_key=environ["S3_SECRET"],
s3 = boto3.resource('s3', endpoint_url=config("S3_HOST"),
aws_access_key_id=config("S3_KEY"),
aws_secret_access_key=config("S3_SECRET"),
config=Config(signature_version='s3v4'),
region_name=environ["sessions_region"])
region_name=config("sessions_region"))
s3_object = s3.Object(bucket, key)
s3_object.copy_from(CopySource={'Bucket': bucket, 'Key': key},
Expires=datetime.now() + timedelta(days=7),

View file

@@ -1,120 +0,0 @@
import re
from urllib.parse import urlparse
def style(url):
""" Determine 'style' of a given S3 url
>>> style("s3://my-bucket/my-key/")
's3'
>>> style("s3://user@my-bucket/my-key/")
's3-credential'
>>> style("https://my-bucket.s3.amazonaws.com/my-key/")
'bucket-in-netloc'
>>> style("https://s3.amazonaws.com/my-bucket/my-key/")
'bucket-in-path'
"""
o = urlparse(url)
if o.scheme == 's3':
if '@' in o.netloc:
return 's3-credential'
else:
return 's3'
if re.search(r'^s3[.-](\w{2}-\w{4,9}-\d\.)?amazonaws\.com', o.netloc):
return 'bucket-in-path'
if re.search(r'\.s3[.-](\w{2}-\w{4,9}-\d\.)?amazonaws\.com', o.netloc):
return 'bucket-in-netloc'
raise ValueError(f'Unknown url style: {url}')
def build_url(url_type, bucket, key=None, region=None, credential_name=None):
""" Construct an S3 URL
Args:
url_type: one of 's3', 's3-credential', 'bucket-in-path', 'bucket-in-netloc'
bucket: S3 bucket name
key: Key within bucket (optional)
region: S3 region name (optional)
credential_name: user/credential name to use in S3 scheme url (optional)
Returns
(string) S3 URL
"""
if url_type == 's3':
credential = f'{credential_name}@' if credential_name else ""
return f's3://{credential}{bucket}/{key or ""}'
if url_type == 'bucket-in-path':
return f'https://s3{"-" if region else ""}{region or ""}.amazonaws.com/{bucket}/{key}'
if url_type == 'bucket-in-netloc':
return f'https://{bucket}.s3.amazonaws.com/{key}'
raise ValueError(f'Invalid url_type: {url_type}')
def parse_s3_credential_url(url):
""" Parse S3 scheme url containing a user/credential name
>>> parse_s3_url("s3://user@my-bucket/my-key")
{'bucket': 'my-bucket', 'key': 'my-key/', 'credential_name': 'user'}
"""
o = urlparse(url)
cred_name, bucket = o.netloc.split('@')
key = o.path if o.path[0] != '/' else o.path[1:]
return {'bucket': bucket, 'key': key, 'credential_name': cred_name}
def parse_s3_url(url):
""" Parse S3 scheme url
>>> parse_s3_url("s3://my-bucket/my-key")
{'bucket': 'my-bucket', 'key': 'my-key'}
"""
o = urlparse(url)
bucket = o.netloc
key = o.path if o.path[0] != '/' else o.path[1:]
return {'bucket': bucket, 'key': key}
def parse_bucket_in_path_url(url):
""" Parse url with bucket name path
>>> parse_bucket_in_path_url("https://s3-eu-west-1.amazonaws.com/my-bucket/my-key/")
{'bucket': 'my-bucket', 'key': 'my-key/'}
"""
path = urlparse(url).path
bucket = path.split('/')[1]
key = '/'.join(path.split('/')[2:])
return {'bucket': bucket, 'key': key}
def parse_bucket_in_netloc_url(url):
""" Parse url with bucket name in host/netloc
>>> parse_bucket_in_netloc_url("https://my-bucket.s3.amazonaws.com/my-key/")
{'bucket': 'my-bucket', 'key': 'my-key/'}
"""
o = urlparse(url)
bucket = o.netloc.split('.')[0]
key = o.path if o.path[0] != '/' else o.path[1:]
return {'bucket': bucket, 'key': key}
def parse_url(url):
url_style = style(url)
if url_style == 's3-credential':
return parse_s3_credential_url(url)
if url_style == 's3':
return parse_s3_url(url)
if url_style == 'bucket-in-path':
return parse_bucket_in_path_url(url)
if url_style == 'bucket-in-netloc':
return parse_bucket_in_netloc_url(url)

View file

@@ -1,5 +1,5 @@
import smtplib
from chalicelib.utils.helper import environ
from decouple import config
class EmptySMTP:
@@ -11,26 +11,26 @@ class SMTPClient:
server = None
def __init__(self):
if environ["EMAIL_HOST"] is None or len(environ["EMAIL_HOST"]) == 0:
if config("EMAIL_HOST") is None or len(config("EMAIL_HOST")) == 0:
return
elif environ["EMAIL_USE_SSL"].lower() == "false":
self.server = smtplib.SMTP(host=environ["EMAIL_HOST"], port=int(environ["EMAIL_PORT"]))
elif config("EMAIL_USE_SSL").lower() == "false":
self.server = smtplib.SMTP(host=config("EMAIL_HOST"), port=int(config("EMAIL_PORT")))
else:
if len(environ["EMAIL_SSL_KEY"]) == 0 or len(environ["EMAIL_SSL_CERT"]) == 0:
self.server = smtplib.SMTP_SSL(host=environ["EMAIL_HOST"], port=int(environ["EMAIL_PORT"]))
if len(config("EMAIL_SSL_KEY")) == 0 or len(config("EMAIL_SSL_CERT")) == 0:
self.server = smtplib.SMTP_SSL(host=config("EMAIL_HOST"), port=int(config("EMAIL_PORT")))
else:
self.server = smtplib.SMTP_SSL(host=environ["EMAIL_HOST"], port=int(environ["EMAIL_PORT"]),
keyfile=environ["EMAIL_SSL_KEY"], certfile=environ["EMAIL_SSL_CERT"])
self.server = smtplib.SMTP_SSL(host=config("EMAIL_HOST"), port=int(config("EMAIL_PORT")),
keyfile=config("EMAIL_SSL_KEY"), certfile=config("EMAIL_SSL_CERT"))
def __enter__(self):
if self.server is None:
return EmptySMTP()
self.server.ehlo()
if environ["EMAIL_USE_SSL"].lower() == "false" and environ["EMAIL_USE_TLS"].lower() == "true":
if config("EMAIL_USE_SSL").lower() == "false" and config("EMAIL_USE_TLS").lower() == "true":
self.server.starttls()
# smtplib docs recommend calling ehlo() before & after starttls()
self.server.ehlo()
self.server.login(user=environ["EMAIL_USER"], password=environ["EMAIL_PASSWORD"])
self.server.login(user=config("EMAIL_USER"), password=config("EMAIL_PASSWORD"))
return self.server
def __exit__(self, *args):

View file

@@ -1,3 +1,2 @@
#!/bin/bash
python env_handler.py
chalice local --no-autoreload --host 0.0.0.0 --stage ${ENTERPRISE_BUILD}
uvicorn app:app --host 0.0.0.0

View file

@@ -1,13 +0,0 @@
from os import environ
import json
with open('.chalice/config.json') as json_file:
data = json.load(json_file)
stages = data.get("stages", {})
for s in stages.keys():
if environ.get("SITE_URL") is None or environ["SITE_URL"] == '':
environ["SITE_URL"] = environ.get("S3_HOST", "")
data["stages"][s]["environment_variables"] = {**stages[s].get("environment_variables", {}), **environ}
with open('.chalice/config.json', 'w') as outfile:
json.dump(data, outfile, indent=2, sort_keys=True)
print("override config.json")

43
api/or_dependencies.py Normal file
View file

@@ -0,0 +1,43 @@
import json
from typing import Callable
from fastapi.routing import APIRoute
from starlette import status
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import Response, JSONResponse
import schemas
async def OR_context(request: Request) -> schemas.CurrentContext:
if hasattr(request.state, "currentContext"):
return request.state.currentContext
else:
raise Exception("currentContext not found")
class ORRoute(APIRoute):
def get_route_handler(self) -> Callable:
original_route_handler = super().get_route_handler()
async def custom_route_handler(request: Request) -> Response:
try:
response: Response = await original_route_handler(request)
except HTTPException as e:
if e.status_code // 100 == 4:
return JSONResponse(content={"errors": [e.detail]}, status_code=e.status_code)
else:
raise e
if isinstance(response, JSONResponse):
response: JSONResponse = response
body = json.loads(response.body.decode('utf8'))
if response.status_code == 200 and body is not None and body.get("errors") is not None:
if "not found" in body["errors"][0]:
response.status_code = status.HTTP_404_NOT_FOUND
else:
response.status_code = status.HTTP_400_BAD_REQUEST
return response
return custom_route_handler
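# Editorial note, not part of the commit: with this route class, a handler
# raising HTTPException(status_code=401, detail="Invalid captcha.") reaches
# the client as {"errors": ["Invalid captcha."]} with status 401, and a 200
# JSON body whose "errors" mention "not found" is rewritten to 404 (other
# error bodies to 400).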

View file

@@ -3,9 +3,13 @@ urllib3==1.26.6
boto3==1.16.1
pyjwt==1.7.1
psycopg2-binary==2.8.6
pytz==2020.1
sentry-sdk==0.19.1
elasticsearch==7.9.1
jira==2.0.0
schedule==1.1.0
croniter==1.0.12
fastapi==0.70.1
uvicorn[standard]==0.16.0
python-decouple==3.5
pydantic[email]==1.8.2
apscheduler==3.8.1

120
api/routers/app/v1_api.py Normal file
View file

@@ -0,0 +1,120 @@
from fastapi import Depends, Body
import schemas
from chalicelib.core import sessions, events, jobs, projects
from chalicelib.utils.TimeUTC import TimeUTC
from or_dependencies import OR_context
from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@app_apikey.get('/v1/{projectKey}/users/{userId}/sessions', tags=["api"])
def get_user_sessions(projectKey: str, userId: str, start_date: int = None, end_date: int = None):
projectId = projects.get_internal_project_id(projectKey)
return {
'data': sessions.get_user_sessions(
project_id=projectId,
user_id=userId,
start_date=start_date,
end_date=end_date
)
}
@app_apikey.get('/v1/{projectKey}/sessions/{sessionId}/events', tags=["api"])
def get_session_events(projectKey: str, sessionId: int):
projectId = projects.get_internal_project_id(projectKey)
return {
'data': events.get_by_sessionId2_pg(
project_id=projectId,
session_id=sessionId
)
}
@app_apikey.get('/v1/{projectKey}/users/{userId}', tags=["api"])
def get_user_details(projectKey: str, userId: str):
projectId = projects.get_internal_project_id(projectKey)
return {
'data': sessions.get_session_user(
project_id=projectId,
user_id=userId
)
}
@app_apikey.delete('/v1/{projectKey}/users/{userId}', tags=["api"])
def schedule_to_delete_user_data(projectKey: str, userId: str):
projectId = projects.get_internal_project_id(projectKey)
data = {"action": "delete_user_data",
"reference_id": userId,
"description": f"Delete user sessions of userId = {userId}",
"start_at": TimeUTC.to_human_readable(TimeUTC.midnight(1))}
record = jobs.create(project_id=projectId, data=data)
return {
'data': record
}
@app_apikey.get('/v1/{projectKey}/jobs', tags=["api"])
def get_jobs(projectKey: str):
projectId = projects.get_internal_project_id(projectKey)
return {
'data': jobs.get_all(project_id=projectId)
}
@app_apikey.get('/v1/{projectKey}/jobs/{jobId}', tags=["api"])
def get_job(projectKey: str, jobId: int):
return {
'data': jobs.get(job_id=jobId)
}
@app_apikey.delete('/v1/{projectKey}/jobs/{jobId}', tags=["api"])
def cancel_job(projectKey: str, jobId: int):
job = jobs.get(job_id=jobId)
job_not_found = len(job.keys()) == 0
if job_not_found:
return {"errors": ["Job not found."]}
if job["status"] == jobs.JobStatus.COMPLETED or job["status"] == jobs.JobStatus.CANCELLED:
return {"errors": ["The request job has already been canceled/completed."]}
job["status"] = "cancelled"
return {
'data': jobs.update(job_id=jobId, job=job)
}
@app_apikey.get('/v1/projects', tags=["api"])
def get_projects(context: schemas.CurrentContext = Depends(OR_context)):
records = projects.get_projects(tenant_id=context.tenant_id)
for record in records:
del record['projectId']
return {
'data': records
}
@app_apikey.get('/v1/projects/{projectKey}', tags=["api"])
def get_project(projectKey: str, context: schemas.CurrentContext = Depends(OR_context)):
return {
'data': projects.get_project_by_key(tenant_id=context.tenant_id, project_key=projectKey)
}
@app_apikey.post('/v1/projects', tags=["api"])
def create_project(data: schemas.CreateProjectSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
record = projects.create(
tenant_id=context.tenant_id,
user_id=None,
data=data,
skip_authorization=True
)
del record['data']['projectId']
return record

12
api/routers/base.py Normal file
View file

@@ -0,0 +1,12 @@
from fastapi import APIRouter, Depends
from auth.auth_apikey import APIKeyAuth
from auth.auth_jwt import JWTAuth
from or_dependencies import ORRoute
def get_routers() -> (APIRouter, APIRouter, APIRouter):
public_app = APIRouter(route_class=ORRoute)
app = APIRouter(dependencies=[Depends(JWTAuth())], route_class=ORRoute)
app_apikey = APIRouter(dependencies=[Depends(APIKeyAuth())], route_class=ORRoute)
return public_app, app, app_apikey

1144
api/routers/core.py Normal file

File diff suppressed because it is too large

233
api/routers/core_dynamic.py Normal file
View file

@@ -0,0 +1,233 @@
from typing import Optional
from decouple import config
from fastapi import Body, Depends, HTTPException, status, BackgroundTasks
from starlette.responses import RedirectResponse
import schemas
from chalicelib.core import assist
from chalicelib.core import integrations_manager
from chalicelib.core import sessions
from chalicelib.core import tenants, users, metadata, projects, license
from chalicelib.core import webhook
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import captcha
from chalicelib.utils import helper
from or_dependencies import OR_context
from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@public_app.get('/signup', tags=['signup'])
def get_all_signup():
return {"data": {"tenants": tenants.tenants_exists(),
"sso": None,
"ssoProvider": None,
"edition": helper.get_edition()}}
@public_app.post('/login', tags=["authentication"])
def login(data: schemas.UserLoginSchema = Body(...)):
if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid captcha."
)
r = users.authenticate(data.email, data.password, for_plugin=False)
if r is None:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Youve entered invalid Email or Password."
)
tenant_id = r.pop("tenantId")
r["limits"] = {
"teamMember": -1,
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(tenant_id)}
c = tenants.get_by_tenant_id(tenant_id)
c.pop("createdAt")
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
stack_integrations=True, version=True)
c["smtp"] = helper.has_smtp()
c["iceServers"] = assist.get_ice_servers()
r["smtp"] = c["smtp"]
r["iceServers"] = c["iceServers"]
return {
'jwt': r.pop('jwt'),
'data': {
"user": r,
"client": c
}
}
@app.get('/account', tags=['accounts'])
def get_account(context: schemas.CurrentContext = Depends(OR_context)):
r = users.get(tenant_id=context.tenant_id, user_id=context.user_id)
return {
'data': {
**r,
"limits": {
"teamMember": -1,
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(context.tenant_id)
},
**license.get_status(context.tenant_id),
"smtp": helper.has_smtp(),
"iceServers": assist.get_ice_servers()
}
}
@app.get('/projects/limit', tags=['projects'])
def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)):
return {"data": {
"current": projects.count_by_tenant(tenant_id=context.tenant_id),
"remaining": -1
}}
@app.get('/projects/{projectId}', tags=['projects'])
def get_project(projectId: int, last_tracker_version: Optional[str] = None,
context: schemas.CurrentContext = Depends(OR_context)):
data = projects.get_project(tenant_id=context.tenant_id, project_id=projectId, include_last_session=True,
include_gdpr=True, last_tracker_version=last_tracker_version)
if data is None:
return {"errors": ["project not found"]}
return {"data": data}
@app.put('/integrations/slack', tags=['integrations'])
@app.post('/integrations/slack', tags=['integrations'])
def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentContext = Depends(OR_context)):
n = Slack.add_channel(tenant_id=context.tenant_id, url=data.url, name=data.name)
if n is None:
return {
"errors": ["We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
}
return {"data": n}
@app.put('/integrations/slack/{integrationId}', tags=['integrations'])
@app.post('/integrations/slack/{integrationId}', tags=['integrations'])
def edit_slack_integration(integrationId: int, data: schemas.EditSlackSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
if len(data.url) > 0:
old = webhook.get(tenant_id=context.tenant_id, webhook_id=integrationId)
if old["endpoint"] != data.url:
if not Slack.say_hello(data.url):
return {
"errors": [
"We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
}
return {"data": webhook.update(tenant_id=context.tenant_id, webhook_id=integrationId,
changes={"name": data.name, "endpoint": data.url})}
# this endpoint supports both jira & github based on `provider` attribute
@app.post('/integrations/issues', tags=["integrations"])
def add_edit_jira_cloud_github(data: schemas.JiraGithubSchema,
context: schemas.CurrentContext = Depends(OR_context)):
provider = data.provider.upper()
error, integration = integrations_manager.get_integration(tool=provider, tenant_id=context.tenant_id,
user_id=context.user_id)
if error is not None:
return error
return {"data": integration.add_edit(data=data.dict())}
@app.post('/client/members', tags=["client"])
@app.put('/client/members', tags=["client"])
def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...),
context: schemas.CurrentContext = Depends(OR_context)):
return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(),
background_tasks=background_tasks)
@public_app.get('/users/invitation', tags=['users'])
def process_invitation_link(token: str):
if token is None or len(token) < 64:
return {"errors": ["please provide a valid invitation"]}
user = users.get_by_invitation_token(token)
if user is None:
return {"errors": ["invitation not found"]}
if user["expiredInvitation"]:
return {"errors": ["expired invitation, please ask your admin to send a new one"]}
if user["expiredChange"] is not None and not user["expiredChange"] \
and user["changePwdToken"] is not None and user["changePwdAge"] < -5 * 60:
pass_token = user["changePwdToken"]
else:
pass_token = users.allow_password_change(user_id=user["userId"])
return RedirectResponse(url=config("SITE_URL") + config("change_password_link") % (token, pass_token))
@public_app.post('/password/reset', tags=["users"])
@public_app.put('/password/reset', tags=["users"])
def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)):
if data is None or len(data.invitation) < 64 or len(data.passphrase) < 8:
return {"errors": ["please provide a valid invitation & pass"]}
user = users.get_by_invitation_token(token=data.invitation, pass_token=data.passphrase)
if user is None:
return {"errors": ["invitation not found"]}
if user["expiredChange"]:
return {"errors": ["expired change, please re-use the invitation link"]}
return users.set_password_invitation(new_password=data.password, user_id=user["userId"])
@app.put('/client/members/{memberId}', tags=["client"])
@app.post('/client/members/{memberId}', tags=["client"])
def edit_member(memberId: int, data: schemas.EditMemberSchema,
context: schemas.CurrentContext = Depends(OR_context)):
return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data.dict(),
user_id_to_update=memberId)
@app.get('/metadata/session_search', tags=["metadata"])
def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None,
context: schemas.CurrentContext = Depends(OR_context)):
if key is None or value is None or len(value) == 0 and len(key) == 0:
return {"errors": ["please provide a key&value for search"]}
if len(value) == 0:
return {"errors": ["please provide a value for search"]}
if len(key) == 0:
return {"errors": ["please provide a key for search"]}
return {
"data": sessions.search_by_metadata(tenant_id=context.tenant_id, user_id=context.user_id, m_value=value,
m_key=key, project_id=projectId)}
@app.get('/plans', tags=["plan"])
def get_current_plan(context: schemas.CurrentContext = Depends(OR_context)):
return {
"data": license.get_status(context.tenant_id)
}
@public_app.get('/general_stats', tags=["private"], include_in_schema=False)
def get_general_stats():
return {"data": {"sessions:": sessions.count_all()}}
@app.get('/client', tags=['projects'])
def get_client(context: schemas.CurrentContext = Depends(OR_context)):
r = tenants.get_by_tenant_id(context.tenant_id)
if r is not None:
r.pop("createdAt")
r["projects"] = projects.get_projects(tenant_id=context.tenant_id, recording_state=True, recorded=True,
stack_integrations=True, version=True)
return {
'data': r
}
@app.get('/projects', tags=['projects'])
def get_projects(last_tracker_version: Optional[str] = None, context: schemas.CurrentContext = Depends(OR_context)):
return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True,
stack_integrations=True, version=True,
last_tracker_version=last_tracker_version)}

View file

@@ -0,0 +1,15 @@
from chalicelib.core import weekly_report, jobs
async def run_scheduled_jobs() -> None:
jobs.execute_jobs()
async def weekly_report2() -> None:
weekly_report.cron()
cron_jobs = [
{"func": run_scheduled_jobs, "trigger": "interval", "seconds": 60, "misfire_grace_time": 20},
{"func": weekly_report2, "trigger": "cron", "day_of_week": "mon", "hour": 5, "misfire_grace_time": 60 * 60}
]
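# Editorial sketch, not part of the commit: these dicts look designed to be
# unpacked into APScheduler (pinned in requirements.txt), roughly as follows
# (an assumption; the scheduler wiring is not shown in this diff):
#
#   from apscheduler.schedulers.asyncio import AsyncIOScheduler
#   scheduler = AsyncIOScheduler()
#   for job in cron_jobs:
#       scheduler.add_job(**job)
#   scheduler.start()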

View file

@@ -0,0 +1,10 @@
from chalicelib.core import telemetry
def telemetry_cron() -> None:
telemetry.compute()
cron_jobs = [
{"func": telemetry_cron, "trigger": "cron", "day_of_week": "*"}
]

View file

@@ -0,0 +1,346 @@
from fastapi import Body
import schemas
from chalicelib.core import dashboard
from chalicelib.core import metadata
from chalicelib.utils import helper
from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@app.get('/{projectId}/dashboard/metadata', tags=["dashboard", "metrics"])
def get_metadata_map(projectId: int):
metamap = []
for m in metadata.get(project_id=projectId):
metamap.append({"name": m["key"], "key": f"metadata{m['index']}"})
return {"data": metamap}
@app.post('/{projectId}/dashboard/sessions', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/sessions', tags=["dashboard", "metrics"])
def get_dashboard_processed_sessions(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_processed_sessions(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors', tags=["dashboard", "metrics"])
def get_dashboard_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_errors(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/errors_trend', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors_trend', tags=["dashboard", "metrics"])
def get_dashboard_errors_trend(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_errors_trend(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/application_activity', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/application_activity', tags=["dashboard", "metrics"])
def get_dashboard_application_activity(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_application_activity(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/page_metrics', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/page_metrics', tags=["dashboard", "metrics"])
def get_dashboard_page_metrics(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_page_metrics(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/user_activity', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/user_activity', tags=["dashboard", "metrics"])
def get_dashboard_user_activity(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_user_activity(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/performance', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/performance', tags=["dashboard", "metrics"])
def get_dashboard_performance(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_performance(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/slowest_images', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/slowest_images', tags=["dashboard", "metrics"])
def get_dashboard_slowest_images(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_slowest_images(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/missing_resources', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/missing_resources', tags=["dashboard", "metrics"])
def get_performance_sessions(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_missing_resources_trend(project_id=projectId, **data.dict())}
@app.post('/{projectId}/dashboard/network', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/network', tags=["dashboard", "metrics"])
def get_network_widget(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_network(project_id=projectId, **data.dict())}
@app.get('/{projectId}/dashboard/{widget}/search', tags=["dashboard", "metrics"])
def get_dashboard_autocomplete(projectId: int, widget: str, q: str, type: str = "", platform: str = None,
key: str = ""):
if q is None or len(q) == 0:
return {"data": []}
q = '^' + q
if widget in ['performance']:
data = dashboard.search(q, type, project_id=projectId,
platform=platform, performance=True)
elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render',
'impacted_sessions_by_slow_pages', 'pages_response_time']:
data = dashboard.search(q, type, project_id=projectId,
platform=platform, pages_only=True)
elif widget in ['resources_loading_time']:
data = dashboard.search(q, type, project_id=projectId,
platform=platform, performance=False)
elif widget in ['time_between_events', 'events']:
data = dashboard.search(q, type, project_id=projectId,
platform=platform, performance=False, events_only=True)
elif widget in ['metadata']:
data = dashboard.search(q, None, project_id=projectId,
platform=platform, metadata=True, key=key)
else:
return {"errors": [f"unsupported widget: {widget}"]}
return {'data': data}
# 1
@app.post('/{projectId}/dashboard/slowest_resources', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/slowest_resources', tags=["dashboard", "metrics"])
def get_dashboard_slowest_resources(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_slowest_resources(project_id=projectId, **data.dict())}
# 2
@app.post('/{projectId}/dashboard/resources_loading_time', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_loading_time', tags=["dashboard", "metrics"])
def get_dashboard_resources(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_resources_loading_time(project_id=projectId, **data.dict())}
# 3
@app.post('/{projectId}/dashboard/pages_dom_buildtime', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/pages_dom_buildtime', tags=["dashboard", "metrics"])
def get_dashboard_pages_dom(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_pages_dom_build_time(project_id=projectId, **data.dict())}
# 4
@app.post('/{projectId}/dashboard/busiest_time_of_day', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/busiest_time_of_day', tags=["dashboard", "metrics"])
def get_dashboard_busiest_time_of_day(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_busiest_time_of_day(project_id=projectId, **data.dict())}
# 5
@app.post('/{projectId}/dashboard/sessions_location', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/sessions_location', tags=["dashboard", "metrics"])
def get_dashboard_sessions_location(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_sessions_location(project_id=projectId, **data.dict())}
# 6
@app.post('/{projectId}/dashboard/speed_location', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/speed_location', tags=["dashboard", "metrics"])
def get_dashboard_speed_location(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_speed_index_location(project_id=projectId, **data.dict())}
# 7
@app.post('/{projectId}/dashboard/pages_response_time', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/pages_response_time', tags=["dashboard", "metrics"])
def get_dashboard_pages_response_time(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_pages_response_time(project_id=projectId, **data.dict())}
# 8
@app.post('/{projectId}/dashboard/pages_response_time_distribution', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/pages_response_time_distribution', tags=["dashboard", "metrics"])
def get_dashboard_pages_response_time_distribution(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_pages_response_time_distribution(project_id=projectId, **data.dict())}
# 9
@app.post('/{projectId}/dashboard/top_metrics', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/top_metrics', tags=["dashboard", "metrics"])
def get_dashboard_top_metrics(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_top_metrics(project_id=projectId, **data.dict())}
# 10
@app.post('/{projectId}/dashboard/time_to_render', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/time_to_render', tags=["dashboard", "metrics"])
def get_dashboard_time_to_render(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_time_to_render(project_id=projectId, **data.dict())}
# 11
@app.post('/{projectId}/dashboard/impacted_sessions_by_slow_pages', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/impacted_sessions_by_slow_pages', tags=["dashboard", "metrics"])
def get_dashboard_impacted_sessions_by_slow_pages(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_impacted_sessions_by_slow_pages(project_id=projectId, **data.dict())}
# 12
@app.post('/{projectId}/dashboard/memory_consumption', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/memory_consumption', tags=["dashboard", "metrics"])
def get_dashboard_memory_consumption(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_memory_consumption(project_id=projectId, **data.dict())}
# 12.1
@app.post('/{projectId}/dashboard/fps', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/fps', tags=["dashboard", "metrics"])
def get_dashboard_avg_fps(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_avg_fps(project_id=projectId, **data.dict())}
# 12.2
@app.post('/{projectId}/dashboard/cpu', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/cpu', tags=["dashboard", "metrics"])
def get_dashboard_avg_cpu(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_avg_cpu(project_id=projectId, **data.dict())}
# 13
@app.post('/{projectId}/dashboard/crashes', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/crashes', tags=["dashboard", "metrics"])
def get_dashboard_crashes(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_crashes(project_id=projectId, **data.dict())}
# 14
@app.post('/{projectId}/dashboard/domains_errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/domains_errors', tags=["dashboard", "metrics"])
def get_dashboard_domains_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_domains_errors(project_id=projectId, **data.dict())}
# 14.1
@app.post('/{projectId}/dashboard/domains_errors_4xx', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/domains_errors_4xx', tags=["dashboard", "metrics"])
def get_dashboard_domains_errors_4xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_domains_errors_4xx(project_id=projectId, **data.dict())}
# 14.2
@app.post('/{projectId}/dashboard/domains_errors_5xx', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/domains_errors_5xx', tags=["dashboard", "metrics"])
def get_dashboard_domains_errors_5xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_domains_errors_5xx(project_id=projectId, **data.dict())}
# 15
@app.post('/{projectId}/dashboard/slowest_domains', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/slowest_domains', tags=["dashboard", "metrics"])
def get_dashboard_slowest_domains(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_slowest_domains(project_id=projectId, **data.dict())}
# 16
@app.post('/{projectId}/dashboard/errors_per_domains', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors_per_domains', tags=["dashboard", "metrics"])
def get_dashboard_errors_per_domains(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_errors_per_domains(project_id=projectId, **data.dict())}
# 17
@app.post('/{projectId}/dashboard/sessions_per_browser', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/sessions_per_browser', tags=["dashboard", "metrics"])
def get_dashboard_sessions_per_browser(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_sessions_per_browser(project_id=projectId, **data.dict())}
# 18
@app.post('/{projectId}/dashboard/calls_errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/calls_errors', tags=["dashboard", "metrics"])
def get_dashboard_calls_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_calls_errors(project_id=projectId, **data.dict())}
# 18.1
@app.post('/{projectId}/dashboard/calls_errors_4xx', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/calls_errors_4xx', tags=["dashboard", "metrics"])
def get_dashboard_calls_errors_4xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_calls_errors_4xx(project_id=projectId, **data.dict())}
# 18.2
@app.post('/{projectId}/dashboard/calls_errors_5xx', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/calls_errors_5xx', tags=["dashboard", "metrics"])
def get_dashboard_calls_errors_5xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_calls_errors_5xx(project_id=projectId, **data.dict())}
# 19
@app.post('/{projectId}/dashboard/errors_per_type', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors_per_type', tags=["dashboard", "metrics"])
def get_dashboard_errors_per_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_errors_per_type(project_id=projectId, **data.dict())}
# 20
@app.post('/{projectId}/dashboard/resources_by_party', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_by_party', tags=["dashboard", "metrics"])
def get_dashboard_resources_by_party(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_resources_by_party(project_id=projectId, **data.dict())}
# 21
@app.post('/{projectId}/dashboard/resource_type_vs_response_end', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resource_type_vs_response_end', tags=["dashboard", "metrics"])
def get_dashboard_errors_per_resource_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.resource_type_vs_response_end(project_id=projectId, **data.dict())}
# 22
@app.post('/{projectId}/dashboard/resources_vs_visually_complete', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_vs_visually_complete', tags=["dashboard", "metrics"])
def get_dashboard_resources_vs_visually_complete(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_resources_vs_visually_complete(project_id=projectId, **data.dict())}
# 23
@app.post('/{projectId}/dashboard/impacted_sessions_by_js_errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/impacted_sessions_by_js_errors', tags=["dashboard", "metrics"])
def get_dashboard_impacted_sessions_by_js_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_impacted_sessions_by_js_errors(project_id=projectId, **data.dict())}
# 24
@app.post('/{projectId}/dashboard/resources_count_by_type', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_count_by_type', tags=["dashboard", "metrics"])
def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": dashboard.get_resources_count_by_type(project_id=projectId, **data.dict())}
# # 25
# @app.post('/{projectId}/dashboard/time_between_events', tags=["dashboard", "metrics"])
# @app.get('/{projectId}/dashboard/time_between_events', tags=["dashboard", "metrics"])
# def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
# return {"errors": ["please choose 2 events"]}
@app.post('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"])
def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": [
*helper.explode_widget(key="count_sessions",
data=dashboard.get_processed_sessions(project_id=projectId, **data.dict())),
*helper.explode_widget(data={**dashboard.get_application_activity(project_id=projectId, **data.dict()),
"chart": dashboard.get_performance(project_id=projectId, **data.dict())
.get("chart", [])}),
*helper.explode_widget(data=dashboard.get_page_metrics(project_id=projectId, **data.dict())),
*helper.explode_widget(data=dashboard.get_user_activity(project_id=projectId, **data.dict())),
*helper.explode_widget(data=dashboard.get_pages_dom_build_time(project_id=projectId, **data.dict()),
key="avg_pages_dom_buildtime"),
*helper.explode_widget(data=dashboard.get_pages_response_time(project_id=projectId, **data.dict()),
key="avg_pages_response_time"),
*helper.explode_widget(dashboard.get_top_metrics(project_id=projectId, **data.dict())),
*helper.explode_widget(data=dashboard.get_time_to_render(project_id=projectId, **data.dict()),
key="avg_time_to_render"),
*helper.explode_widget(dashboard.get_memory_consumption(project_id=projectId, **data.dict())),
*helper.explode_widget(dashboard.get_avg_cpu(project_id=projectId, **data.dict())),
*helper.explode_widget(dashboard.get_avg_fps(project_id=projectId, **data.dict())),
]}

View file

@@ -0,0 +1,108 @@
from fastapi import Body
import schemas
from chalicelib.core import insights
from routers.base import get_routers
public_app, app, app_apikey = get_routers()
@app.post('/{projectId}/insights/journey', tags=["insights"])
@app.get('/{projectId}/insights/journey', tags=["insights"])
def get_insights_journey(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": insights.journey(project_id=projectId, **data.dict())}
@app.post('/{projectId}/insights/users_acquisition', tags=["insights"])
@app.get('/{projectId}/insights/users_acquisition', tags=["insights"])
def get_users_acquisition(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": insights.users_acquisition(project_id=projectId, **data.dict())}
@app.post('/{projectId}/insights/users_retention', tags=["insights"])
@app.get('/{projectId}/insights/users_retention', tags=["insights"])
def get_users_retention(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": insights.users_retention(project_id=projectId, **data.dict())}
@app.post('/{projectId}/insights/feature_retention', tags=["insights"])
@app.get('/{projectId}/insights/feature_retention', tags=["insights"])
def get_feature_retention(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": insights.feature_retention(project_id=projectId, **data.dict())}
@app.post('/{projectId}/insights/feature_acquisition', tags=["insights"])
@app.get('/{projectId}/insights/feature_acquisition', tags=["insights"])
def get_feature_acquisition(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": insights.feature_acquisition(project_id=projectId, **data.dict())}
@app.post('/{projectId}/insights/feature_popularity_frequency', tags=["insights"])
@app.get('/{projectId}/insights/feature_popularity_frequency', tags=["insights"])
def get_feature_popularity_frequency(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": insights.feature_popularity_frequency(project_id=projectId, **data.dict())}
@app.post('/{projectId}/insights/feature_intensity', tags=["insights"])
@app.get('/{projectId}/insights/feature_intensity', tags=["insights"])
def get_feature_intensity(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": insights.feature_intensity(project_id=projectId, **data.dict())}
@app.post('/{projectId}/insights/feature_adoption', tags=["insights"])
@app.get('/{projectId}/insights/feature_adoption', tags=["insights"])
def get_feature_adoption(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": insights.feature_adoption(project_id=projectId, **data.dict())}
@app.post('/{projectId}/insights/feature_adoption_top_users', tags=["insights"])
@app.get('/{projectId}/insights/feature_adoption_top_users', tags=["insights"])
def get_feature_adoption_top_users(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": insights.feature_adoption_top_users(project_id=projectId, **data.dict())}
@app.post('/{projectId}/insights/users_active', tags=["insights"])
@app.get('/{projectId}/insights/users_active', tags=["insights"])
def get_users_active(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": insights.users_active(project_id=projectId, **data.dict())}
@app.post('/{projectId}/insights/users_power', tags=["insights"])
@app.get('/{projectId}/insights/users_power', tags=["insights"])
def get_users_power(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": insights.users_power(project_id=projectId, **data.dict())}
@app.post('/{projectId}/insights/users_slipping', tags=["insights"])
@app.get('/{projectId}/insights/users_slipping', tags=["insights"])
def get_users_slipping(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
return {"data": insights.users_slipping(project_id=projectId, **data.dict())}
#
#
# @app.route('/{projectId}/dashboard/{widget}/search', methods=['GET'])
# def get_dashboard_autocomplete(projectId:int, widget):
# params = app.current_request.query_params
# if params is None or params.get('q') is None or len(params.get('q')) == 0:
# return {"data": []}
# params['q'] = '^' + params['q']
#
# if widget in ['performance']:
# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
# platform=params.get('platform', None), performance=True)
# elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render',
# 'impacted_sessions_by_slow_pages', 'pages_response_time']:
# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
# platform=params.get('platform', None), pages_only=True)
# elif widget in ['resources_loading_time']:
# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
# platform=params.get('platform', None), performance=False)
# elif widget in ['time_between_events', 'events']:
# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
# platform=params.get('platform', None), performance=False, events_only=True)
# elif widget in ['metadata']:
# data = dashboard.search(params.get('q', ''), None, project_id=projectId,
# platform=params.get('platform', None), metadata=True, key=params.get("key"))
# else:
# return {"errors": [f"unsupported widget: {widget}"]}
# return {'data': data}

3
api/run-dev.sh Executable file
View file

@@ -0,0 +1,3 @@
#!/bin/zsh
uvicorn app:app --reload

655
api/schemas.py Normal file
View file

@@ -0,0 +1,655 @@
from enum import Enum
from typing import Optional, List, Union, Literal
from pydantic import BaseModel, Field, EmailStr, HttpUrl, root_validator
from chalicelib.utils.TimeUTC import TimeUTC
def attribute_to_camel_case(snake_str):
components = snake_str.split("_")
return components[0] + ''.join(x.title() for x in components[1:])
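# Editorial note, not part of the commit: attribute_to_camel_case("old_password")
# returns "oldPassword"; used as an alias_generator it lets these pydantic models
# accept camelCase JSON keys for snake_case fields.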
class _Grecaptcha(BaseModel):
g_recaptcha_response: Optional[str] = Field(None, alias='g-recaptcha-response')
class UserLoginSchema(_Grecaptcha):
email: EmailStr = Field(...)
password: str = Field(...)
class UserSignupSchema(UserLoginSchema):
fullname: str = Field(...)
organizationName: str = Field(...)
projectName: str = Field(default="my first project")
class Config:
alias_generator = attribute_to_camel_case
class EditUserSchema(BaseModel):
name: Optional[str] = Field(None)
email: Optional[str] = Field(None)
admin: Optional[bool] = Field(False)
appearance: Optional[dict] = Field({})
class ForgetPasswordPayloadSchema(_Grecaptcha):
email: str = Field(...)
class EditUserPasswordSchema(BaseModel):
old_password: str = Field(...)
new_password: str = Field(...)
class Config:
alias_generator = attribute_to_camel_case
class UpdateTenantSchema(BaseModel):
name: Optional[str] = Field(None)
opt_out: Optional[bool] = Field(None)
class Config:
alias_generator = attribute_to_camel_case
class CreateProjectSchema(BaseModel):
name: str = Field("my first project")
class CurrentAPIContext(BaseModel):
tenant_id: int = Field(...)
class CurrentContext(CurrentAPIContext):
user_id: int = Field(...)
email: str = Field(...)
class AddSlackSchema(BaseModel):
name: str = Field(...)
url: HttpUrl = Field(...)
class EditSlackSchema(BaseModel):
name: Optional[str] = Field(None)
url: HttpUrl = Field(...)
class SearchErrorsSchema(BaseModel):
platform: Optional[str] = Field(None)
startDate: Optional[int] = Field(TimeUTC.now(-7))
endDate: Optional[int] = Field(TimeUTC.now())
density: Optional[int] = Field(7)
sort: Optional[str] = Field(None)
order: Optional[str] = Field(None)
class CreateNotificationSchema(BaseModel):
token: str = Field(...)
notifications: List = Field(...)
class NotificationsViewSchema(BaseModel):
ids: Optional[List] = Field(default=[])
startTimestamp: Optional[int] = Field(default=None)
endTimestamp: Optional[int] = Field(default=None)
class JiraGithubSchema(BaseModel):
provider: str = Field(...)
username: str = Field(...)
token: str = Field(...)
url: str = Field(...)
class CreateEditWebhookSchema(BaseModel):
webhookId: Optional[int] = Field(None)
endpoint: str = Field(...)
authHeader: Optional[str] = Field(None)
name: Optional[str] = Field(...)
class CreateMemberSchema(BaseModel):
userId: Optional[int] = Field(None)
name: str = Field(...)
email: str = Field(...)
admin: bool = Field(False)
class EditMemberSchema(BaseModel):
name: str = Field(...)
email: str = Field(...)
admin: bool = Field(False)
class EditPasswordByInvitationSchema(BaseModel):
invitation: str = Field(...)
passphrase: str = Field(..., alias="pass")
password: str = Field(...)
class AssignmentSchema(BaseModel):
assignee: str = Field(...)
description: str = Field(...)
title: str = Field(...)
issue_type: str = Field(...)
class Config:
alias_generator = attribute_to_camel_case
class CommentAssignmentSchema(BaseModel):
message: str = Field(...)
class IntegrationNotificationSchema(BaseModel):
comment: Optional[str] = Field(None)
class GdprSchema(BaseModel):
maskEmails: bool = Field(...)
sampleRate: int = Field(...)
maskNumbers: bool = Field(...)
defaultInputMode: str = Field(...)
class SampleRateSchema(BaseModel):
rate: int = Field(...)
captureAll: bool = Field(False)
class WeeklyReportConfigSchema(BaseModel):
weekly_report: bool = Field(True)
class Config:
alias_generator = attribute_to_camel_case
class GetHeatmapPayloadSchema(BaseModel):
startDate: int = Field(TimeUTC.now(delta_days=-30))
endDate: int = Field(TimeUTC.now())
url: str = Field(...)
class DatadogSchema(BaseModel):
apiKey: str = Field(...)
applicationKey: str = Field(...)
class StackdriverSchema(BaseModel):
serviceAccountCredentials: str = Field(...)
logName: str = Field(...)
class NewrelicSchema(BaseModel):
applicationId: str = Field(...)
xQueryKey: str = Field(...)
region: str = Field(...)
class RollbarSchema(BaseModel):
accessToken: str = Field(...)
class BugsnagBasicSchema(BaseModel):
authorizationToken: str = Field(...)
class BugsnagSchema(BugsnagBasicSchema):
bugsnagProjectId: str = Field(...)
class CloudwatchBasicSchema(BaseModel):
awsAccessKeyId: str = Field(...)
awsSecretAccessKey: str = Field(...)
region: str = Field(...)
class CloudwatchSchema(CloudwatchBasicSchema):
logGroupName: str = Field(...)
class ElasticsearchBasicSchema(BaseModel):
host: str = Field(...)
port: int = Field(...)
apiKeyId: str = Field(...)
apiKey: str = Field(...)
class ElasticsearchSchema(ElasticsearchBasicSchema):
indexes: str = Field(...)
class SumologicSchema(BaseModel):
accessId: str = Field(...)
accessKey: str = Field(...)
region: str = Field(...)
class MetadataBasicSchema(BaseModel):
index: Optional[int] = Field(None)
key: str = Field(...)
class MetadataListSchema(BaseModel):
list: List[MetadataBasicSchema] = Field(...)
class EmailPayloadSchema(BaseModel):
auth: str = Field(...)
email: EmailStr = Field(...)
link: str = Field(...)
message: str = Field(...)
class MemberInvitationPayloadSchema(BaseModel):
auth: str = Field(...)
email: EmailStr = Field(...)
invitation_link: str = Field(...)
client_id: str = Field(...)
sender_name: str = Field(...)
class Config:
alias_generator = attribute_to_camel_case
class ErrorIdsPayloadSchema(BaseModel):
errors: List[str] = Field([])
class _AlertMessageSchema(BaseModel):
type: str = Field(...)
value: str = Field(...)
class AlertDetectionChangeType(str, Enum):
percent = "percent"
change = "change"
class _AlertOptionSchema(BaseModel):
message: List[_AlertMessageSchema] = Field([])
currentPeriod: Literal[15, 30, 60, 120, 240, 1440] = Field(...)
previousPeriod: Literal[15, 30, 60, 120, 240, 1440] = Field(15)
lastNotification: Optional[int] = Field(None)
renotifyInterval: Optional[int] = Field(720)
change: Optional[AlertDetectionChangeType] = Field(None)
class AlertColumn(str, Enum):
performance__dom_content_loaded__average = "performance.dom_content_loaded.average"
performance__first_meaningful_paint__average = "performance.first_meaningful_paint.average"
performance__page_load_time__average = "performance.page_load_time.average"
performance__dom_build_time__average = "performance.dom_build_time.average"
performance__speed_index__average = "performance.speed_index.average"
performance__page_response_time__average = "performance.page_response_time.average"
performance__ttfb__average = "performance.ttfb.average"
performance__time_to_render__average = "performance.time_to_render.average"
performance__image_load_time__average = "performance.image_load_time.average"
performance__request_load_time__average = "performance.request_load_time.average"
resources__load_time__average = "resources.load_time.average"
resources__missing__count = "resources.missing.count"
errors__4xx_5xx__count = "errors.4xx_5xx.count"
errors__4xx__count = "errors.4xx.count"
errors__5xx__count = "errors.5xx.count"
errors__javascript__impacted_sessions__count = "errors.javascript.impacted_sessions.count"
performance__crashes__count = "performance.crashes.count"
errors__javascript__count = "errors.javascript.count"
errors__backend__count = "errors.backend.count"
custom = "CUSTOM"
class MathOperator(str, Enum):
_equal = "="
_less = "<"
_greater = ">"
_less_eq = "<="
_greater_eq = ">="
class _AlertQuerySchema(BaseModel):
left: AlertColumn = Field(...)
right: float = Field(...)
# operator: Literal["<", ">", "<=", ">="] = Field(...)
operator: MathOperator = Field(...)
class AlertDetectionMethod(str, Enum):
threshold = "threshold"
change = "change"
class AlertSchema(BaseModel):
name: str = Field(...)
detection_method: AlertDetectionMethod = Field(...)
description: Optional[str] = Field(None)
options: _AlertOptionSchema = Field(...)
query: _AlertQuerySchema = Field(...)
series_id: Optional[int] = Field(None)
@root_validator
def alert_validator(cls, values):
if values.get("query") is not None and values["query"].left == AlertColumn.custom:
assert values.get("series_id") is not None, "series_id should not be null for CUSTOM alert"
if values.get("detectionMethod") is not None \
and values["detectionMethod"] == AlertDetectionMethod.change \
and values.get("options") is not None:
assert values["options"].change is not None, \
"options.change should not be null for detection method 'change'"
return values
class Config:
alias_generator = attribute_to_camel_case
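# Note: a CUSTOM query must carry series_id, and detection method "change"
# requires options.change; both constraints are enforced by alert_validator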
class SourcemapUploadPayloadSchema(BaseModel):
urls: List[str] = Field(..., alias="URL")
class ErrorSource(str, Enum):
js_exception = "js_exception"
bugsnag = "bugsnag"
cloudwatch = "cloudwatch"
datadog = "datadog"
newrelic = "newrelic"
rollbar = "rollbar"
sentry = "sentry"
stackdriver = "stackdriver"
sumologic = "sumologic"
class EventType(str, Enum):
click = "CLICK"
input = "INPUT"
location = "LOCATION"
custom = "CUSTOM"
request = "REQUEST"
graphql = "GRAPHQL"
state_action = "STATEACTION"
error = "ERROR"
metadata = "METADATA"
click_ios = "CLICK_IOS"
input_ios = "INPUT_IOS"
view_ios = "VIEW_IOS"
custom_ios = "CUSTOM_IOS"
request_ios = "REQUEST_IOS"
error_ios = "ERROR_IOS"
class PerformanceEventType(str, Enum):
location_dom_complete = "DOM_COMPLETE"
location_largest_contentful_paint_time = "LARGEST_CONTENTFUL_PAINT_TIME"
time_between_events = "TIME_BETWEEN_EVENTS"
location_ttfb = "TTFB"
location_avg_cpu_load = "AVG_CPU_LOAD"
location_avg_memory_usage = "AVG_MEMORY_USAGE"
fetch_failed = "FETCH_FAILED"
# fetch_duration = "FETCH_DURATION"
class FilterType(str, Enum):
user_os = "USEROS"
user_browser = "USERBROWSER"
user_device = "USERDEVICE"
user_country = "USERCOUNTRY"
user_id = "USERID"
user_anonymous_id = "USERANONYMOUSID"
referrer = "REFERRER"
rev_id = "REVID"
# IOS
user_os_ios = "USEROS_IOS"
user_device_ios = "USERDEVICE_IOS"
user_country_ios = "USERCOUNTRY_IOS"
user_id_ios = "USERID_IOS"
user_anonymous_id_ios = "USERANONYMOUSID_IOS"
rev_id_ios = "REVID_IOS"
#
duration = "DURATION"
platform = "PLATFORM"
metadata = "METADATA"
issue = "ISSUE"
events_count = "EVENTS_COUNT"
utm_source = "UTM_SOURCE"
utm_medium = "UTM_MEDIUM"
utm_campaign = "UTM_CAMPAIGN"
class SearchEventOperator(str, Enum):
_is = "is"
_is_any = "isAny"
_on = "on"
_on_any = "onAny"
_is_not = "isNot"
_not_on = "notOn"
_contains = "contains"
_not_contains = "notContains"
_starts_with = "startsWith"
_ends_with = "endsWith"
class PlatformType(str, Enum):
mobile = "mobile"
desktop = "desktop"
tablet = "tablet"
class SearchEventOrder(str, Enum):
_then = "then"
_or = "or"
_and = "and"
class IssueType(str, Enum):
click_rage = 'click_rage'
dead_click = 'dead_click'
excessive_scrolling = 'excessive_scrolling'
bad_request = 'bad_request'
missing_resource = 'missing_resource'
memory = 'memory'
cpu = 'cpu'
slow_resource = 'slow_resource'
slow_page_load = 'slow_page_load'
crash = 'crash'
custom = 'custom'
js_exception = 'js_exception'
class _SessionSearchEventRaw(BaseModel):
custom: Optional[List[Union[int, str]]] = Field(None, min_items=1)
customOperator: Optional[MathOperator] = Field(None)
key: Optional[str] = Field(None)
value: Union[str, List[str]] = Field(...)
type: Union[EventType, PerformanceEventType] = Field(...)
operator: SearchEventOperator = Field(...)
source: Optional[ErrorSource] = Field(default=ErrorSource.js_exception)
@root_validator
def event_validator(cls, values):
if isinstance(values.get("type"), PerformanceEventType):
if values.get("type") == PerformanceEventType.fetch_failed:
return values
assert values.get("custom") is not None, "custom should not be null for PerformanceEventType"
assert values.get("customOperator") is not None \
, "customOperator should not be null for PerformanceEventType"
if values["type"] == PerformanceEventType.time_between_events:
assert len(values.get("value", [])) == 2, \
f"must provide 2 Events as value for {PerformanceEventType.time_between_events}"
assert isinstance(values["value"][0], _SessionSearchEventRaw) \
and isinstance(values["value"][1], _SessionSearchEventRaw) \
, f"event should be of type _SessionSearchEventRaw for {PerformanceEventType.time_between_events}"
else:
for c in values["custom"]:
assert isinstance(c, int), f"custom value should be of type int for {values.get('type')}"
return values
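# Illustrative payload for a performance event (hypothetical values):
# {"type": "DOM_COMPLETE", "value": ["/checkout"], "operator": "is",
#  "custom": [2000], "customOperator": ">"}  -> DOM complete took over 2000 ms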
class _SessionSearchEventSchema(_SessionSearchEventRaw):
value: Union[List[_SessionSearchEventRaw], str, List[str]] = Field(...)
class _SessionSearchFilterSchema(BaseModel):
custom: Optional[List[str]] = Field(None)
key: Optional[str] = Field(None)
value: Union[Optional[Union[IssueType, PlatformType, int, str]],
Optional[List[Union[IssueType, PlatformType, int, str]]]] = Field(...)
type: FilterType = Field(...)
operator: Union[SearchEventOperator, MathOperator] = Field(...)
source: Optional[ErrorSource] = Field(default=ErrorSource.js_exception)
@root_validator
def filter_validator(cls, values):
if values.get("type") == FilterType.issue:
for v in values.get("value"):
assert isinstance(v, IssueType), f"value should be of type IssueType for {values.get('type')} filter"
elif values.get("type") == FilterType.platform:
for v in values.get("value"):
assert isinstance(v, PlatformType), \
f"value should be of type PlatformType for {values.get('type')} filter"
elif values.get("type") == FilterType.events_count:
assert isinstance(values.get("operator"), MathOperator), \
f"operator should be of type MathOperator for {values.get('type')} filter"
for v in values.get("value"):
assert isinstance(v, int), f"value should be of type int for {values.get('type')} filter"
else:
assert isinstance(values.get("operator"), SearchEventOperator), \
f"operator should be of type SearchEventOperator for {values.get('type')} filter"
return values
class SessionsSearchPayloadSchema(BaseModel):
events: List[_SessionSearchEventSchema] = Field([])
filters: List[_SessionSearchFilterSchema] = Field([])
# custom:dict=Field(...)
# rangeValue:str=Field(...)
startDate: int = Field(None)
endDate: int = Field(None)
sort: str = Field(...)
order: str = Field(default="DESC")
# platform: Optional[PlatformType] = Field(None)
events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then)
class Config:
alias_generator = attribute_to_camel_case
class SessionsSearchCountSchema(SessionsSearchPayloadSchema):
sort: Optional[str] = Field(default=None)
order: Optional[str] = Field(default=None)
class FunnelSearchPayloadSchema(SessionsSearchPayloadSchema):
range_value: Optional[str] = Field(None)
sort: Optional[str] = Field(None)
order: Optional[str] = Field(None)
class Config:
alias_generator = attribute_to_camel_case
class FunnelSchema(BaseModel):
name: str = Field(...)
filter: FunnelSearchPayloadSchema = Field([])
is_public: bool = Field(False)
class Config:
alias_generator = attribute_to_camel_case
class UpdateFunnelSchema(FunnelSchema):
name: Optional[str] = Field(None)
filter: Optional[FunnelSearchPayloadSchema] = Field(None)
is_public: Optional[bool] = Field(None)
class FunnelInsightsPayloadSchema(SessionsSearchPayloadSchema):
sort: Optional[str] = Field(None)
order: Optional[str] = Field(None)
class MetricPayloadSchema(BaseModel):
startTimestamp: int = Field(TimeUTC.now(delta_days=-1))
endTimestamp: int = Field(TimeUTC.now())
density: int = Field(7)
filters: List[dict] = Field([])
type: Optional[str] = Field(None)
class Config:
alias_generator = attribute_to_camel_case
class AssistSearchPayloadSchema(BaseModel):
filters: List[dict] = Field([])
class SentrySchema(BaseModel):
projectSlug: str = Field(...)
organizationSlug: str = Field(...)
token: str = Field(...)
class MobileSignPayloadSchema(BaseModel):
keys: List[str] = Field(...)
class CustomMetricSeriesFilterSchema(SessionsSearchPayloadSchema):
startDate: Optional[int] = Field(None)
endDate: Optional[int] = Field(None)
sort: Optional[str] = Field(None)
order: Optional[str] = Field(None)
class CustomMetricCreateSeriesSchema(BaseModel):
name: Optional[str] = Field(None)
index: Optional[int] = Field(None)
filter: Optional[CustomMetricSeriesFilterSchema] = Field([])
class CreateCustomMetricsSchema(BaseModel):
name: str = Field(...)
series: List[CustomMetricCreateSeriesSchema] = Field(..., min_items=1)
is_public: Optional[bool] = Field(False)
class Config:
alias_generator = attribute_to_camel_case
class MetricViewType(str, Enum):
line_chart = "lineChart"
progress = "progress"
class CustomMetricChartPayloadSchema(BaseModel):
startDate: int = Field(TimeUTC.now(-7))
endDate: int = Field(TimeUTC.now())
density: int = Field(7)
viewType: MetricViewType = Field(MetricViewType.line_chart)
class Config:
alias_generator = attribute_to_camel_case
class CustomMetricChartPayloadSchema2(CustomMetricChartPayloadSchema):
metric_id: int = Field(...)
class TryCustomMetricsSchema(CreateCustomMetricsSchema, CustomMetricChartPayloadSchema):
name: Optional[str] = Field(None)
class CustomMetricUpdateSeriesSchema(CustomMetricCreateSeriesSchema):
series_id: Optional[int] = Field(None)
class Config:
alias_generator = attribute_to_camel_case
class UpdateCustomMetricsSchema(CreateCustomMetricsSchema):
series: List[CustomMetricUpdateSeriesSchema] = Field(..., min_items=1)
class SavedSearchSchema(FunnelSchema):
pass
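The models above lean on attribute_to_camel_case for their aliases; a minimal sketch (not part of the diff, payload values hypothetical, assuming pydantic v1 as imported above) of how a camelCase payload maps onto the snake_case fields:

import schemas

payload = {
    "name": "errors trend",
    "isPublic": True,  # alias for is_public
    "series": [{"name": "series 1", "index": 0}],
}
metric = schemas.CreateCustomMetricsSchema.parse_obj(payload)
assert metric.is_public is True and metric.series[0].index == 0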

View file

@ -1,5 +1,7 @@
package types
import "log"
type Project struct {
ProjectID uint32
ProjectKey string
@ -19,6 +21,10 @@ type Project struct {
func (p *Project) GetMetadataNo(key string) uint {
if p == nil {
log.Printf("GetMetadataNo: Project is nil")
return 0
}
if p.Metadata1 != nil && *(p.Metadata1) == key {
return 1
}

View file

@ -43,7 +43,8 @@ func isCachable(rawurl string) bool {
ext == ".woff" ||
ext == ".woff2" ||
ext == ".ttf" ||
ext == ".otf"
ext == ".otf" ||
ext == ".eot"
}
func GetFullCachableURL(baseURL string, relativeURL string) (string, bool) {

View file

@ -1,86 +0,0 @@
package main
import (
"database/sql"
"log"
"os"
"os/signal"
"syscall"
"time"
"openreplay/backend/pkg/db/postgres"
"openreplay/backend/pkg/env"
_ "github.com/lib/pq"
)
func main() {
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
POSTGRES_STRING := env.String("POSTGRES_STRING")
NOTIFICATIONS_STRING := env.String("ALERT_NOTIFICATION_STRING")
log.Printf("Notifications: %s \nPG: %s\n", NOTIFICATIONS_STRING, POSTGRES_STRING)
pg := postgres.NewConn(POSTGRES_STRING)
defer pg.Close()
pgs, err := sql.Open("postgres", POSTGRES_STRING+ "?sslmode=disable")
if err != nil {
log.Fatal(err)
}
defer pgs.Close()
manager := NewManager(NOTIFICATIONS_STRING, POSTGRES_STRING, pgs, pg)
if err := pg.IterateAlerts(func(a *postgres.Alert, err error) {
if err != nil {
log.Printf("Postgres error: %v\n", err)
return
}
log.Printf("Alert initialization: %+v\n", *a)
//log.Printf("CreatedAt: %s\n", *a.CreatedAt)
err = manager.Update(a)
if err != nil {
log.Printf("Alert parse error: %v | Alert: %+v\n", err, *a)
return
}
}); err != nil {
log.Fatalf("Postgres error: %v\n", err)
}
listener, err := postgres.NewAlertsListener(POSTGRES_STRING)
if err != nil {
log.Fatalf("Postgres listener error: %v\n", err)
}
defer listener.Close()
sigchan := make(chan os.Signal, 1)
signal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)
tickAlert := time.Tick(1 * time.Minute)
log.Printf("Alert service started\n")
manager.RequestAll()
//return
for {
select {
case sig := <-sigchan:
log.Printf("Caught signal %v: terminating\n", sig)
listener.Close()
pg.Close()
os.Exit(0)
case <-tickAlert:
log.Printf("Requesting all...%d alerts\n", manager.Length())
manager.RequestAll()
case iPointer := <-listener.Alerts:
log.Printf("Alert update: %+v\n", *iPointer)
//log.Printf("CreatedAt: %s\n", *iPointer.CreatedAt)
//log.Printf("Notification received for AlertId: %d\n", iPointer.AlertID)
err := manager.Update(iPointer)
if err != nil {
log.Printf("Alert parse error: %+v | Alert: %v\n", err, *iPointer)
}
case err := <-listener.Errors:
log.Printf("listener error: %v\n", err)
if err.Error() == "conn closed" {
panic("Listener conn lost")
}
}
}
}

View file

@ -1,171 +0,0 @@
package main
import (
"database/sql"
"fmt"
"log"
"sync"
"sync/atomic"
"time"
"openreplay/backend/pkg/db/postgres"
)
const PGParallelLimit = 2
var pgCount int64
type manager struct {
postgresString string
notificationsUrl string
alertsCache map[uint32]*postgres.Alert
cacheMutex sync.Mutex
pgParallel chan bool
pgs *sql.DB
pg *postgres.Conn
pgMutex sync.Mutex
notifications map[uint32]*postgres.TenantNotification
notificationsGo *sync.WaitGroup
notificationsMutex sync.Mutex
}
func NewManager(notificationsUrl string, postgresString string, pgs *sql.DB, pg *postgres.Conn) *manager {
return &manager{
postgresString: postgresString,
notificationsUrl: notificationsUrl,
alertsCache: make(map[uint32]*postgres.Alert),
cacheMutex: sync.Mutex{},
pgParallel: make(chan bool, PGParallelLimit),
pgs: pgs,
pg: pg,
pgMutex: sync.Mutex{},
notifications: make(map[uint32]*postgres.TenantNotification),
notificationsGo: &sync.WaitGroup{},
notificationsMutex: sync.Mutex{},
}
}
func (m *manager) Length() int {
return len(m.alertsCache)
}
func (m *manager) Update(a *postgres.Alert) error {
m.cacheMutex.Lock()
defer m.cacheMutex.Unlock()
_, exists := m.alertsCache[a.AlertID]
if exists && a.DeletedAt != nil {
log.Println("deleting alert from memory")
delete(m.alertsCache, a.AlertID)
return nil
} else {
m.alertsCache[a.AlertID] = a
}
return nil
}
func (m *manager) processAlert(a *postgres.Alert) {
defer func() {
defer m.notificationsGo.Done()
<-m.pgParallel
}()
if !a.CanCheck() {
log.Printf("cannot check %s", a.Name)
return
}
//log.Printf("checking %+v", a)
log.Printf("quering %s", a.Name)
//--- For stats:
atomic.AddInt64(&pgCount, 1)
q, err := a.Build()
if err != nil {
log.Println(err)
return
}
rows, err := q.RunWith(m.pgs).Query()
if err != nil {
log.Println(err)
return
}
defer rows.Close()
for rows.Next() {
var (
value sql.NullFloat64
valid bool
)
if err := rows.Scan(&value, &valid); err != nil {
log.Println(err)
continue
}
if valid && value.Valid {
log.Printf("%s: valid", a.Name)
m.notificationsMutex.Lock()
m.notifications[a.AlertID] = &postgres.TenantNotification{
TenantId: a.TenantId,
Title: a.Name,
Description: fmt.Sprintf("has been triggered, %s = %.0f (%s %.0f).", a.Query.Left, value.Float64, a.Query.Operator, a.Query.Right),
ButtonText: "Check metrics for more details",
ButtonUrl: fmt.Sprintf("/%d/metrics", a.ProjectID),
ImageUrl: nil,
Options: map[string]interface{}{"source": "ALERT", "sourceId": a.AlertID, "sourceMeta": a.DetectionMethod, "message": a.Options.Message, "projectId": a.ProjectID, "data": map[string]interface{}{"title": a.Name, "limitValue": a.Query.Right, "actualValue": value.Float64, "operator": a.Query.Operator, "trigger": a.Query.Left, "alertId": a.AlertID, "detectionMethod": a.DetectionMethod, "currentPeriod": a.Options.CurrentPeriod, "previousPeriod": a.Options.PreviousPeriod, "createdAt": time.Now().Unix() * 1000}},
}
m.notificationsMutex.Unlock()
}
}
}
func (m *manager) RequestAll() {
now := time.Now().Unix()
m.cacheMutex.Lock()
for _, a := range m.alertsCache {
m.pgParallel <- true
m.notificationsGo.Add(1)
go m.processAlert(a)
//m.processAlert(a)
}
//log.Println("releasing cache")
m.cacheMutex.Unlock()
//log.Println("waiting for all alerts to finish")
m.notificationsGo.Wait()
log.Printf("done %d PG queries in: %ds", pgCount, time.Now().Unix()-now)
pgCount = 0
//log.Printf("Processing %d Notifications", len(m.notifications))
m.notificationsMutex.Lock()
go m.ProcessNotifications(m.notifications)
m.notificationsMutex.Unlock()
m.notifications = make(map[uint32]*postgres.TenantNotification)
//log.Printf("Notifications purged: %d", len(m.notifications))
}
func (m *manager) ProcessNotifications(allNotifications map[uint32]*postgres.TenantNotification) {
if len(allNotifications) == 0 {
log.Println("No notifications to process")
return
}
log.Printf("sending %d notifications", len(allNotifications))
allIds := make([]uint32, 0, len(allNotifications))
toSend := postgres.Notifications{
Notifications: []*postgres.TenantNotification{},
}
for k, n := range allNotifications {
//log.Printf("notification for %d", k)
allIds = append(allIds, k)
toSend.Notifications = append(toSend.Notifications, n)
}
toSend.Send(m.notificationsUrl)
if err := m.pg.SaveLastNotification(allIds); err != nil {
log.Printf("Error saving LastNotification time: %v", err)
if err.Error() == "conn closed" {
m.pg = postgres.NewConn(m.postgresString)
//if err != nil {
// panic(fmt.Sprintf("Postgres renew notifications connection error: %v\n", err))
//}
if err := m.pg.SaveLastNotification(allIds); err != nil {
panic(fmt.Sprintf("Error saving LastNotification time, suicide: %v", err))
}
}
}
}

View file

@ -3,6 +3,7 @@ package builder
import (
"net/url"
"strings"
"time"
"openreplay/backend/pkg/intervals"
. "openreplay/backend/pkg/messages"
@ -42,6 +43,7 @@ func getResourceType(initiator string, URL string) string {
type builder struct {
readyMsgs []Message
timestamp uint64
lastProcessedTimestamp int64
peBuilder *pageEventBuilder
ptaBuilder *performanceTrackAggrBuilder
ieBuilder *inputEventBuilder
@ -112,6 +114,10 @@ func (b *builder) handleMessage(message Message, messageID uint64) {
if b.timestamp <= timestamp { // unnecessary? TODO: test and remove
b.timestamp = timestamp
}
b.lastProcessedTimestamp = time.Now().UnixNano()/1e6
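// UnixNano()/1e6 yields the current wall-clock time in milliseconds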
// Might happen before the first timestamp.
switch msg := message.(type) {
case *SessionStart,
@ -294,6 +300,7 @@ func (b *builder) checkTimeouts(ts int64) bool {
}
lastTsGap := ts - int64(b.timestamp)
//b.lastProcessedTimestamp
//log.Printf("checking timeouts for sess %v: %v now, %v sesstime; gap %v",b.sid, ts, b.timestamp, lastTsGap)
if lastTsGap > intervals.EVENTS_SESSION_END_TIMEOUT {
if rm := b.ddDetector.Build(); rm != nil {

View file

@ -20,8 +20,13 @@ import (
func main() {
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
writer := NewWriter(env.Uint16("FS_ULIMIT"), env.String("FS_DIR"))
FS_DIR := env.String("FS_DIR")
if _, err := os.Stat(FS_DIR); os.IsNotExist(err) {
log.Fatalf("%v doesn't exist. %v", FS_DIR, err)
}
writer := NewWriter(env.Uint16("FS_ULIMIT"), FS_DIR)
count := 0
consumer := queue.NewMessageConsumer(

View file

@ -1,81 +0,0 @@
{
"version": "2.0",
"app_name": "parrot",
"environment_variables": {
},
"stages": {
"default-ee": {
"api_gateway_stage": "default-ee",
"manage_iam_role": false,
"iam_role_arn": "",
"autogen_policy": true,
"environment_variables": {
"isFOS": "false",
"isEE": "true",
"stage": "default-ee",
"jwt_issuer": "openreplay-default-ee",
"sentryURL": "",
"pg_host": "127.0.0.1",
"pg_port": "9202",
"pg_dbname": "app",
"pg_user": "",
"pg_password": "",
"ch_host": "",
"ch_port": "",
"alert_ntf": "http://127.0.0.1:8000/async/alerts/notifications/%s",
"email_signup": "http://127.0.0.1:8000/async/email_signup/%s",
"email_funnel": "http://127.0.0.1:8000/async/funnel/%s",
"email_plans": "http://127.0.0.1:8000/async/plans/%s",
"email_basic": "http://127.0.0.1:8000/async/basic/%s",
"assign_link": "http://127.0.0.1:8000/async/email_assignment",
"captcha_server": "",
"captcha_key": "",
"sessions_bucket": "mobs",
"sessions_region": "us-east-1",
"put_S3_TTL": "20",
"sourcemaps_reader": "http://utilities-openreplay.app.svc.cluster.local:9000/sourcemaps",
"sourcemaps_bucket": "sourcemaps",
"peers": "http://utilities-openreplay.app.svc.cluster.local:9000/assist/%s/peers",
"js_cache_bucket": "sessions-assets",
"async_Token": "",
"EMAIL_HOST": "",
"EMAIL_PORT": "587",
"EMAIL_USER": "",
"EMAIL_PASSWORD": "",
"EMAIL_USE_TLS": "true",
"EMAIL_USE_SSL": "false",
"EMAIL_SSL_KEY": "",
"EMAIL_SSL_CERT": "",
"EMAIL_FROM": "OpenReplay<do-not-reply@openreplay.com>",
"SITE_URL": "",
"announcement_url": "",
"jwt_secret": "SET A RANDOM STRING HERE",
"jwt_algorithm": "HS512",
"jwt_exp_delta_seconds": "2592000",
"S3_HOST": "",
"S3_KEY": "",
"S3_SECRET": "",
"LICENSE_KEY": "",
"SAML2_MD_URL": "",
"idp_entityId": "",
"idp_sso_url": "",
"idp_x509cert": "",
"idp_sls_url": "",
"idp_name": "",
"sso_exp_delta_seconds": "172800",
"sso_landing": "/login?jwt=%s",
"invitation_link": "/api/users/invitation?token=%s",
"change_password_link": "/reset-password?invitation=%s&&pass=%s",
"iosBucket": "openreplay-ios-images",
"version_number": "1.3.6",
"assist_secret": ""
},
"lambda_timeout": 150,
"lambda_memory_size": 400,
"subnet_ids": [
],
"security_group_ids": [
]
}
}
}

53
ee/api/.env.default Normal file
View file

@ -0,0 +1,53 @@
EMAIL_FROM=OpenReplay<do-not-reply@openreplay.com>
EMAIL_HOST=
EMAIL_PASSWORD=
EMAIL_PORT=587
EMAIL_SSL_CERT=
EMAIL_SSL_KEY=
EMAIL_USER=
EMAIL_USE_SSL=false
EMAIL_USE_TLS=true
LICENSE_KEY=
S3_HOST=
S3_KEY=
S3_SECRET=
SAML2_MD_URL=
SITE_URL=
alert_ntf=http://127.0.0.1:8000/async/alerts/notifications/%s
announcement_url=
assign_link=http://127.0.0.1:8000/async/email_assignment
async_Token=
captcha_key=
captcha_server=
ch_host=
ch_port=
change_password_link=/reset-password?invitation=%s&&pass=%s
email_basic=http://127.0.0.1:8000/async/basic/%s
email_plans=http://127.0.0.1:8000/async/plans/%s
email_signup=http://127.0.0.1:8000/async/email_signup/%s
idp_entityId=
idp_sls_url=
idp_sso_url=
idp_x509cert=
invitation_link=/api/users/invitation?token=%s
isEE=true
isFOS=false
js_cache_bucket=sessions-assets
jwt_algorithm=HS512
jwt_exp_delta_seconds=2592000
jwt_issuer=openreplay-default-ee
jwt_secret="SET A RANDOM STRING HERE"
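# replace with a real random secret before deploying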
peers=http://utilities-openreplay.app.svc.cluster.local:9000/assist/%s/peers
pg_dbname=app
pg_host=127.0.0.1
pg_password=
pg_port=9202
pg_user=
put_S3_TTL=20
sentryURL=
sessions_bucket=mobs
sessions_region=us-east-1
sourcemaps_bucket=sourcemaps
sourcemaps_reader=http://utilities-openreplay.app.svc.cluster.local:9000/sourcemaps
stage=default-ee
version_number=1.0.0

34
ee/api/.gitignore vendored
View file

@ -178,6 +178,7 @@ README/*
Pipfile
/chalicelib/core/alerts.py
/chalicelib/core/alerts_processor.py
/chalicelib/core/announcements.py
/chalicelib/blueprints/bp_app_api.py
/chalicelib/blueprints/bp_core.py
@ -186,6 +187,7 @@ Pipfile
/chalicelib/core/errors_favorite_viewed.py
/chalicelib/core/events.py
/chalicelib/core/events_ios.py
/chalicelib/core/funnels.py
/chalicelib/core/integration_base.py
/chalicelib/core/integration_base_issue.py
/chalicelib/core/integration_github.py
@ -204,13 +206,13 @@ Pipfile
/chalicelib/core/log_tool_sentry.py
/chalicelib/core/log_tool_stackdriver.py
/chalicelib/core/log_tool_sumologic.py
/chalicelib/core/metadata.py
/chalicelib/core/mobile.py
/chalicelib/core/sessions.py
/chalicelib/core/sessions_assignments.py
/chalicelib/core/sessions_favorite_viewed.py
/chalicelib/core/sessions_metas.py
/chalicelib/core/sessions_mobs.py
/chalicelib/core/sessions.py
/chalicelib/core/significance.py
/chalicelib/core/slack.py
/chalicelib/core/socket_ios.py
@ -235,12 +237,30 @@ Pipfile
/chalicelib/utils/smtp.py
/chalicelib/utils/strings.py
/chalicelib/utils/TimeUTC.py
/chalicelib/core/heatmaps.py
/chalicelib/blueprints/app/__init__.py
/routers/app/__init__.py
/routers/crons/__init__.py
/routers/subs/__init__.py
/routers/__init__.py
/chalicelib/core/assist.py
/auth/auth_apikey.py
/auth/auth_jwt.py
/chalicelib/blueprints/subs/bp_insights.py
/build.sh
/routers/core.py
/routers/crons/core_crons.py
/routers/subs/dashboard.py
/db_changes.sql
/Dockerfile.bundle
/entrypoint.bundle.sh
/entrypoint.sh
/env_handler.py
/chalicelib/core/heatmaps.py
/routers/subs/insights.py
/schemas.py
/chalicelib/blueprints/app/v1_api.py
/build.sh
/chalicelib/core/assist.py
/chalicelib/blueprints/app/__init__.py
/Dockerfile.bundle
/routers/app/v1_api.py
/chalicelib/core/custom_metrics.py
/chalicelib/core/performance_event.py
/chalicelib/core/saved_search.py
/app_alerts.py
/build_alerts.sh

View file

@ -1,10 +1,11 @@
FROM python:3.6-slim
FROM python:3.9.7-slim
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf /var/lib/apt/lists/*
WORKDIR /work
COPY . .
RUN pip install -r requirements.txt -t ./vendor --upgrade
RUN pip install chalice==1.22.2
RUN pip install -r requirements.txt
RUN mv .env.default .env
# Add Tini
# Startup daemon

19
ee/api/Dockerfile.alerts Normal file
View file

@ -0,0 +1,19 @@
FROM python:3.9.7-slim
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf /var/lib/apt/lists/*
WORKDIR /work
COPY . .
RUN pip install -r requirements.txt
RUN mv .env.default .env && mv app_alerts.py app.py
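# Note: the same codebase ships both services; renaming app_alerts.py to app.py
# makes the alerts entrypoint the default app for this image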
ENV pg_minconn 2
# Add Tini
# Startup daemon
ENV TINI_VERSION v0.19.0
ARG envarg
ENV ENTERPRISE_BUILD ${envarg}
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
RUN chmod +x /tini
ENTRYPOINT ["/tini", "--"]
CMD ./entrypoint.sh

View file

@ -1,129 +1,86 @@
import sentry_sdk
from chalice import Chalice, Response
from sentry_sdk import configure_scope
import logging
import queue
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from decouple import config
from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
from starlette import status
from starlette.responses import StreamingResponse, JSONResponse
from chalicelib import _overrides
from chalicelib.blueprints import bp_authorizers
from chalicelib.blueprints import bp_core, bp_core_crons
from chalicelib.blueprints import bp_core_dynamic, bp_core_dynamic_crons
from chalicelib.blueprints import bp_ee, bp_ee_crons, bp_saml
from chalicelib.blueprints.app import v1_api, v1_api_ee
from chalicelib.blueprints.subs import bp_dashboard
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.helper import environ
from routers import core, core_dynamic, ee, saml
from routers.app import v1_api, v1_api_ee
from routers.crons import core_crons
from routers.crons import core_dynamic_crons
from routers.subs import dashboard
app = Chalice(app_name='parrot')
app.debug = not helper.is_production() or helper.is_local()
sentry_sdk.init(environ["sentryURL"])
# Monkey-patch print for DataDog hack
import sys
import traceback
old_tb = traceback.print_exception
old_f = sys.stdout
old_e = sys.stderr
OR_SESSION_TOKEN = None
class F:
def write(self, x):
if OR_SESSION_TOKEN is not None and x != '\n' and not helper.is_local():
old_f.write(f"[or_session_token={OR_SESSION_TOKEN}] {x}")
else:
old_f.write(x)
def flush(self):
pass
def tb_print_exception(etype, value, tb, limit=None, file=None, chain=True):
if OR_SESSION_TOKEN is not None and not helper.is_local():
value = type(value)(f"[or_session_token={OR_SESSION_TOKEN}] " + str(value))
old_tb(etype, value, tb, limit, file, chain)
if helper.is_production():
traceback.print_exception = tb_print_exception
sys.stdout = F()
sys.stderr = F()
# ---End Monkey-patch
_overrides.chalice_app(app)
app = FastAPI()
@app.middleware('http')
def or_middleware(event, get_response):
async def or_middleware(request: Request, call_next):
from chalicelib.core import unlock
if not unlock.is_valid():
return Response(body={"errors": ["expired license"]}, status_code=403)
if "{projectid}" in event.path.lower():
from chalicelib.core import projects
if event.context["authorizer"].get("authorizer_identity") == "api_key" \
and not projects.is_authorized(
project_id=projects.get_internal_project_id(event.uri_params["projectId"]),
tenant_id=event.context["authorizer"]["tenantId"]) \
or event.context["authorizer"].get("authorizer_identity", "jwt") == "jwt" \
and not projects.is_authorized(project_id=event.uri_params["projectId"],
tenant_id=event.context["authorizer"]["tenantId"]):
print("unauthorized project")
pg_client.close()
return Response(body={"errors": ["unauthorized project"]}, status_code=401)
global OR_SESSION_TOKEN
OR_SESSION_TOKEN = app.current_request.headers.get('vnd.openreplay.com.sid',
app.current_request.headers.get('vnd.asayer.io.sid'))
if "authorizer" in event.context and event.context["authorizer"] is None:
print("Deleted user!!")
pg_client.close()
return Response(body={"errors": ["Deleted user"]}, status_code=403)
return JSONResponse(content={"errors": ["expired license"]}, status_code=status.HTTP_403_FORBIDDEN)
global OR_SESSION_TOKEN
OR_SESSION_TOKEN = request.headers.get('vnd.openreplay.com.sid', request.headers.get('vnd.asayer.io.sid'))
try:
if helper.TRACK_TIME:
import time
now = int(time.time() * 1000)
response = get_response(event)
if response.status_code == 200 and response.body is not None and response.body.get("errors") is not None:
if "not found" in response.body["errors"][0]:
response = Response(status_code=404, body=response.body)
else:
response = Response(status_code=400, body=response.body)
if response.status_code // 100 == 5 and helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local():
with configure_scope() as scope:
scope.set_tag('stage', environ["stage"])
scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN)
scope.set_extra("context", event.context)
sentry_sdk.capture_exception(Exception(response.body))
response: StreamingResponse = await call_next(request)
if helper.TRACK_TIME:
print(f"Execution time: {int(time.time() * 1000) - now} ms")
except Exception as e:
if helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local():
with configure_scope() as scope:
scope.set_tag('stage', environ["stage"])
scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN)
scope.set_extra("context", event.context)
sentry_sdk.capture_exception(e)
response = Response(body={"Code": "InternalServerError",
"Message": "An internal server error occurred [level=Fatal]."},
status_code=500)
pg_client.close()
raise e
pg_client.close()
return response
# Open source
app.register_blueprint(bp_authorizers.app)
app.register_blueprint(bp_core.app)
app.register_blueprint(bp_core_crons.app)
app.register_blueprint(bp_core_dynamic.app)
app.register_blueprint(bp_core_dynamic_crons.app)
app.register_blueprint(bp_dashboard.app)
app.register_blueprint(v1_api.app)
app.register_blueprint(v1_api_ee.app)
# Enterprise
app.register_blueprint(bp_ee.app)
app.register_blueprint(bp_ee_crons.app)
app.register_blueprint(bp_saml.app)
origins = [
"*",
]
app.add_middleware(
CORSMiddleware,
allow_origins=origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
app.include_router(core.public_app)
app.include_router(core.app)
app.include_router(core.app_apikey)
app.include_router(core_dynamic.public_app)
app.include_router(core_dynamic.app)
app.include_router(core_dynamic.app_apikey)
app.include_router(ee.public_app)
app.include_router(ee.app)
app.include_router(ee.app_apikey)
app.include_router(saml.public_app)
app.include_router(saml.app)
app.include_router(saml.app_apikey)
app.include_router(dashboard.app)
# app.include_router(insights.app)
app.include_router(v1_api.app_apikey)
app.include_router(v1_api_ee.app_apikey)
app.queue_system = queue.Queue()
app.schedule = AsyncIOScheduler()
app.schedule.start()
for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs:
app.schedule.add_job(id=job["func"].__name__, **job)
from chalicelib.core import traces
app.schedule.add_job(id="trace_worker", **traces.cron_jobs[0])
for job in app.schedule.get_jobs():
print({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)})
logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
logging.getLogger('apscheduler').setLevel(config("LOGLEVEL", default=logging.INFO))

0
ee/api/auth/__init__.py Normal file
View file

View file

@ -0,0 +1,27 @@
from fastapi import Request
from starlette import status
from starlette.exceptions import HTTPException
import schemas
from chalicelib.core import projects
from or_dependencies import OR_context
class ProjectAuthorizer:
def __init__(self, project_identifier):
self.project_identifier: str = project_identifier
async def __call__(self, request: Request) -> None:
if len(request.path_params.keys()) == 0 or request.path_params.get(self.project_identifier) is None:
return
current_user: schemas.CurrentContext = await OR_context(request)
project_identifier = request.path_params[self.project_identifier]
user_id = current_user.user_id if request.state.authorizer_identity == "jwt" else None
if (self.project_identifier == "projectId" \
and not projects.is_authorized(project_id=project_identifier, tenant_id=current_user.tenant_id,
user_id=user_id)) \
or (self.project_identifier.lower() == "projectkey" \
and not projects.is_authorized(project_id=projects.get_internal_project_id(project_identifier),
tenant_id=current_user.tenant_id, user_id=user_id)):
print("unauthorized project")
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="unauthorized project.")
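A minimal sketch (an assumption, not shown in this diff) of how ProjectAuthorizer is meant to be wired into a FastAPI router as a dependency; the router name is illustrative:

from fastapi import APIRouter, Depends

# every route registered on this router passes through the project check
app = APIRouter(dependencies=[Depends(ProjectAuthorizer(project_identifier="projectId"))])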

View file

@ -1,104 +0,0 @@
from chalice import Chalice, CORSConfig
from chalicelib.blueprints import bp_authorizers
from chalicelib.core import authorizers
import sched
import threading
import time
from datetime import datetime
import pytz
from croniter import croniter
base_time = datetime.now(pytz.utc)
cors_config = CORSConfig(
allow_origin='*',
allow_headers=['vnd.openreplay.com.sid', 'vnd.asayer.io.sid'],
# max_age=600,
# expose_headers=['X-Special-Header'],
allow_credentials=True
)
def chalice_app(app):
def app_route(self, path, **kwargs):
kwargs.setdefault('cors', cors_config)
kwargs.setdefault('authorizer', bp_authorizers.jwt_authorizer)
handler_type = 'route'
name = kwargs.pop('name', None)
registration_kwargs = {'path': path, 'kwargs': kwargs, 'authorizer': kwargs.get("authorizer")}
def _register_handler(user_handler):
handler_name = name
if handler_name is None:
handler_name = user_handler.__name__
if registration_kwargs is not None:
kwargs = registration_kwargs
else:
kwargs = {}
if kwargs['authorizer'] == bp_authorizers.jwt_authorizer \
or kwargs['authorizer'] == bp_authorizers.api_key_authorizer:
def _user_handler(context=None, **args):
if context is not None:
args['context'] = context
else:
authorizer_context = app.current_request.context['authorizer']
if kwargs['authorizer'] == bp_authorizers.jwt_authorizer:
args['context'] = authorizers.jwt_context(authorizer_context)
else:
args['context'] = authorizer_context
return user_handler(**args)
wrapped = self._wrap_handler(handler_type, handler_name, _user_handler)
self._register_handler(handler_type, handler_name, _user_handler, wrapped, kwargs)
else:
wrapped = self._wrap_handler(handler_type, handler_name, user_handler)
self._register_handler(handler_type, handler_name, user_handler, wrapped, kwargs)
return wrapped
return _register_handler
app.route = app_route.__get__(app, Chalice)
def app_schedule(self, expression, name=None, description=''):
handler_type = 'schedule'
registration_kwargs = {'expression': expression,
'description': description}
def _register_handler(user_handler):
handler_name = name
if handler_name is None:
handler_name = user_handler.__name__
kwargs = registration_kwargs
cron_expression = kwargs["expression"].to_string()[len("cron("):-1]
if len(cron_expression.split(" ")) > 5:
cron_expression = " ".join(cron_expression.split(" ")[:-1])
cron_expression = cron_expression.replace("?", "*")
cron_shell(user_handler, cron_expression)
wrapped = self._wrap_handler(handler_type, handler_name, user_handler)
self._register_handler(handler_type, handler_name, user_handler, wrapped, kwargs)
return wrapped
return _register_handler
app.schedule = app_schedule.__get__(app, Chalice)
def spawn(function, args):
th = threading.Thread(target=function, kwargs=args)
th.setDaemon(True)
th.start()
def cron_shell(function, cron_expression):
def to_start():
scheduler = sched.scheduler(time.time, time.sleep)
citer = croniter(cron_expression, base_time)
while True:
next_execution = citer.get_next(datetime)
print(f"{function.__name__} next execution: {next_execution}")
scheduler.enterabs(next_execution.timestamp(), 1, function, argument=(None,))
scheduler.run()
print(f"{function.__name__} executed: {next_execution}")
spawn(to_start, None)

View file

@ -1,16 +0,0 @@
from chalice import Blueprint
from chalicelib import _overrides
from chalicelib.blueprints import bp_authorizers
from chalicelib.utils import assist_helper
app = Blueprint(__name__)
_overrides.chalice_app(app)
@app.route('/v1/assist/credentials', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer)
def get_assist_credentials(context):
credentials = assist_helper.get_temporary_credentials()
if "errors" in credentials:
return credentials
return {"data": credentials}

View file

@ -1,38 +0,0 @@
from chalice import Blueprint, AuthResponse
from chalicelib.utils import helper
from chalicelib.core import authorizers
from chalicelib.core import users
app = Blueprint(__name__)
@app.authorizer()
def api_key_authorizer(auth_request):
r = authorizers.api_key_authorizer(auth_request.token)
if r is None:
return AuthResponse(routes=[], principal_id=None)
r["authorizer_identity"] = "api_key"
print(r)
return AuthResponse(
routes=['*'],
principal_id=r['tenantId'],
context=r
)
@app.authorizer(ttl_seconds=60)
def jwt_authorizer(auth_request):
jwt_payload = authorizers.jwt_authorizer(auth_request.token)
if jwt_payload is None \
or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
or not users.auth_exists(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"],
jwt_iat=jwt_payload["iat"], jwt_aud=jwt_payload["aud"]):
return AuthResponse(routes=[], principal_id=None)
jwt_payload["authorizer_identity"] = "jwt"
print(jwt_payload)
return AuthResponse(
routes=['*'],
principal_id=jwt_payload['userId'],
context=jwt_payload
)

View file

@ -1,470 +0,0 @@
from chalice import Blueprint, Response
from chalicelib import _overrides
from chalicelib.core import assist
from chalicelib.core import boarding
from chalicelib.core import errors
from chalicelib.core import license
from chalicelib.core import metadata, errors_favorite_viewed, slack, alerts, sessions, integrations_manager
from chalicelib.core import notifications
from chalicelib.core import projects
from chalicelib.core import signup
from chalicelib.core import tenants
from chalicelib.core import users
from chalicelib.core import webhook
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import captcha, SAML2_helper
from chalicelib.utils import helper
from chalicelib.utils.helper import environ
app = Blueprint(__name__)
_overrides.chalice_app(app)
@app.route('/login', methods=['POST'], authorizer=None)
def login():
data = app.current_request.json_body
if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]):
return {"errors": ["Invalid captcha."]}
r = users.authenticate(data['email'], data['password'], for_plugin=False)
if r is None:
return Response(status_code=401, body={
'errors': ["You've entered invalid Email or Password."]
})
elif "errors" in r:
return r
tenant_id = r.pop("tenantId")
# change this in open-source
r = {**r,
"limits": {
"teamMember": int(environ.get("numberOfSeats", 0)),
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(tenant_id)},
**license.get_status(tenant_id),
"smtp": environ["EMAIL_HOST"] is not None and len(environ["EMAIL_HOST"]) > 0,
"saml2": SAML2_helper.is_saml2_available(),
"iceServers": assist.get_ice_servers()
}
c = tenants.get_by_tenant_id(tenant_id)
c.pop("createdAt")
c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
stack_integrations=True, version=True)
return {
'jwt': r.pop('jwt'),
'data': {
"user": r,
"client": c
}
}
@app.route('/account', methods=['GET'])
def get_account(context):
r = users.get(tenant_id=context['tenantId'], user_id=context['userId'])
return {
'data': {
**r,
"limits": {
"teamMember": int(environ.get("numberOfSeats", 0)),
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(context['tenantId'])
},
**license.get_status(context["tenantId"]),
"smtp": environ["EMAIL_HOST"] is not None and len(environ["EMAIL_HOST"]) > 0,
"saml2": SAML2_helper.is_saml2_available(),
"iceServers": assist.get_ice_servers()
}
}
@app.route('/projects', methods=['GET'])
def get_projects(context):
return {"data": projects.get_projects(tenant_id=context["tenantId"], recording_state=True, gdpr=True, recorded=True,
stack_integrations=True, version=True)}
@app.route('/projects', methods=['POST', 'PUT'])
def create_project(context):
data = app.current_request.json_body
return projects.create(tenant_id=context["tenantId"], user_id=context["userId"], data=data)
@app.route('/projects/{projectId}', methods=['POST', 'PUT'])
def create_edit_project(projectId, context):
data = app.current_request.json_body
return projects.edit(tenant_id=context["tenantId"], user_id=context["userId"], data=data, project_id=projectId)
@app.route('/projects/{projectId}', methods=['GET'])
def get_project(projectId, context):
data = projects.get_project(tenant_id=context["tenantId"], project_id=projectId, include_last_session=True,
include_gdpr=True)
if data is None:
return {"errors": ["project not found"]}
return {"data": data}
@app.route('/projects/{projectId}', methods=['DELETE'])
def delete_project(projectId, context):
return projects.delete(tenant_id=context["tenantId"], user_id=context["userId"], project_id=projectId)
@app.route('/projects/limit', methods=['GET'])
def get_projects_limit(context):
return {"data": {
"current": projects.count_by_tenant(tenant_id=context["tenantId"]),
"remaining": -1 # change this in open-source
}}
@app.route('/client', methods=['GET'])
def get_client(context):
r = tenants.get_by_tenant_id(context['tenantId'])
if r is not None:
r.pop("createdAt")
r["projects"] = projects.get_projects(tenant_id=context['tenantId'], recording_state=True, recorded=True,
stack_integrations=True, version=True)
return {
'data': r
}
@app.route('/client/new_api_key', methods=['GET'])
def generate_new_tenant_token(context):
return {
'data': tenants.generate_new_api_key(context['tenantId'])
}
@app.route('/client', methods=['PUT', 'POST'])
def put_client(context):
data = app.current_request.json_body
return tenants.update(tenant_id=context["tenantId"], user_id=context["userId"], data=data)
@app.route('/signup', methods=['GET'], authorizer=None)
def get_all_signup():
return {"data": {"tenants": tenants.tenants_exists(),
"sso": SAML2_helper.is_saml2_available(),
"ssoProvider": SAML2_helper.get_saml2_provider(),
"edition": helper.get_edition()}}
@app.route('/signup', methods=['POST', 'PUT'], authorizer=None)
def signup_handler():
data = app.current_request.json_body
return signup.create_step1(data)
@app.route('/integrations/slack', methods=['POST', 'PUT'])
def add_slack_client(context):
data = app.current_request.json_body
if "url" not in data or "name" not in data:
return {"errors": ["please provide a url and a name"]}
n = Slack.add_channel(tenant_id=context["tenantId"], url=data["url"], name=data["name"])
if n is None:
return {
"errors": ["We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
}
return {"data": n}
@app.route('/integrations/slack/{integrationId}', methods=['POST', 'PUT'])
def edit_slack_integration(integrationId, context):
data = app.current_request.json_body
if data.get("url") and len(data["url"]) > 0:
old = webhook.get(tenant_id=context["tenantId"], webhook_id=integrationId)
if old["endpoint"] != data["url"]:
if not Slack.say_hello(data["url"]):
return {
"errors": [
"We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
}
return {"data": webhook.update(tenant_id=context["tenantId"], webhook_id=integrationId,
changes={"name": data.get("name", ""), "endpoint": data["url"]})}
@app.route('/{projectId}/errors/search', methods=['POST'])
def errors_search(projectId, context):
data = app.current_request.json_body
params = app.current_request.query_params
if params is None:
params = {}
return errors.search(data, projectId, user_id=context["userId"], status=params.get("status", "ALL"),
favorite_only="favorite" in params)
@app.route('/{projectId}/errors/stats', methods=['GET'])
def errors_stats(projectId, context):
params = app.current_request.query_params
if params is None:
params = {}
return errors.stats(projectId, user_id=context["userId"], **params)
@app.route('/{projectId}/errors/{errorId}', methods=['GET'])
def errors_get_details(projectId, errorId, context):
params = app.current_request.query_params
if params is None:
params = {}
data = errors.get_details(project_id=projectId, user_id=context["userId"], error_id=errorId, **params)
if data.get("data") is not None:
errors_favorite_viewed.viewed_error(project_id=projectId, user_id=context['userId'], error_id=errorId)
return data
@app.route('/{projectId}/errors/{errorId}/stats', methods=['GET'])
def errors_get_details_right_column(projectId, errorId, context):
params = app.current_request.query_params
if params is None:
params = {}
data = errors.get_details_chart(project_id=projectId, user_id=context["userId"], error_id=errorId, **params)
return data
@app.route('/{projectId}/errors/{errorId}/sourcemaps', methods=['GET'])
def errors_get_details_sourcemaps(projectId, errorId, context):
data = errors.get_trace(project_id=projectId, error_id=errorId)
if "errors" in data:
return data
return {
'data': data
}
@app.route('/async/alerts/notifications/{step}', methods=['POST', 'PUT'], authorizer=None)
def send_alerts_notification_async(step):
data = app.current_request.json_body
if data.pop("auth") != environ["async_Token"]:
return {"errors": ["missing auth"]}
if step == "slack":
slack.send_batch(notifications_list=data.get("notifications"))
elif step == "email":
alerts.send_by_email_batch(notifications_list=data.get("notifications"))
elif step == "webhook":
webhook.trigger_batch(data_list=data.get("notifications"))
@app.route('/notifications', methods=['GET'])
def get_notifications(context):
return {"data": notifications.get_all(tenant_id=context['tenantId'], user_id=context['userId'])}
@app.route('/notifications/{notificationId}/view', methods=['GET'])
def view_notifications(notificationId, context):
return {"data": notifications.view_notification(notification_ids=[notificationId], user_id=context['userId'])}
@app.route('/notifications/view', methods=['POST', 'PUT'])
def batch_view_notifications(context):
data = app.current_request.json_body
return {"data": notifications.view_notification(notification_ids=data.get("ids", []),
startTimestamp=data.get("startTimestamp"),
endTimestamp=data.get("endTimestamp"),
user_id=context['userId'],
tenant_id=context["tenantId"])}
@app.route('/notifications', methods=['POST', 'PUT'], authorizer=None)
def create_notifications():
data = app.current_request.json_body
if data.get("token", "") != "nF46JdQqAM5v9KI9lPMpcu8o9xiJGvNNWOGL7TJP":
return {"errors": ["missing token"]}
return notifications.create(data.get("notifications", []))
@app.route('/boarding', methods=['GET'])
def get_boarding_state(context):
return {"data": boarding.get_state(tenant_id=context["tenantId"])}
@app.route('/boarding/installing', methods=['GET'])
def get_boarding_state_installing(context):
return {"data": boarding.get_state_installing(tenant_id=context["tenantId"])}
@app.route('/boarding/identify-users', methods=['GET'])
def get_boarding_state_identify_users(context):
return {"data": boarding.get_state_identify_users(tenant_id=context["tenantId"])}
@app.route('/boarding/manage-users', methods=['GET'])
def get_boarding_state_manage_users(context):
return {"data": boarding.get_state_manage_users(tenant_id=context["tenantId"])}
@app.route('/boarding/integrations', methods=['GET'])
def get_boarding_state_integrations(context):
return {"data": boarding.get_state_integrations(tenant_id=context["tenantId"])}
# this endpoint supports both jira & github based on `provider` attribute
@app.route('/integrations/issues', methods=['POST', 'PUT'])
def add_edit_jira_cloud_github(context):
data = app.current_request.json_body
provider = data.get("provider", "").upper()
error, integration = integrations_manager.get_integration(tool=provider, tenant_id=context["tenantId"],
user_id=context["userId"])
if error is not None:
return error
return {"data": integration.add_edit(data=data)}
@app.route('/integrations/slack/{integrationId}', methods=['GET'])
def get_slack_webhook(integrationId, context):
return {"data": webhook.get(tenant_id=context["tenantId"], webhook_id=integrationId)}
@app.route('/integrations/slack/channels', methods=['GET'])
def get_slack_integration(context):
return {"data": webhook.get_by_type(tenant_id=context["tenantId"], webhook_type='slack')}
@app.route('/integrations/slack/{integrationId}', methods=['DELETE'])
def delete_slack_integration(integrationId, context):
return webhook.delete(context["tenantId"], integrationId)
@app.route('/webhooks', methods=['POST', 'PUT'])
def add_edit_webhook(context):
data = app.current_request.json_body
return {"data": webhook.add_edit(tenant_id=context["tenantId"], data=data, replace_none=True)}
@app.route('/webhooks', methods=['GET'])
def get_webhooks(context):
return {"data": webhook.get_by_tenant(tenant_id=context["tenantId"], replace_none=True)}
@app.route('/webhooks/{webhookId}', methods=['DELETE'])
def delete_webhook(webhookId, context):
return {"data": webhook.delete(tenant_id=context["tenantId"], webhook_id=webhookId)}
@app.route('/client/members', methods=['GET'])
def get_members(context):
return {"data": users.get_members(tenant_id=context['tenantId'])}
@app.route('/client/members', methods=['PUT', 'POST'])
def add_member(context):
# if SAML2_helper.is_saml2_available():
# return {"errors": ["please use your SSO server to add teammates"]}
data = app.current_request.json_body
return users.create_member(tenant_id=context['tenantId'], user_id=context['userId'], data=data)
@app.route('/users/invitation', methods=['GET'], authorizer=None)
def process_invitation_link():
params = app.current_request.query_params
if params is None or len(params.get("token", "")) < 64:
return {"errors": ["please provide a valid invitation"]}
user = users.get_by_invitation_token(params["token"])
if user is None:
return {"errors": ["invitation not found"]}
if user["expiredInvitation"]:
return {"errors": ["expired invitation, please ask your admin to send a new one"]}
if user["expiredChange"] is not None and not user["expiredChange"] \
and user["changePwdToken"] is not None and user["changePwdAge"] < -5 * 60:
pass_token = user["changePwdToken"]
else:
pass_token = users.allow_password_change(user_id=user["userId"])
return Response(
status_code=307,
body='',
headers={'Location': environ["SITE_URL"] + environ["change_password_link"] % (params["token"], pass_token),
'Content-Type': 'text/plain'})
@app.route('/password/reset', methods=['POST', 'PUT'], authorizer=None)
def change_password_by_invitation():
data = app.current_request.json_body
if data is None or len(data.get("invitation", "")) < 64 or len(data.get("pass", "")) < 8:
return {"errors": ["please provide a valid invitation & pass"]}
user = users.get_by_invitation_token(token=data["invitation"], pass_token=data["pass"])
if user is None:
return {"errors": ["invitation not found"]}
if user["expiredChange"]:
return {"errors": ["expired change, please re-use the invitation link"]}
return users.set_password_invitation(new_password=data["password"], user_id=user["userId"],
tenant_id=user["tenantId"])
@app.route('/client/members/{memberId}', methods=['PUT', 'POST'])
def edit_member(memberId, context):
data = app.current_request.json_body
return users.edit(tenant_id=context['tenantId'], editor_id=context['userId'], changes=data,
user_id_to_update=memberId)
@app.route('/client/members/{memberId}/reset', methods=['GET'])
def reset_reinvite_member(memberId, context):
return users.reset_member(tenant_id=context['tenantId'], editor_id=context['userId'], user_id_to_update=memberId)
@app.route('/client/members/{memberId}', methods=['DELETE'])
def delete_member(memberId, context):
return users.delete_member(tenant_id=context["tenantId"], user_id=context['userId'], id_to_delete=memberId)
@app.route('/account/new_api_key', methods=['GET'])
def generate_new_user_token(context):
return {"data": users.generate_new_api_key(user_id=context['userId'])}
@app.route('/account', methods=['POST', 'PUT'])
def edit_account(context):
data = app.current_request.json_body
return users.edit(tenant_id=context['tenantId'], user_id_to_update=context['userId'], changes=data,
editor_id=context['userId'])
@app.route('/account/password', methods=['PUT', 'POST'])
def change_client_password(context):
data = app.current_request.json_body
return users.change_password(email=context['email'], old_password=data["oldPassword"],
new_password=data["newPassword"], tenant_id=context["tenantId"],
user_id=context["userId"])
@app.route('/metadata/session_search', methods=['GET'])
def search_sessions_by_metadata(context):
params = app.current_request.query_params
if params is None:
return {"errors": ["please provide a key&value for search"]}
value = params.get('value', '')
key = params.get('key', '')
project_id = params.get('projectId')
if project_id is not None \
and not projects.is_authorized(project_id=project_id, tenant_id=context["tenantId"]):
return {"errors": ["unauthorized project"]}
if len(value) == 0 and len(key) == 0:
return {"errors": ["please provide a key&value for search"]}
if len(value) == 0:
return {"errors": ["please provide a value for search"]}
if len(key) == 0:
return {"errors": ["please provide a key for search"]}
return {
"data": sessions.search_by_metadata(tenant_id=context["tenantId"], user_id=context["userId"], m_value=value,
m_key=key,
project_id=project_id)}
@app.route('/plans', methods=['GET'])
def get_current_plan(context):
return {
"data": license.get_status(context["tenantId"])
}
@app.route('/alerts/notifications', methods=['POST', 'PUT'], authorizer=None)
def send_alerts_notifications():
data = app.current_request.json_body
return {"data": alerts.process_notifications(data.get("notifications", []))}

Some files were not shown because too many files have changed in this diff.