API

.gitignore (vendored; 1 addition)
@@ -3,3 +3,4 @@ public
 node_modules
 *DS_Store
 *.env
+.idea

api/.chalice/config.json (new file; 72 lines)
@@ -0,0 +1,72 @@
{
  "version": "2.0",
  "app_name": "parrot",
  "environment_variables": {
    "version": "0.0.0"
  },
  "stages": {
    "default-foss": {
      "api_gateway_stage": "default-fos",
      "manage_iam_role": false,
      "iam_role_arn": "",
      "autogen_policy": true,
      "environment_variables": {
        "isFOS": "true",
        "isEE": "false",
        "stage": "default-fos",
        "jwt_issuer": "asayer-default-fos",
        "allowCron": "true",
        "sentry": "false",
        "sentryURL": "",
        "pg_host": "",
        "pg_port": "5432",
        "pg_dbname": "",
        "pg_user": "",
        "pg_password": "",
        "alert_ntf": "http://127.0.0.1:8000/async/alerts/notifications/%s",
        "email_signup": "http://127.0.0.1:8000/async/email_signup/%s",
        "email_funnel": "http://127.0.0.1:8000/async/funnel/%s",
        "email_basic": "http://127.0.0.1:8000/async/basic/%s",
        "assign_link": "http://127.0.0.1:8000/async/email_assignment",
        "verification_link": "http://127.0.0.1:8000/email/validate/",
        "verification_success_redirect": "",
        "verification_fail_redirect": "",
        "captcha_server": "",
        "captcha_key": "",
        "jira_api": "http://127.0.0.1:3000/dev",
        "github_api": "http://127.0.0.1:4000/dev",
        "sessions_bucket": "",
        "sessions_region": "",
        "stack_reader": "",
        "put_S3_TTL": "20",
        "sourcemaps_bucket": "",
        "sourcemaps_bucket_key": "",
        "sourcemaps_bucket_secret": "",
        "sourcemaps_bucket_region": "",
        "js_cache_bucket": "",
        "web_mobs": "https://mobs-staging.asayer.io",
        "async_Token": "",
        "EMAIL_HOST": "",
        "EMAIL_PORT": "587",
        "EMAIL_USER": "",
        "EMAIL_PASSWORD": "",
        "EMAIL_USE_TLS": "true",
        "EMAIL_USE_SSL": "false",
        "EMAIL_SSL_KEY": "",
        "EMAIL_SSL_CERT": "",
        "EMAIL_FROM": "OpenReplay<do-not-reply@openreplay.com>",
        "SITE_URL": "",
        "announcement_bucket": "",
        "jwt_secret": "",
        "jwt_algorithm": "HS512",
        "jwt_exp_delta_seconds": "2592000"
      },
      "lambda_timeout": 150,
      "lambda_memory_size": 400,
      "subnet_ids": [
      ],
      "security_group_ids": [
      ]
    }
  }
}
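
The stage name passed to chalice selects which environment_variables block above is injected into the process environment, which is how handlers later read values such as jwt_algorithm or pg_host. A minimal sketch of serving this config locally (the command mirrors the Dockerfile below):

    # serve the API with the default-foss stage's environment_variables
    chalice local --no-autoreload --host 0.0.0.0 --stage default-foss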

api/.gitignore (vendored; new file; 176 lines)
@@ -0,0 +1,176 @@
# Created by .ignore support plugin (hsz.mobi)
### JetBrains template
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839

# User-specific stuff:
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/dictionaries

# Sensitive or high-churn files:
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.xml
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml

# Gradle:
.idea/**/gradle.xml
.idea/**/libraries

# CMake
cmake-build-debug/

# Mongo Explorer plugin:
.idea/**/mongoSettings.xml

## File-based project format:
*.iws

## Plugin-specific files:

# IntelliJ
out/

# mpeltonen/sbt-idea plugin
.idea_modules/

# JIRA plugin
atlassian-ide-plugin.xml

# Cursive Clojure plugin
.idea/replstate.xml

# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
### Example user template template
### Example user template

# IntelliJ project files
.idea
*.iml
out
gen
### Python template
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/

# Translations
*.mo
*.pot

# Django stuff:
*.log
.static_storage/
.media/
local_settings.py

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
/.chalice/deployments/

vendor


logs*.txt
*.csv

*.p
*.js
SUBNETS.json

./chalicelib/.configs

api/Dockerfile (new file; 16 lines)
@@ -0,0 +1,16 @@
FROM python:3.6-slim
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
WORKDIR /work
COPY . .
RUN pip install -r requirements.txt -t ./vendor --upgrade
RUN pip install chalice==1.22.2

# Add Tini
# Startup daemon
ENV TINI_VERSION v0.19.0
ARG envarg
ENV ENTERPRISE_BUILD ${envarg}
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
RUN chmod +x /tini
ENTRYPOINT ["/tini", "--"]
CMD chalice local --no-autoreload --host 0.0.0.0 --stage ${ENTERPRISE_BUILD}
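
For local testing the image can be built and run directly; a sketch under assumed names (the parrot-api tag and host port are illustrative; only the envarg build-arg and port 8000, chalice local's default, come from the files above):

    docker build --build-arg envarg=default-foss -t parrot-api .
    docker run -p 8000:8000 parrot-api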

api/app.py (new file; 96 lines)
@@ -0,0 +1,96 @@
import sentry_sdk
from chalice import Chalice, Response
from sentry_sdk import configure_scope

from chalicelib import _overrides
from chalicelib.blueprints import bp_authorizers
from chalicelib.blueprints import bp_core, bp_core_crons
from chalicelib.blueprints import bp_core_dynamic, bp_core_dynamic_crons
from chalicelib.blueprints.subs import bp_dashboard
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils.helper import environ

app = Chalice(app_name='parrot')
app.debug = not helper.is_production() or helper.is_local()

sentry_sdk.init(environ["sentryURL"])

# Monkey-patch print for DataDog hack
import sys
import traceback

old_tb = traceback.print_exception
old_f = sys.stdout
old_e = sys.stderr
ASAYER_SESSION_ID = None


class F:
    def write(self, x):
        if ASAYER_SESSION_ID is not None and x != '\n' and not helper.is_local():
            old_f.write(f"[asayer_session_id={ASAYER_SESSION_ID}] {x}")
        else:
            old_f.write(x)

    def flush(self):
        pass


def tb_print_exception(etype, value, tb, limit=None, file=None, chain=True):
    if ASAYER_SESSION_ID is not None and not helper.is_local():
        # bugsnag.notify(Exception(str(value)), meta_data={"special_info": {"asayerSessionId": ASAYER_SESSION_ID}})
        value = type(value)(f"[asayer_session_id={ASAYER_SESSION_ID}] " + str(value))

    old_tb(etype, value, tb, limit, file, chain)


if helper.is_production():
    traceback.print_exception = tb_print_exception

    sys.stdout = F()
    sys.stderr = F()
# ---End Monkey-patch


_overrides.chalice_app(app)


# v0505
@app.middleware('http')
def asayer_middleware(event, get_response):
    global ASAYER_SESSION_ID
    ASAYER_SESSION_ID = app.current_request.headers.get('vnd.asayer.io.sid')
    if "authorizer" in event.context and event.context["authorizer"] is None:
        print("Deleted user!!")
        pg_client.close()
        return Response(body={"errors": ["Deleted user"]}, status_code=403)

    try:
        if helper.TRACK_TIME:
            import time
            now = int(time.time() * 1000)
        response = get_response(event)
        if helper.TRACK_TIME:
            print(f"Execution time: {int(time.time() * 1000) - now} ms")
    except Exception as e:
        print("middleware exception handling")
        print(e)
        pg_client.close()
        if helper.allow_sentry() and ASAYER_SESSION_ID is not None and not helper.is_local():
            with configure_scope() as scope:
                scope.set_tag('stage', environ["stage"])
                scope.set_tag('openReplaySessionToken', ASAYER_SESSION_ID)
                scope.set_extra("context", event.context)
            sentry_sdk.capture_exception(e)
        raise e
    pg_client.close()
    return response


# Open source
app.register_blueprint(bp_authorizers.app)
app.register_blueprint(bp_core.app)
app.register_blueprint(bp_core_crons.app)
app.register_blueprint(bp_core_dynamic.app)
app.register_blueprint(bp_core_dynamic_crons.app)
app.register_blueprint(bp_dashboard.app)
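
The middleware above keys everything on the vnd.asayer.io.sid header: in a deployed (non-local) stage, any request carrying it gets its stdout/stderr lines and re-raised exceptions prefixed with the session id. A hedged sketch of exercising it; the JWT and session id are placeholders, and /account is one of the routes registered later in bp_core_dynamic:

    curl http://127.0.0.1:8000/account \
        -H 'Authorization: Bearer <jwt>' \
        -H 'vnd.asayer.io.sid: 12345'
    # server-side log line (deployed stage): [asayer_session_id=12345] ...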

api/build.sh (new file; 31 lines)
@@ -0,0 +1,31 @@
#!/bin/bash

# Script to build api module
# flags to accept:
# envarg: build for enterprise edition.
# Default will be OSS build.

# Usage: bash build.sh <ee>

git_sha1=$(git rev-parse HEAD)
envarg="default-foss"
check_prereq() {
    which docker || {
        echo "Docker not installed, please install docker."
        exit=1
    }
    [[ exit -eq 1 ]] && exit 1
}

function build_api(){
    # Copy enterprise code
    [[ $1 == "ee" ]] && {
        cp -rf ../ee/api/* ./
        cp -rf ../ee/api/.chalice/* ./.chalice/
        envarg="default-ee"
    }
    docker build -f ./Dockerfile --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/chalice:${git_sha1} .
}

check_prereq
build_api $1
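
Typical invocations, following the script's own usage note (the registry name is illustrative; DOCKER_REPO falls back to 'local' as in the docker build line above):

    bash build.sh                                        # OSS image, stage default-foss baked in
    DOCKER_REPO=registry.example.com bash build.sh ee    # enterprise image, stage default-ee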

api/chalicelib/__init__.py (new file; empty)

api/chalicelib/_overrides.py (new file; 104 lines)
@@ -0,0 +1,104 @@
from chalice import Chalice, CORSConfig
from chalicelib.blueprints import bp_authorizers
from chalicelib.core import authorizers

import sched
import threading
import time
from datetime import datetime
import pytz
from croniter import croniter

base_time = datetime.now(pytz.utc)

cors_config = CORSConfig(
    allow_origin='*',
    allow_headers=['vnd.asayer.io.sid'],
    # max_age=600,
    # expose_headers=['X-Special-Header'],
    allow_credentials=True
)


def chalice_app(app):
    def app_route(self, path, **kwargs):
        kwargs.setdefault('cors', cors_config)
        kwargs.setdefault('authorizer', bp_authorizers.jwt_authorizer)
        handler_type = 'route'
        name = kwargs.pop('name', None)
        registration_kwargs = {'path': path, 'kwargs': kwargs, 'authorizer': kwargs.get("authorizer")}

        def _register_handler(user_handler):
            handler_name = name
            if handler_name is None:
                handler_name = user_handler.__name__
            if registration_kwargs is not None:
                kwargs = registration_kwargs
            else:
                kwargs = {}

            if kwargs['authorizer'] == bp_authorizers.jwt_authorizer \
                    or kwargs['authorizer'] == bp_authorizers.api_key_authorizer:
                def _user_handler(context=None, **args):
                    if context is not None:
                        args['context'] = context
                    else:
                        authorizer_context = app.current_request.context['authorizer']
                        if kwargs['authorizer'] == bp_authorizers.jwt_authorizer:
                            args['context'] = authorizers.jwt_context(authorizer_context)
                        else:
                            args['context'] = authorizer_context
                    return user_handler(**args)

                wrapped = self._wrap_handler(handler_type, handler_name, _user_handler)
                self._register_handler(handler_type, handler_name, _user_handler, wrapped, kwargs)
            else:
                wrapped = self._wrap_handler(handler_type, handler_name, user_handler)
                self._register_handler(handler_type, handler_name, user_handler, wrapped, kwargs)
            return wrapped

        return _register_handler

    app.route = app_route.__get__(app, Chalice)

    def app_schedule(self, expression, name=None, description=''):
        handler_type = 'schedule'
        registration_kwargs = {'expression': expression,
                               'description': description}

        def _register_handler(user_handler):
            handler_name = name
            if handler_name is None:
                handler_name = user_handler.__name__
            kwargs = registration_kwargs
            cron_expression = kwargs["expression"].to_string()[len("cron("):-1]
            if len(cron_expression.split(" ")) > 5:
                cron_expression = " ".join(cron_expression.split(" ")[:-1])
            cron_expression = cron_expression.replace("?", "*")
            cron_shell(user_handler, cron_expression)

            wrapped = self._wrap_handler(handler_type, handler_name, user_handler)
            self._register_handler(handler_type, handler_name, user_handler, wrapped, kwargs)
            return wrapped

        return _register_handler

    app.schedule = app_schedule.__get__(app, Chalice)


def spawn(function, args):
    th = threading.Thread(target=function, kwargs=args)
    th.setDaemon(True)
    th.start()


def cron_shell(function, cron_expression):
    def to_start():
        scheduler = sched.scheduler(time.time, time.sleep)
        citer = croniter(cron_expression, base_time)
        while True:
            next_execution = citer.get_next(datetime)
            print(f"{function.__name__} next execution: {next_execution}")
            scheduler.enterabs(next_execution.timestamp(), 1, function, argument=(None,))
            scheduler.run()
            print(f"{function.__name__} executed: {next_execution}")

    spawn(to_start, None)
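
The net effect of the app.route override is that every handler not registered with authorizer=None receives the resolved authorizer context as a context keyword argument (run through authorizers.jwt_context for JWT-protected routes). A hypothetical route, not part of this diff, showing the shape the blueprints below rely on:

    @app.route('/ping', methods=['GET'])  # hypothetical route, for illustration only
    def ping(context):
        # context is injected by the wrapper above; on the JWT path it carries
        # the decoded claims, e.g. userId and tenantId
        return {"data": {"userId": context["userId"], "tenantId": context["tenantId"]}}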

api/chalicelib/blueprints/__init__.py (new file; empty)

api/chalicelib/blueprints/bp_authorizers.py (new file; 37 lines)
@@ -0,0 +1,37 @@
from chalice import Blueprint, AuthResponse
from chalicelib.core import authorizers

from chalicelib.core import users

app = Blueprint(__name__)


@app.authorizer()
def api_key_authorizer(auth_request):
    r = authorizers.api_key_authorizer(auth_request.token)
    if r is None:
        return AuthResponse(routes=[], principal_id=None)

    return AuthResponse(
        routes=['*'],
        principal_id=r['tenantId'],
        context=r
    )


@app.authorizer(ttl_seconds=60)
def jwt_authorizer(auth_request):
    print("---- Auth")
    jwt_payload = authorizers.jwt_authorizer(auth_request.token)
    print(jwt_payload)
    if jwt_payload is None \
            or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \
            or not users.auth_exists(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"],
                                     jwt_iat=jwt_payload["iat"], jwt_aud=jwt_payload["aud"]):
        return AuthResponse(routes=[], principal_id=None)

    return AuthResponse(
        routes=['*'],
        principal_id=jwt_payload['userId'],
        context=jwt_payload
    )
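
jwt_authorizer therefore rejects any token whose decoded payload lacks iat or aud, or whose userId/tenantId/iat/aud tuple no longer matches a stored auth record. A sketch of a payload shape that would clear the checks (field names come from the code above; every value is illustrative):

    jwt_payload = {
        "userId": 1,
        "tenantId": 1,
        "iat": 1620000000,  # issued-at; must match the record users.auth_exists looks up
        "aud": "front",     # audience string; illustrative value
    }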

api/chalicelib/blueprints/bp_core.py (new file; 879 lines)
@@ -0,0 +1,879 @@
from chalicelib.utils.helper import environ

from chalice import Blueprint
from chalice import Response

from chalicelib import _overrides
from chalicelib.blueprints import bp_authorizers
from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assignments, projects, \
    sessions_metas, alerts, funnels, issues, integrations_manager, errors_favorite_viewed, metadata, \
    log_tool_elasticsearch, log_tool_datadog, \
    log_tool_stackdriver, reset_password, sessions_favorite_viewed, \
    log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, errors, sessions, \
    log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import email_helper

app = Blueprint(__name__)
_overrides.chalice_app(app)


@app.route('/{projectId}/sessions2/favorite', methods=['GET'])
def get_favorite_sessions2(projectId, context):
    params = app.current_request.query_params

    return {
        'data': sessions.get_favorite_sessions(project_id=projectId, user_id=context["userId"], include_viewed=True)
    }


@app.route('/{projectId}/sessions2/{sessionId}', methods=['GET'])
def get_session2(projectId, sessionId, context):
    data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, user_id=context["userId"],
                                  include_fav_viewed=True, group_metadata=True)
    if data is not None:
        sessions_favorite_viewed.view_session(project_id=projectId, user_id=context['userId'], session_id=sessionId)
    return {
        'data': data
    }


@app.route('/{projectId}/sessions2/{sessionId}/favorite', methods=['GET'])
def add_remove_favorite_session2(projectId, sessionId, context):
    return {
        "data": sessions_favorite_viewed.favorite_session(project_id=projectId, user_id=context['userId'],
                                                          session_id=sessionId)}


@app.route('/{projectId}/sessions2/{sessionId}/assign', methods=['GET'])
def assign_session(projectId, sessionId, context):
    data = sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId,
                                               tenant_id=context['tenantId'],
                                               user_id=context["userId"])
    if "errors" in data:
        return data
    return {
        'data': data
    }


@app.route('/{projectId}/sessions2/{sessionId}/errors/{errorId}/sourcemaps', methods=['GET'])
def get_error_trace(projectId, sessionId, errorId, context):
    data = errors.get_trace(project_id=projectId, error_id=errorId)
    if "errors" in data:
        return data
    return {
        'data': data
    }


@app.route('/{projectId}/sessions2/{sessionId}/assign/{issueId}', methods=['GET'])
def get_session_assignment(projectId, sessionId, issueId, context):
    data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId,
                                    tenant_id=context['tenantId'], user_id=context["userId"])
    if "errors" in data:
        return data
    return {
        'data': data
    }


@app.route('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', methods=['POST', 'PUT'])
def comment_assignment(projectId, sessionId, issueId, context):
    data = app.current_request.json_body
    data = sessions_assignments.comment(tenant_id=context['tenantId'], project_id=projectId,
                                        session_id=sessionId, assignment_id=issueId,
                                        user_id=context["userId"], message=data["message"])
    if "errors" in data.keys():
        return data
    return {
        'data': data
    }


@app.route('/{projectId}/events/search', methods=['GET'])
def events_search(projectId, context):
    params = app.current_request.query_params
    if params is None:
        return {"data": []}

    q = params.get('q', '')
    if len(q) == 0:
        return {"data": []}
    result = events.search_pg2(q, params.get('type', ''), project_id=projectId, source=params.get('source'),
                               key=params.get("key"))
    return result


@app.route('/{projectId}/sessions/search2', methods=['POST'])
def sessions_search2(projectId, context):
    data = app.current_request.json_body

    data = sessions.search2_pg(data, projectId, user_id=context["userId"])
    return {'data': data}


@app.route('/{projectId}/sessions/filters', methods=['GET'])
def session_filter_values(projectId, context):
    return {'data': sessions_metas.get_key_values(projectId)}


@app.route('/{projectId}/sessions/filters/top', methods=['GET'])
def session_top_filter_values(projectId, context):
    return {'data': sessions_metas.get_top_key_values(projectId)}


@app.route('/{projectId}/sessions/filters/search', methods=['GET'])
def get_session_filters_meta(projectId, context):
    params = app.current_request.query_params
    if params is None:
        return {"data": []}

    meta_type = params.get('type', '')
    if len(meta_type) == 0:
        return {"data": []}
    q = params.get('q', '')
    if len(q) == 0:
        return {"data": []}
    return sessions_metas.search(project_id=projectId, meta_type=meta_type, text=q)


@app.route('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}',
           methods=['POST', 'PUT'])
def integration_notify(projectId, integration, integrationId, source, sourceId, context):
    data = app.current_request.json_body
    comment = None
    if "comment" in data:
        comment = data["comment"]
    if integration == "slack":
        args = {"tenant_id": context["tenantId"],
                "user": context['email'], "comment": comment, "project_id": projectId,
                "integration_id": integrationId}
        if source == "sessions":
            return Slack.share_session(session_id=sourceId, **args)
        elif source == "errors":
            return Slack.share_error(error_id=sourceId, **args)
    return {"data": None}


@app.route('/integrations/sentry', methods=['GET'])
def get_all_sentry(context):
    return {"data": log_tool_sentry.get_all(tenant_id=context["tenantId"])}


@app.route('/{projectId}/integrations/sentry', methods=['GET'])
def get_sentry(projectId, context):
    return {"data": log_tool_sentry.get(project_id=projectId)}


@app.route('/{projectId}/integrations/sentry', methods=['POST', 'PUT'])
def add_edit_sentry(projectId, context):
    data = app.current_request.json_body

    return {"data": log_tool_sentry.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}


@app.route('/{projectId}/integrations/sentry', methods=['DELETE'])
def delete_sentry(projectId, context):
    return {"data": log_tool_sentry.delete(tenant_id=context["tenantId"], project_id=projectId)}


@app.route('/{projectId}/integrations/sentry/events/{eventId}', methods=['GET'])
def proxy_sentry(projectId, eventId, context):
    return {"data": log_tool_sentry.proxy_get(tenant_id=context["tenantId"], project_id=projectId, event_id=eventId)}

@app.route('/integrations/datadog', methods=['GET'])
def get_all_datadog(context):
    return {"data": log_tool_datadog.get_all(tenant_id=context["tenantId"])}


@app.route('/{projectId}/integrations/datadog', methods=['GET'])
def get_datadog(projectId, context):
    return {"data": log_tool_datadog.get(project_id=projectId)}


@app.route('/{projectId}/integrations/datadog', methods=['POST', 'PUT'])
def add_edit_datadog(projectId, context):
    data = app.current_request.json_body

    return {"data": log_tool_datadog.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}


@app.route('/{projectId}/integrations/datadog', methods=['DELETE'])
def delete_datadog(projectId, context):
    return {"data": log_tool_datadog.delete(tenant_id=context["tenantId"], project_id=projectId)}


@app.route('/integrations/stackdriver', methods=['GET'])
def get_all_stackdriver(context):
    return {"data": log_tool_stackdriver.get_all(tenant_id=context["tenantId"])}


@app.route('/{projectId}/integrations/stackdriver', methods=['GET'])
def get_stackdriver(projectId, context):
    return {"data": log_tool_stackdriver.get(project_id=projectId)}


@app.route('/{projectId}/integrations/stackdriver', methods=['POST', 'PUT'])
def add_edit_stackdriver(projectId, context):
    data = app.current_request.json_body

    return {"data": log_tool_stackdriver.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}


@app.route('/{projectId}/integrations/stackdriver', methods=['DELETE'])
def delete_stackdriver(projectId, context):
    return {"data": log_tool_stackdriver.delete(tenant_id=context["tenantId"], project_id=projectId)}


@app.route('/integrations/newrelic', methods=['GET'])
def get_all_newrelic(context):
    return {"data": log_tool_newrelic.get_all(tenant_id=context["tenantId"])}


@app.route('/{projectId}/integrations/newrelic', methods=['GET'])
def get_newrelic(projectId, context):
    return {"data": log_tool_newrelic.get(project_id=projectId)}


@app.route('/{projectId}/integrations/newrelic', methods=['POST', 'PUT'])
def add_edit_newrelic(projectId, context):
    data = app.current_request.json_body

    return {"data": log_tool_newrelic.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}


@app.route('/{projectId}/integrations/newrelic', methods=['DELETE'])
def delete_newrelic(projectId, context):
    return {"data": log_tool_newrelic.delete(tenant_id=context["tenantId"], project_id=projectId)}


@app.route('/integrations/rollbar', methods=['GET'])
def get_all_rollbar(context):
    return {"data": log_tool_rollbar.get_all(tenant_id=context["tenantId"])}


@app.route('/{projectId}/integrations/rollbar', methods=['GET'])
def get_rollbar(projectId, context):
    return {"data": log_tool_rollbar.get(project_id=projectId)}


@app.route('/{projectId}/integrations/rollbar', methods=['POST', 'PUT'])
def add_edit_rollbar(projectId, context):
    data = app.current_request.json_body

    return {"data": log_tool_rollbar.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}


@app.route('/{projectId}/integrations/rollbar', methods=['DELETE'])
def delete_rollbar(projectId, context):
    return {"data": log_tool_rollbar.delete(tenant_id=context["tenantId"], project_id=projectId)}


@app.route('/integrations/bugsnag/list_projects', methods=['POST'])
def list_projects_bugsnag(context):
    data = app.current_request.json_body
    return {"data": log_tool_bugsnag.list_projects(auth_token=data["authorizationToken"])}


@app.route('/integrations/bugsnag', methods=['GET'])
def get_all_bugsnag(context):
    return {"data": log_tool_bugsnag.get_all(tenant_id=context["tenantId"])}


@app.route('/{projectId}/integrations/bugsnag', methods=['GET'])
def get_bugsnag(projectId, context):
    return {"data": log_tool_bugsnag.get(project_id=projectId)}


@app.route('/{projectId}/integrations/bugsnag', methods=['POST', 'PUT'])
def add_edit_bugsnag(projectId, context):
    data = app.current_request.json_body

    return {"data": log_tool_bugsnag.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}


@app.route('/{projectId}/integrations/bugsnag', methods=['DELETE'])
def delete_bugsnag(projectId, context):
    return {"data": log_tool_bugsnag.delete(tenant_id=context["tenantId"], project_id=projectId)}


@app.route('/integrations/cloudwatch/list_groups', methods=['POST'])
def list_groups_cloudwatch(context):
    data = app.current_request.json_body
    return {"data": log_tool_cloudwatch.list_log_groups(aws_access_key_id=data["awsAccessKeyId"],
                                                        aws_secret_access_key=data["awsSecretAccessKey"],
                                                        region=data["region"])}


@app.route('/integrations/cloudwatch', methods=['GET'])
def get_all_cloudwatch(context):
    return {"data": log_tool_cloudwatch.get_all(tenant_id=context["tenantId"])}


@app.route('/{projectId}/integrations/cloudwatch', methods=['GET'])
def get_cloudwatch(projectId, context):
    return {"data": log_tool_cloudwatch.get(project_id=projectId)}


@app.route('/{projectId}/integrations/cloudwatch', methods=['POST', 'PUT'])
def add_edit_cloudwatch(projectId, context):
    data = app.current_request.json_body

    return {"data": log_tool_cloudwatch.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}


@app.route('/{projectId}/integrations/cloudwatch', methods=['DELETE'])
def delete_cloudwatch(projectId, context):
    return {"data": log_tool_cloudwatch.delete(tenant_id=context["tenantId"], project_id=projectId)}

@app.route('/integrations/elasticsearch', methods=['GET'])
def get_all_elasticsearch(context):
    return {"data": log_tool_elasticsearch.get_all(tenant_id=context["tenantId"])}


@app.route('/{projectId}/integrations/elasticsearch', methods=['GET'])
def get_elasticsearch(projectId, context):
    return {"data": log_tool_elasticsearch.get(project_id=projectId)}


@app.route('/integrations/elasticsearch/test', methods=['POST'])
def test_elasticsearch_connection(context):
    data = app.current_request.json_body
    return {"data": log_tool_elasticsearch.ping(tenant_id=context["tenantId"], **data)}


@app.route('/{projectId}/integrations/elasticsearch', methods=['POST', 'PUT'])
def add_edit_elasticsearch(projectId, context):
    data = app.current_request.json_body

    return {"data": log_tool_elasticsearch.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}


@app.route('/{projectId}/integrations/elasticsearch', methods=['DELETE'])
def delete_elasticsearch(projectId, context):
    return {"data": log_tool_elasticsearch.delete(tenant_id=context["tenantId"], project_id=projectId)}


@app.route('/integrations/sumologic', methods=['GET'])
def get_all_sumologic(context):
    return {"data": log_tool_sumologic.get_all(tenant_id=context["tenantId"])}


@app.route('/{projectId}/integrations/sumologic', methods=['GET'])
def get_sumologic(projectId, context):
    return {"data": log_tool_sumologic.get(project_id=projectId)}


@app.route('/{projectId}/integrations/sumologic', methods=['POST', 'PUT'])
def add_edit_sumologic(projectId, context):
    data = app.current_request.json_body

    return {"data": log_tool_sumologic.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)}


@app.route('/{projectId}/integrations/sumologic', methods=['DELETE'])
def delete_sumologic(projectId, context):
    return {"data": log_tool_sumologic.delete(tenant_id=context["tenantId"], project_id=projectId)}


@app.route('/integrations/issues', methods=['GET'])
def get_integration_status(context):
    error, integration = integrations_manager.get_integration(tenant_id=context["tenantId"],
                                                              user_id=context["userId"])
    if error is not None:
        return {"data": {}}
    return {"data": integration.get_obfuscated()}


@app.route('/integrations/jira', methods=['POST', 'PUT'])
def add_edit_jira_cloud(context):
    data = app.current_request.json_body
    error, integration = integrations_manager.get_integration(tool=integration_jira_cloud.PROVIDER,
                                                              tenant_id=context["tenantId"],
                                                              user_id=context["userId"])
    if error is not None:
        return error
    return {"data": integration.add_edit(data=data)}


@app.route('/integrations/github', methods=['POST', 'PUT'])
def add_edit_github(context):
    data = app.current_request.json_body
    error, integration = integrations_manager.get_integration(tool=integration_github.PROVIDER,
                                                              tenant_id=context["tenantId"],
                                                              user_id=context["userId"])
    if error is not None:
        return error
    return {"data": integration.add_edit(data=data)}


@app.route('/integrations/issues', methods=['DELETE'])
def delete_default_issue_tracking_tool(context):
    error, integration = integrations_manager.get_integration(tenant_id=context["tenantId"],
                                                              user_id=context["userId"])
    if error is not None:
        return error
    return {"data": integration.delete()}


@app.route('/integrations/jira', methods=['DELETE'])
def delete_jira_cloud(context):
    error, integration = integrations_manager.get_integration(tool=integration_jira_cloud.PROVIDER,
                                                              tenant_id=context["tenantId"],
                                                              user_id=context["userId"])
    if error is not None:
        return error
    return {"data": integration.delete()}


@app.route('/integrations/github', methods=['DELETE'])
def delete_github(context):
    error, integration = integrations_manager.get_integration(tool=integration_github.PROVIDER,
                                                              tenant_id=context["tenantId"],
                                                              user_id=context["userId"])
    if error is not None:
        return error
    return {"data": integration.delete()}


@app.route('/integrations/issues/list_projects', methods=['GET'])
def get_all_issue_tracking_projects(context):
    error, integration = integrations_manager.get_integration(tenant_id=context["tenantId"],
                                                              user_id=context["userId"])
    if error is not None:
        return error
    data = integration.issue_handler.get_projects()
    if "errors" in data:
        return data
    return {"data": data}


@app.route('/integrations/issues/{integrationProjectId}', methods=['GET'])
def get_integration_metadata(integrationProjectId, context):
    error, integration = integrations_manager.get_integration(tenant_id=context["tenantId"],
                                                              user_id=context["userId"])
    if error is not None:
        return error
    data = integration.issue_handler.get_metas(integrationProjectId)
    if "errors" in data.keys():
        return data
    return {"data": data}


@app.route('/{projectId}/assignments', methods=['GET'])
def get_all_assignments(projectId, context):
    data = sessions_assignments.get_all(project_id=projectId, user_id=context["userId"])
    return {
        'data': data
    }


@app.route('/{projectId}/sessions2/{sessionId}/assign/projects/{integrationProjectId}', methods=['POST', 'PUT'])
def create_issue_assignment(projectId, sessionId, integrationProjectId, context):
    data = app.current_request.json_body
    data = sessions_assignments.create_new_assignment(tenant_id=context['tenantId'], project_id=projectId,
                                                      session_id=sessionId,
                                                      creator_id=context["userId"], assignee=data["assignee"],
                                                      description=data["description"], title=data["title"],
                                                      issue_type=data["issueType"],
                                                      integration_project_id=integrationProjectId)
    if "errors" in data.keys():
        return data
    return {
        'data': data
    }

@app.route('/{projectId}/gdpr', methods=['GET'])
def get_gdpr(projectId, context):
    return {"data": projects.get_gdpr(project_id=projectId)}


@app.route('/{projectId}/gdpr', methods=['POST', 'PUT'])
def edit_gdpr(projectId, context):
    data = app.current_request.json_body

    return {"data": projects.edit_gdpr(project_id=projectId, gdpr=data)}


@app.route('/password/reset/{step}', methods=['PUT', 'POST'], authorizer=None)
def reset_password_handler(step):
    data = app.current_request.json_body
    if step == "1":
        if "email" not in data or len(data["email"]) < 5:
            return {"errors": ["please provide a valid email address"]}
        return reset_password.step1(data)
    elif step == "2":
        return reset_password.step2(data)


@app.route('/{projectId}/metadata', methods=['GET'])
def get_metadata(projectId, context):
    return {"data": metadata.get(project_id=projectId)}


@app.route('/{projectId}/metadata/list', methods=['POST', 'PUT'])
def add_edit_delete_metadata(projectId, context):
    data = app.current_request.json_body

    return metadata.add_edit_delete(tenant_id=context["tenantId"], project_id=projectId, new_metas=data["list"])


@app.route('/{projectId}/metadata', methods=['POST', 'PUT'])
def add_metadata(projectId, context):
    data = app.current_request.json_body

    return metadata.add(tenant_id=context["tenantId"], project_id=projectId, new_name=data["key"])


@app.route('/{projectId}/metadata/{index}', methods=['POST', 'PUT'])
def edit_metadata(projectId, index, context):
    data = app.current_request.json_body

    return metadata.edit(tenant_id=context["tenantId"], project_id=projectId, index=int(index),
                         new_name=data["key"])


@app.route('/{projectId}/metadata/{index}', methods=['DELETE'])
def delete_metadata(projectId, index, context):
    return metadata.delete(tenant_id=context["tenantId"], project_id=projectId, index=index)


@app.route('/{projectId}/metadata/search', methods=['GET'])
def search_metadata(projectId, context):
    params = app.current_request.query_params
    q = params.get('q', '')
    key = params.get('key', '')
    if len(q) == 0 and len(key) == 0:
        return {"data": []}
    if len(q) == 0:
        return {"errors": ["please provide a value for search"]}
    if len(key) == 0:
        return {"errors": ["please provide a key for search"]}
    return metadata.search(tenant_id=context["tenantId"], project_id=projectId, value=q, key=key)


@app.route('/{projectId}/integration/sources', methods=['GET'])
def search_integrations(projectId, context):
    return log_tools.search(project_id=projectId)


@app.route('/async/email_assignment', methods=['POST', 'PUT'], authorizer=None)
def async_send_signup_emails():
    data = app.current_request.json_body
    if data.pop("auth") != environ["async_Token"]:
        return {}
    email_helper.send_assign_session(recipient=data["email"], link=data["link"], message=data["message"])


@app.route('/async/funnel/weekly_report2', methods=['POST', 'PUT'], authorizer=None)
def async_weekly_report():
    print("=========================> Sending weekly report")
    data = app.current_request.json_body
    if data.pop("auth") != environ["async_Token"]:
        return {}
    email_helper.weekly_report2(recipients=data["email"], data=data.get("data", None))


@app.route('/async/basic/{step}', methods=['POST', 'PUT'], authorizer=None)
def async_basic_emails(step):
    data = app.current_request.json_body
    if data.pop("auth") != environ["async_Token"]:
        return {}
    if step.lower() == "member_invitation":
        email_helper.send_team_invitation(recipient=data["email"], user_name=data["userName"],
                                          temp_password=data["tempPassword"], client_id=data["clientId"],
                                          sender_name=data["senderName"])


@app.route('/{projectId}/sample_rate', methods=['GET'])
def get_capture_status(projectId, context):
    return {"data": projects.get_capture_status(project_id=projectId)}


@app.route('/{projectId}/sample_rate', methods=['POST', 'PUT'])
def update_capture_status(projectId, context):
    data = app.current_request.json_body

    return {"data": projects.update_capture_status(project_id=projectId, changes=data)}


@app.route('/announcements', methods=['GET'])
def get_all_announcements(context):
    return {"data": announcements.get_all(context["userId"])}


@app.route('/announcements/view', methods=['GET'])
def view_announcements(context):
    return {"data": announcements.view(user_id=context["userId"])}


@app.route('/{projectId}/errors/{errorId}/{action}', methods=['GET'])
def add_remove_favorite_error(projectId, errorId, action, context):
    if action == "favorite":
        return errors_favorite_viewed.favorite_error(project_id=projectId, user_id=context['userId'], error_id=errorId)
    elif action == "sessions":
        params = app.current_request.query_params
        if params is None:
            params = {}
        start_date = params.get("startDate")
        end_date = params.get("endDate")
        return {
            "data": errors.get_sessions(project_id=projectId, user_id=context['userId'], error_id=errorId,
                                        start_date=start_date, end_date=end_date)}
    elif action in list(errors.ACTION_STATE.keys()):
        return errors.change_state(project_id=projectId, user_id=context['userId'], error_id=errorId, action=action)
    else:
        return {"errors": ["undefined action"]}


@app.route('/{projectId}/errors/merge', methods=['POST'])
def errors_merge(projectId, context):
    data = app.current_request.json_body

    data = errors.merge(error_ids=data.get("errors", []))
    return data


@app.route('/show_banner', methods=['GET'])
def show_banner(context):
    return {"data": False}


@app.route('/{projectId}/alerts', methods=['POST', 'PUT'])
def create_alert(projectId, context):
    data = app.current_request.json_body
    return alerts.create(projectId, data)


@app.route('/{projectId}/alerts', methods=['GET'])
def get_all_alerts(projectId, context):
    return {"data": alerts.get_all(projectId)}


@app.route('/{projectId}/alerts/{alertId}', methods=['GET'])
def get_alert(projectId, alertId, context):
    return {"data": alerts.get(alertId)}


@app.route('/{projectId}/alerts/{alertId}', methods=['POST', 'PUT'])
def update_alert(projectId, alertId, context):
    data = app.current_request.json_body
    return alerts.update(alertId, data)


@app.route('/{projectId}/alerts/{alertId}', methods=['DELETE'])
def delete_alert(projectId, alertId, context):
    return alerts.delete(projectId, alertId)


@app.route('/alerts/notifications', methods=['POST', 'PUT'], authorizer=None)
def send_alerts_notifications():
    data = app.current_request.json_body
    if data.get("token", "") != "nF46JdQqAM5v9KI9lPMpcu8o9xiJGvNNWOGL7TJP":
        return {"errors": ["missing token"]}
    return {"data": alerts.process_notifications(data.get("notifications", []))}

@app.route('/{projectId}/funnels', methods=['POST', 'PUT'])
def add_funnel(projectId, context):
    data = app.current_request.json_body
    return funnels.create(project_id=projectId,
                          user_id=context['userId'],
                          name=data["name"],
                          filter=data["filter"],
                          is_public=data.get("isPublic", False))


@app.route('/{projectId}/funnels', methods=['GET'])
def get_funnels(projectId, context):
    params = app.current_request.query_params
    if params is None:
        params = {}

    return {"data": funnels.get_by_user(project_id=projectId,
                                        user_id=context['userId'],
                                        range_value=None,
                                        start_date=None,
                                        end_date=None,
                                        details=False)}


@app.route('/{projectId}/funnels/details', methods=['GET'])
def get_funnels_with_details(projectId, context):
    params = app.current_request.query_params
    if params is None:
        params = {}

    return {"data": funnels.get_by_user(project_id=projectId,
                                        user_id=context['userId'],
                                        range_value=params.get("rangeValue", None),
                                        start_date=params.get('startDate', None),
                                        end_date=params.get('endDate', None),
                                        details=True)}


@app.route('/{projectId}/funnels/issue_types', methods=['GET'])
def get_possible_issue_types(projectId, context):
    params = app.current_request.query_params
    if params is None:
        params = {}

    return {"data": funnels.get_possible_issue_types(project_id=projectId)}


@app.route('/{projectId}/funnels/{funnelId}/insights', methods=['GET'])
def get_funnel_insights(projectId, funnelId, context):
    params = app.current_request.query_params
    if params is None:
        params = {}

    return {"data": funnels.get_top_insights(funnel_id=funnelId, project_id=projectId,
                                             range_value=params.get("range_value", None),
                                             start_date=params.get('startDate', None),
                                             end_date=params.get('endDate', None))}


@app.route('/{projectId}/funnels/{funnelId}/insights', methods=['POST', 'PUT'])
def get_funnel_insights_on_the_fly(projectId, funnelId, context):
    params = app.current_request.query_params
    if params is None:
        params = {}
    data = app.current_request.json_body
    if data is None:
        data = {}

    return {
        "data": funnels.get_top_insights_on_the_fly(funnel_id=funnelId, project_id=projectId, data={**params, **data})}


@app.route('/{projectId}/funnels/{funnelId}/issues', methods=['GET'])
def get_funnel_issues(projectId, funnelId, context):
    params = app.current_request.query_params
    if params is None:
        params = {}

    return funnels.get_issues(funnel_id=funnelId, project_id=projectId,
                              range_value=params.get("range_value", None),
                              start_date=params.get('startDate', None), end_date=params.get('endDate', None))


@app.route('/{projectId}/funnels/{funnelId}/issues', methods=['POST', 'PUT'])
def get_funnel_issues_on_the_fly(projectId, funnelId, context):
    params = app.current_request.query_params
    if params is None:
        params = {}
    data = app.current_request.json_body
    if data is None:
        data = {}

    return {"data": funnels.get_issues_on_the_fly(funnel_id=funnelId, project_id=projectId, data={**params, **data})}


@app.route('/{projectId}/funnels/{funnelId}/sessions', methods=['GET'])
def get_funnel_sessions(projectId, funnelId, context):
    params = app.current_request.query_params
    if params is None:
        params = {}

    return {"data": funnels.get_sessions(funnel_id=funnelId, user_id=context['userId'], project_id=projectId,
                                         range_value=params.get("range_value", None),
                                         start_date=params.get('startDate', None),
                                         end_date=params.get('endDate', None))}


@app.route('/{projectId}/funnels/{funnelId}/sessions', methods=['POST', 'PUT'])
def get_funnel_sessions_on_the_fly(projectId, funnelId, context):
    params = app.current_request.query_params
    if params is None:
        params = {}
    data = app.current_request.json_body
    if data is None:
        data = {}
    return {"data": funnels.get_sessions_on_the_fly(funnel_id=funnelId, user_id=context['userId'], project_id=projectId,
                                                    data={**params, **data})}


@app.route('/{projectId}/funnels/issues/{issueId}/sessions', methods=['GET'])
def get_issue_sessions(projectId, issueId, context):
    params = app.current_request.query_params
    if params is None:
        params = {}

    issue = issues.get(project_id=projectId, issue_id=issueId)
    return {
        "data": {"sessions": sessions.search_by_issue(user_id=context["userId"], project_id=projectId, issue=issue,
                                                      start_date=params.get('startDate', None),
                                                      end_date=params.get('endDate', None)),
                 "issue": issue}}


@app.route('/{projectId}/funnels/{funnelId}/issues/{issueId}/sessions', methods=['POST', 'PUT'])
def get_funnel_issue_sessions(projectId, funnelId, issueId, context):
    data = app.current_request.json_body

    data = funnels.search_by_issue(project_id=projectId, user_id=context["userId"], issue_id=issueId,
                                   funnel_id=funnelId, data=data)
    if "errors" in data:
        return data
    if data.get("issue") is None:
        data["issue"] = issues.get(project_id=projectId, issue_id=issueId)
    return {
        "data": data
    }


@app.route('/{projectId}/funnels/{funnelId}', methods=['GET'])
def get_funnel(projectId, funnelId, context):
    return {"data": funnels.get(funnel_id=funnelId,
                                project_id=projectId)}


@app.route('/{projectId}/funnels/{funnelId}', methods=['POST', 'PUT'])
def edit_funnel(projectId, funnelId, context):
    data = app.current_request.json_body
    return funnels.update(funnel_id=funnelId,
                          user_id=context['userId'],
                          name=data.get("name"),
                          filter=data.get("filter"),
                          is_public=data.get("isPublic"))


@app.route('/{projectId}/funnels/{funnelId}', methods=['DELETE'])
def delete_filter(projectId, funnelId, context):
    return funnels.delete(user_id=context['userId'], funnel_id=funnelId, project_id=projectId)


@app.route('/{projectId}/sourcemaps', methods=['PUT'], authorizer=bp_authorizers.api_key_authorizer)
def sign_sourcemap_for_upload(projectId, context):
    data = app.current_request.json_body
    project_id = projects.get_internal_project_id(projectId)
    if project_id is None:
        return Response(status_code=400, body='invalid projectId')

    return {"data": sourcemaps.presign_upload_urls(project_id=project_id, urls=data["URL"])}


@app.route('/config/weekly_report', methods=['GET'])
def get_weekly_report_config(context):
    return {"data": weekly_report.get_config(user_id=context['userId'])}


@app.route('/config/weekly_report', methods=['POST', 'PUT'])
def edit_weekly_report_config(context):
    data = app.current_request.json_body
    return {"data": weekly_report.edit_config(user_id=context['userId'], weekly_report=data.get("weeklyReport", True))}


@app.route('/{projectId}/issue_types', methods=['GET'])
def issue_types(projectId, context):
    return {"data": issues.get_types(project_id=projectId)}


@app.route('/flows', methods=['GET', 'PUT', 'POST', 'DELETE'])
@app.route('/{projectId}/flows', methods=['GET', 'PUT', 'POST', 'DELETE'])
def removed_endpoints(context):
    return Response(body={"errors": ["Endpoint no longer available"]}, status_code=410)
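
One route worth calling out: /{projectId}/sourcemaps overrides the default JWT authorizer with bp_authorizers.api_key_authorizer, so a build-time uploader can presign sourcemap uploads with an organization API key. A hedged sketch (host, project key and API key are placeholders; only the "URL" body key is taken from sign_sourcemap_for_upload above):

    curl -X PUT 'http://127.0.0.1:8000/<projectKey>/sourcemaps' \
        -H 'Authorization: <organization-api-key>' \
        -H 'Content-Type: application/json' \
        -d '{"URL": ["https://app.example.com/static/js/main.js.map"]}'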

api/chalicelib/blueprints/bp_core_crons.py (new file; 18 lines)
@@ -0,0 +1,18 @@
from chalice import Blueprint
from chalice import Cron
from chalicelib import _overrides
from chalicelib.core import reset_password, weekly_report

app = Blueprint(__name__)
_overrides.chalice_app(app)


@app.schedule(Cron('0/60', '*', '*', '*', '?', '*'))
def clear_password_reset(event):
    reset_password.cron()


# Run every Monday.
@app.schedule(Cron('5', '0', '?', '*', 'MON', '*'))
def weekly_report2(event):
    weekly_report.cron()
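
These six-field AWS-style Cron expressions only fire locally because _overrides.app_schedule rewrites them for croniter: the trailing year field is dropped and '?' becomes '*'. Tracing the weekly_report2 schedule through that conversion:

    # Cron('5', '0', '?', '*', 'MON', '*')  ->  "5 0 * * MON"
    from datetime import datetime
    from croniter import croniter

    print(croniter("5 0 * * MON", datetime.utcnow()).get_next(datetime))
    # -> next Monday at 00:05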

api/chalicelib/blueprints/bp_core_dynamic.py (new file; 385 lines)
@@ -0,0 +1,385 @@
from chalice import Blueprint, Response

from chalicelib import _overrides
from chalicelib.core import metadata, errors_favorite_viewed, slack, alerts, sessions, integration_github, \
    integrations_manager
from chalicelib.utils import captcha
from chalicelib.utils import helper
from chalicelib.utils.helper import environ

from chalicelib.core import tenants
from chalicelib.core import signup
from chalicelib.core import users
from chalicelib.core import projects
from chalicelib.core import errors
from chalicelib.core import notifications
from chalicelib.core import boarding
from chalicelib.core import webhook
from chalicelib.core.collaboration_slack import Slack

app = Blueprint(__name__)
_overrides.chalice_app(app)


@app.route('/signedups', methods=['GET'], authorizer=None)
def signed_ups():
    return {
        'data': tenants.get_tenants()
    }


@app.route('/login', methods=['POST'], authorizer=None)
def login():
    data = app.current_request.json_body
    if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]):
        return {"errors": ["Invalid captcha."]}
    r = users.authenticate(data['email'], data['password'], for_plugin=False)
    if r is None:
        return {
            'errors': ['You’ve entered invalid Email or Password.']
        }

    tenant_id = r.pop("tenantId")

    r["limits"] = {
        "teamMember": -1,
        "projects": -1,
        "metadata": metadata.get_remaining_metadata_with_count(tenant_id)}

    c = tenants.get_by_tenant_id(tenant_id)
    c.pop("createdAt")
    c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
                                          stack_integrations=True)
    return {
        'jwt': r.pop('jwt'),
        'data': {
            "user": r,
            "client": c,
        }
    }


@app.route('/account', methods=['GET'])
def get_account(context):
    r = users.get(tenant_id=context['tenantId'], user_id=context['userId'])
    return {
        'data': {
            **r,
            "limits": {
                "teamMember": -1,
                "projects": -1,
                "metadata": metadata.get_remaining_metadata_with_count(context['tenantId'])},
        }
    }


@app.route('/projects', methods=['GET'])
def get_projects(context):
    return {"data": projects.get_projects(tenant_id=context["tenantId"], recording_state=True, gdpr=True, recorded=True,
                                          stack_integrations=True)}


@app.route('/projects', methods=['POST', 'PUT'])
def create_project(context):
    data = app.current_request.json_body
    return projects.create(tenant_id=context["tenantId"], user_id=context["userId"], data=data)


@app.route('/projects/{projectId}', methods=['POST', 'PUT'])
def create_edit_project(projectId, context):
    data = app.current_request.json_body

    return projects.edit(tenant_id=context["tenantId"], user_id=context["userId"], data=data, project_id=projectId)


@app.route('/projects/{projectId}', methods=['GET'])
def get_project(projectId, context):
    return {"data": projects.get_project(tenant_id=context["tenantId"], project_id=projectId, include_last_session=True,
                                         include_gdpr=True)}


@app.route('/projects/{projectId}', methods=['DELETE'])
def delete_project(projectId, context):
    return projects.delete(tenant_id=context["tenantId"], user_id=context["userId"], project_id=projectId)


@app.route('/projects/limit', methods=['GET'])
def get_projects_limit(context):
    return {"data": {
        "current": projects.count_by_tenant(tenant_id=context["tenantId"]),
        "remaining": -1
    }}


@app.route('/client', methods=['GET'])
def get_client(context):
    r = tenants.get_by_tenant_id(context['tenantId'])
    if r is not None:
        r.pop("createdAt")
        r["projects"] = projects.get_projects(tenant_id=context['tenantId'], recording_state=True, recorded=True,
                                              stack_integrations=True)
    return {
        'data': r
    }


@app.route('/client/new_api_key', methods=['GET'])
def generate_new_tenant_token(context):
    return {
        'data': tenants.generate_new_api_key(context['tenantId'])
    }


@app.route('/client', methods=['PUT', 'POST'])
def put_client(context):
    data = app.current_request.json_body
    return tenants.update(tenant_id=context["tenantId"], user_id=context["userId"], data=data)


@app.route('/signup', methods=['GET'], authorizer=None)
def get_all_signup():
    return {"data": signup.get_signed_ups()}


@app.route('/signup', methods=['POST', 'PUT'], authorizer=None)
def signup_handler():
    data = app.current_request.json_body
    return signup.create_step1(data)


@app.route('/integrations/slack', methods=['POST', 'PUT'])
def add_slack_client(context):
    data = app.current_request.json_body
    if "url" not in data or "name" not in data:
        return {"errors": ["please provide a url and a name"]}
    if Slack.add_integration(tenant_id=context["tenantId"], url=data["url"], name=data["name"]):
        return {"data": {"status": "success"}}
    else:
        return {
            "errors": ["failed URL verification, if you received a message on slack, please notify our dev-team"]
        }
@app.route('/{projectId}/errors/search', methods=['POST'])
|
||||
def errors_search(projectId, context):
|
||||
data = app.current_request.json_body
|
||||
params = app.current_request.query_params
|
||||
if params is None:
|
||||
params = {}
|
||||
|
||||
return errors.search(data, projectId, user_id=context["userId"], status=params.get("status", "ALL"),
|
||||
favorite_only="favorite" in params)
|
||||
|
||||
|
||||
@app.route('/{projectId}/errors/stats', methods=['GET'])
|
||||
def errors_stats(projectId, context):
|
||||
params = app.current_request.query_params
|
||||
if params is None:
|
||||
params = {}
|
||||
|
||||
return errors.stats(projectId, user_id=context["userId"], **params)
|
||||
|
||||
|
||||
@app.route('/{projectId}/errors/{errorId}', methods=['GET'])
|
||||
def errors_get_details(projectId, errorId, context):
|
||||
params = app.current_request.query_params
|
||||
if params is None:
|
||||
params = {}
|
||||
|
||||
data = errors.get_details(project_id=projectId, user_id=context["userId"], error_id=errorId, **params)
|
||||
if data.get("data") is not None:
|
||||
errors_favorite_viewed.viewed_error(project_id=projectId, user_id=context['userId'], error_id=errorId)
|
||||
return data
|
||||
|
||||
|
||||
@app.route('/{projectId}/errors/{errorId}/stats', methods=['GET'])
|
||||
def errors_get_details_right_column(projectId, errorId, context):
|
||||
params = app.current_request.query_params
|
||||
if params is None:
|
||||
params = {}
|
||||
|
||||
data = errors.get_details_chart(project_id=projectId, user_id=context["userId"], error_id=errorId, **params)
|
||||
return data
|
||||
|
||||
|
||||
@app.route('/{projectId}/errors/{errorId}/sourcemaps', methods=['GET'])
|
||||
def errors_get_details_sourcemaps(projectId, errorId, context):
|
||||
data = errors.get_trace(project_id=projectId, error_id=errorId)
|
||||
if "errors" in data:
|
||||
return data
|
||||
return {
|
||||
'data': data
|
||||
}
|
||||
|
||||
|
||||
@app.route('/async/alerts/notifications/{step}', methods=['POST', 'PUT'], authorizer=None)
|
||||
def send_alerts_notification_async(step):
|
||||
data = app.current_request.json_body
|
||||
if data.pop("auth") != environ["async_Token"]:
|
||||
return {"errors": ["missing auth"]}
|
||||
if step == "slack":
|
||||
slack.send_batch(notifications_list=data.get("notifications"))
|
||||
elif step == "email":
|
||||
alerts.send_by_email_batch(notifications_list=data.get("notifications"))
|
||||
elif step == "webhook":
|
||||
webhook.trigger_batch(data_list=data.get("notifications"))
|
||||
|
||||
|
||||
@app.route('/notifications', methods=['GET'])
|
||||
def get_notifications(context):
|
||||
return {"data": notifications.get_all(tenant_id=context['tenantId'], user_id=context['userId'])}
|
||||
|
||||
|
||||
@app.route('/notifications/{notificationId}/view', methods=['GET'])
|
||||
def view_notifications(notificationId, context):
|
||||
return {"data": notifications.view_notification(notification_ids=[notificationId], user_id=context['userId'])}
|
||||
|
||||
|
||||
@app.route('/notifications/view', methods=['POST', 'PUT'])
|
||||
def batch_view_notifications(context):
|
||||
data = app.current_request.json_body
|
||||
return {"data": notifications.view_notification(notification_ids=data.get("ids", []),
|
||||
startTimestamp=data.get("startTimestamp"),
|
||||
endTimestamp=data.get("endTimestamp"),
|
||||
user_id=context['userId'],
|
||||
tenant_id=context["tenantId"])}
|
||||
|
||||
|
||||
@app.route('/notifications', methods=['POST', 'PUT'], authorizer=None)
|
||||
def create_notifications():
|
||||
data = app.current_request.json_body
|
||||
if data.get("token", "") != "nF46JdQqAM5v9KI9lPMpcu8o9xiJGvNNWOGL7TJP":
|
||||
return {"errors": ["missing token"]}
|
||||
return notifications.create(data.get("notifications", []))
|
||||
|
||||
|
||||
@app.route('/boarding', methods=['GET'])
|
||||
def get_boarding_state(context):
|
||||
return {"data": boarding.get_state(tenant_id=context["tenantId"])}
|
||||
|
||||
|
||||
@app.route('/boarding/installing', methods=['GET'])
|
||||
def get_boarding_state_installing(context):
|
||||
return {"data": boarding.get_state_installing(tenant_id=context["tenantId"])}
|
||||
|
||||
|
||||
@app.route('/boarding/identify-users', methods=['GET'])
|
||||
def get_boarding_state_identify_users(context):
|
||||
return {"data": boarding.get_state_identify_users(tenant_id=context["tenantId"])}
|
||||
|
||||
|
||||
@app.route('/boarding/manage-users', methods=['GET'])
|
||||
def get_boarding_state_manage_users(context):
|
||||
return {"data": boarding.get_state_manage_users(tenant_id=context["tenantId"])}
|
||||
|
||||
|
||||
@app.route('/boarding/integrations', methods=['GET'])
|
||||
def get_boarding_state_integrations(context):
|
||||
return {"data": boarding.get_state_integrations(tenant_id=context["tenantId"])}
|
||||
|
||||
|
||||
# this endpoint supports both jira & github based on `provider` attribute
|
||||
@app.route('/integrations/issues', methods=['POST', 'PUT'])
|
||||
def add_edit_jira_cloud_github(context):
|
||||
data = app.current_request.json_body
|
||||
provider = data.get("provider", "").upper()
|
||||
error, integration = integrations_manager.get_integration(tool=provider, tenant_id=context["tenantId"],
|
||||
user_id=context["userId"])
|
||||
if error is not None:
|
||||
return error
|
||||
return {"data": integration.add_edit(data=data)}
|
||||
|
||||
|
||||
@app.route('/integrations/slack/{integrationId}', methods=['GET'])
|
||||
def get_slack_webhook(integrationId, context):
|
||||
return {"data": webhook.get(tenant_id=context["tenantId"], webhook_id=integrationId)}
|
||||
|
||||
|
||||
@app.route('/integrations/slack/channels', methods=['GET'])
|
||||
def get_slack_integration(context):
|
||||
return {"data": webhook.get_by_type(tenant_id=context["tenantId"], webhook_type='slack')}
|
||||
|
||||
|
||||
@app.route('/integrations/slack/{integrationId}', methods=['DELETE'])
|
||||
def delete_slack_integration(integrationId, context):
|
||||
return webhook.delete(context["tenantId"], integrationId)
|
||||
|
||||
|
||||
@app.route('/webhooks', methods=['POST', 'PUT'])
|
||||
def add_edit_webhook(context):
|
||||
data = app.current_request.json_body
|
||||
return {"data": webhook.add_edit(tenant_id=context["tenantId"], data=data, replace_none=True)}
|
||||
|
||||
|
||||
@app.route('/webhooks', methods=['GET'])
|
||||
def get_webhooks(context):
|
||||
return {"data": webhook.get_by_tenant(tenant_id=context["tenantId"], replace_none=True)}
|
||||
|
||||
|
||||
@app.route('/webhooks/{webhookId}', methods=['DELETE'])
|
||||
def delete_webhook(webhookId, context):
|
||||
return {"data": webhook.delete(tenant_id=context["tenantId"], webhook_id=webhookId)}
|
||||
|
||||
|
||||
@app.route('/client/members', methods=['GET'])
|
||||
def get_members(context):
|
||||
return {"data": users.get_members(tenant_id=context['tenantId'])}
|
||||
|
||||
|
||||
@app.route('/client/members', methods=['PUT', 'POST'])
|
||||
def add_member(context):
|
||||
data = app.current_request.json_body
|
||||
return users.create_member(tenant_id=context['tenantId'], user_id=context['userId'], data=data)
|
||||
|
||||
|
||||
@app.route('/client/members/{memberId}', methods=['PUT', 'POST'])
|
||||
def edit_member(memberId, context):
|
||||
data = app.current_request.json_body
|
||||
return users.edit(tenant_id=context['tenantId'], editor_id=context['userId'], changes=data,
|
||||
user_id_to_update=memberId)
|
||||
|
||||
|
||||
@app.route('/client/members/{memberId}', methods=['DELETE'])
|
||||
def delete_member(memberId, context):
|
||||
return users.delete_member(tenant_id=context["tenantId"], user_id=context['userId'], id_to_delete=memberId)
|
||||
|
||||
|
||||
@app.route('/account/new_api_key', methods=['GET'])
|
||||
def generate_new_user_token(context):
|
||||
return {"data": users.generate_new_api_key(user_id=context['userId'])}
|
||||
|
||||
|
||||
@app.route('/account', methods=['POST', 'PUT'])
|
||||
def edit_account(context):
|
||||
data = app.current_request.json_body
|
||||
return users.edit(tenant_id=context['tenantId'], user_id_to_update=context['userId'], changes=data,
|
||||
editor_id=context['userId'])
|
||||
|
||||
|
||||
@app.route('/account/password', methods=['PUT', 'POST'])
|
||||
def change_client_password(context):
|
||||
data = app.current_request.json_body
|
||||
return users.change_password(email=context['email'], old_password=data["oldPassword"],
|
||||
new_password=data["newPassword"], tenant_id=context["tenantId"],
|
||||
user_id=context["userId"])
|
||||
|
||||
|
||||
@app.route('/metadata/session_search', methods=['GET'])
|
||||
def search_sessions_by_metadata(context):
|
||||
params = app.current_request.query_params
|
||||
if params is None:
|
||||
return {"errors": ["please provide a key&value for search"]}
|
||||
value = params.get('value', '')
|
||||
key = params.get('key', '')
|
||||
project_id = params.get('projectId')
|
||||
if len(value) == 0 and len(key) == 0:
|
||||
return {"errors": ["please provide a key&value for search"]}
|
||||
if len(value) == 0:
|
||||
return {"errors": ["please provide a value for search"]}
|
||||
if len(key) == 0:
|
||||
return {"errors": ["please provide a key for search"]}
|
||||
return {
|
||||
"data": sessions.search_by_metadata(tenant_id=context["tenantId"], user_id=context["userId"], m_value=value,
|
||||
m_key=key,
|
||||
project_id=project_id)}
|
||||
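The /login handler above expects a JSON body and returns the JWT at the top level, with user and client data nested underneath; authenticated routes then read the token from the Authorization header (see jwt_authorizer in authorizers.py further down). A minimal client sketch, assuming a hypothetical local deployment at http://localhost:8000, placeholder credentials, and captcha disabled:

import requests

API = "http://localhost:8000"  # hypothetical local endpoint

resp = requests.post(f"{API}/login", json={
    "email": "admin@example.com",  # placeholder credentials
    "password": "secret",
})
body = resp.json()
if "errors" in body:
    raise SystemExit(body["errors"])

token = body["jwt"]
# Authenticated routes expect a bearer token.
projects = requests.get(f"{API}/projects",
                        headers={"Authorization": f"Bearer {token}"}).json()
print(projects["data"])
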
13
api/chalicelib/blueprints/bp_core_dynamic_crons.py
Normal file
@@ -0,0 +1,13 @@
from chalice import Blueprint, Cron
from chalicelib import _overrides

app = Blueprint(__name__)
_overrides.chalice_app(app)

from chalicelib.core import telemetry


# Run every day.
@app.schedule(Cron('0', '0', '?', '*', '*', '*'))
def telemetry_cron(event):
    telemetry.compute()
0
api/chalicelib/blueprints/subs/__init__.py
Normal file
605
api/chalicelib/blueprints/subs/bp_dashboard.py
Normal file
@@ -0,0 +1,605 @@
from chalice import Blueprint
from chalicelib.utils import helper
from chalicelib import _overrides

from chalicelib.core import dashboard
from chalicelib.core import metadata

app = Blueprint(__name__)
_overrides.chalice_app(app)


@app.route('/{projectId}/dashboard/metadata', methods=['GET'])
def get_metadata_map(projectId, context):
    metamap = []
    for m in metadata.get(project_id=projectId):
        metamap.append({"name": m["key"], "key": f"metadata{m['index']}"})
    return {"data": metamap}


@app.route('/{projectId}/dashboard/sessions', methods=['GET', 'POST'])
def get_dashboard_processed_sessions(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_processed_sessions(project_id=projectId, **{**data, **args})}


@app.route('/{projectId}/dashboard/errors', methods=['GET', 'POST'])
def get_dashboard_errors(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_errors(project_id=projectId, **{**data, **args})}


@app.route('/{projectId}/dashboard/errors_trend', methods=['GET', 'POST'])
def get_dashboard_errors_trend(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_errors_trend(project_id=projectId, **{**data, **args})}


@app.route('/{projectId}/dashboard/application_activity', methods=['GET', 'POST'])
def get_dashboard_application_activity(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_application_activity(project_id=projectId, **{**data, **args})}


@app.route('/{projectId}/dashboard/page_metrics', methods=['GET', 'POST'])
def get_dashboard_page_metrics(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_page_metrics(project_id=projectId, **{**data, **args})}


@app.route('/{projectId}/dashboard/user_activity', methods=['GET', 'POST'])
def get_dashboard_user_activity(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_user_activity(project_id=projectId, **{**data, **args})}


@app.route('/{projectId}/dashboard/performance', methods=['GET', 'POST'])
def get_dashboard_performance(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_performance(project_id=projectId, **{**data, **args})}


@app.route('/{projectId}/dashboard/slowest_images', methods=['GET', 'POST'])
def get_dashboard_slowest_images(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_slowest_images(project_id=projectId, **{**data, **args})}


@app.route('/{projectId}/dashboard/missing_resources', methods=['GET', 'POST'])
def get_performance_sessions(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_missing_resources_trend(project_id=projectId, **{**data, **args})}


@app.route('/{projectId}/dashboard/network', methods=['GET', 'POST'])
def get_network_widget(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_network(project_id=projectId, **{**data, **args})}


@app.route('/{projectId}/dashboard/{widget}/search', methods=['GET'])
def get_dashboard_autocomplete(projectId, widget, context):
    params = app.current_request.query_params
    if params is None:
        return {"data": []}

    if widget in ['performance']:
        data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
                                platform=params.get('platform', None), performance=True)
    elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render',
                    'impacted_sessions_by_slow_pages', 'pages_response_time']:
        data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
                                platform=params.get('platform', None), pages_only=True)
    elif widget in ['resources_loading_time']:
        data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
                                platform=params.get('platform', None), performance=False)
    elif widget in ['time_between_events', 'events']:
        data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId,
                                platform=params.get('platform', None), performance=False, events_only=True)
    elif widget in ['metadata']:
        data = dashboard.search(params.get('q', ''), None, project_id=projectId,
                                platform=params.get('platform', None), metadata=True, key=params.get("key"))
    else:
        return {"errors": [f"unsupported widget: {widget}"]}
    return {'data': data}


# 1
@app.route('/{projectId}/dashboard/slowest_resources', methods=['GET', 'POST'])
def get_dashboard_slowest_resources(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_slowest_resources(project_id=projectId, **{**data, **args})}


# 2
@app.route('/{projectId}/dashboard/resources_loading_time', methods=['GET', 'POST'])
def get_dashboard_resources(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_resources_loading_time(project_id=projectId, **{**data, **args})}


# 3
@app.route('/{projectId}/dashboard/pages_dom_buildtime', methods=['GET', 'POST'])
def get_dashboard_pages_dom(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_pages_dom_build_time(project_id=projectId, **{**data, **args})}


# 4
@app.route('/{projectId}/dashboard/busiest_time_of_day', methods=['GET', 'POST'])
def get_dashboard_busiest_time_of_day(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_busiest_time_of_day(project_id=projectId, **{**data, **args})}


# 5
@app.route('/{projectId}/dashboard/sessions_location', methods=['GET', 'POST'])
def get_dashboard_sessions_location(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_sessions_location(project_id=projectId, **{**data, **args})}


# 6
@app.route('/{projectId}/dashboard/speed_location', methods=['GET', 'POST'])
def get_dashboard_speed_location(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_speed_index_location(project_id=projectId, **{**data, **args})}


# 7
@app.route('/{projectId}/dashboard/pages_response_time', methods=['GET', 'POST'])
def get_dashboard_pages_response_time(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_pages_response_time(project_id=projectId, **{**data, **args})}


# 8
@app.route('/{projectId}/dashboard/pages_response_time_distribution', methods=['GET', 'POST'])
def get_dashboard_pages_response_time_distribution(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_pages_response_time_distribution(project_id=projectId, **{**data, **args})}


# 9
@app.route('/{projectId}/dashboard/top_metrics', methods=['GET', 'POST'])
def get_dashboard_top_metrics(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_top_metrics(project_id=projectId, **{**data, **args})}


# 10
@app.route('/{projectId}/dashboard/time_to_render', methods=['GET', 'POST'])
def get_dashboard_time_to_render(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_time_to_render(project_id=projectId, **{**data, **args})}


# 11
@app.route('/{projectId}/dashboard/impacted_sessions_by_slow_pages', methods=['GET', 'POST'])
def get_dashboard_impacted_sessions_by_slow_pages(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_impacted_sessions_by_slow_pages(project_id=projectId, **{**data, **args})}


# 12
@app.route('/{projectId}/dashboard/memory_consumption', methods=['GET', 'POST'])
def get_dashboard_memory_consumption(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_memory_consumption(project_id=projectId, **{**data, **args})}


# 12.1
@app.route('/{projectId}/dashboard/fps', methods=['GET', 'POST'])
def get_dashboard_avg_fps(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_avg_fps(project_id=projectId, **{**data, **args})}


# 12.2
@app.route('/{projectId}/dashboard/cpu', methods=['GET', 'POST'])
def get_dashboard_avg_cpu(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_avg_cpu(project_id=projectId, **{**data, **args})}


# 13
@app.route('/{projectId}/dashboard/crashes', methods=['GET', 'POST'])
def get_dashboard_crashes(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_crashes(project_id=projectId, **{**data, **args})}


# 14
@app.route('/{projectId}/dashboard/domains_errors', methods=['GET', 'POST'])
def get_dashboard_domains_errors(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_domains_errors(project_id=projectId, **{**data, **args})}


# 14.1
@app.route('/{projectId}/dashboard/domains_errors_4xx', methods=['GET', 'POST'])
def get_dashboard_domains_errors_4xx(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_domains_errors_4xx(project_id=projectId, **{**data, **args})}


# 14.2
@app.route('/{projectId}/dashboard/domains_errors_5xx', methods=['GET', 'POST'])
def get_dashboard_domains_errors_5xx(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_domains_errors_5xx(project_id=projectId, **{**data, **args})}


# 15
@app.route('/{projectId}/dashboard/slowest_domains', methods=['GET', 'POST'])
def get_dashboard_slowest_domains(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_slowest_domains(project_id=projectId, **{**data, **args})}


# 16
@app.route('/{projectId}/dashboard/errors_per_domains', methods=['GET', 'POST'])
def get_dashboard_errors_per_domains(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_errors_per_domains(project_id=projectId, **{**data, **args})}


# 17
@app.route('/{projectId}/dashboard/sessions_per_browser', methods=['GET', 'POST'])
def get_dashboard_sessions_per_browser(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_sessions_per_browser(project_id=projectId, **{**data, **args})}


# 18
@app.route('/{projectId}/dashboard/calls_errors', methods=['GET', 'POST'])
def get_dashboard_calls_errors(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_calls_errors(project_id=projectId, **{**data, **args})}


# 18.1
@app.route('/{projectId}/dashboard/calls_errors_4xx', methods=['GET', 'POST'])
def get_dashboard_calls_errors_4xx(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_calls_errors_4xx(project_id=projectId, **{**data, **args})}


# 18.2
@app.route('/{projectId}/dashboard/calls_errors_5xx', methods=['GET', 'POST'])
def get_dashboard_calls_errors_5xx(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_calls_errors_5xx(project_id=projectId, **{**data, **args})}


# 19
@app.route('/{projectId}/dashboard/errors_per_type', methods=['GET', 'POST'])
def get_dashboard_errors_per_type(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_errors_per_type(project_id=projectId, **{**data, **args})}


# 20
@app.route('/{projectId}/dashboard/resources_by_party', methods=['GET', 'POST'])
def get_dashboard_resources_by_party(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_resources_by_party(project_id=projectId, **{**data, **args})}


# 21
@app.route('/{projectId}/dashboard/resource_type_vs_response_end', methods=['GET', 'POST'])
def get_dashboard_errors_per_resource_type(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.resource_type_vs_response_end(project_id=projectId, **{**data, **args})}


# 22
@app.route('/{projectId}/dashboard/resources_vs_visually_complete', methods=['GET', 'POST'])
def get_dashboard_resources_vs_visually_complete(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_resources_vs_visually_complete(project_id=projectId, **{**data, **args})}


# 23
@app.route('/{projectId}/dashboard/impacted_sessions_by_js_errors', methods=['GET', 'POST'])
def get_dashboard_impacted_sessions_by_js_errors(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_impacted_sessions_by_js_errors(project_id=projectId, **{**data, **args})}


# 24
@app.route('/{projectId}/dashboard/resources_count_by_type', methods=['GET', 'POST'])
def get_dashboard_resources_count_by_type(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": dashboard.get_resources_count_by_type(project_id=projectId, **{**data, **args})}


# 25
@app.route('/{projectId}/dashboard/time_between_events', methods=['GET'])
def get_dashboard_time_between_events(projectId, context):
    return {"errors": ["please choose 2 events"]}


@app.route('/{projectId}/dashboard/overview', methods=['GET', 'POST'])
def get_dashboard_overview(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": [
        *helper.explode_widget(key="count_sessions",
                               data=dashboard.get_processed_sessions(project_id=projectId, **{**data, **args})),
        *helper.explode_widget(data={**dashboard.get_application_activity(project_id=projectId, **{**data, **args}),
                                     "chart": dashboard.get_performance(project_id=projectId, **{**data, **args})
                                     .get("chart", [])}),
        *helper.explode_widget(data=dashboard.get_page_metrics(project_id=projectId, **{**data, **args})),
        *helper.explode_widget(data=dashboard.get_user_activity(project_id=projectId, **{**data, **args})),
        *helper.explode_widget(data=dashboard.get_pages_dom_build_time(project_id=projectId, **{**data, **args}),
                               key="avg_pages_dom_buildtime"),
        *helper.explode_widget(data=dashboard.get_pages_response_time(project_id=projectId, **{**data, **args}),
                               key="avg_pages_response_time"),
        *helper.explode_widget(dashboard.get_top_metrics(project_id=projectId, **{**data, **args})),
        *helper.explode_widget(data=dashboard.get_time_to_render(project_id=projectId, **{**data, **args}),
                               key="avg_time_to_render"),
        *helper.explode_widget(dashboard.get_memory_consumption(project_id=projectId, **{**data, **args})),
        *helper.explode_widget(dashboard.get_avg_cpu(project_id=projectId, **{**data, **args})),
        *helper.explode_widget(dashboard.get_avg_fps(project_id=projectId, **{**data, **args})),
    ]}


@app.route('/{projectId}/dashboard/errors_crashes', methods=['GET', 'POST'])
def get_dashboard_errors_crashes(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": [
        {"key": "errors",
         "data": dashboard.get_errors(project_id=projectId, **{**data, **args})},
        {"key": "errors_trend",
         "data": dashboard.get_errors_trend(project_id=projectId, **{**data, **args})},
        {"key": "crashes",
         "data": dashboard.get_crashes(project_id=projectId, **{**data, **args})},
        {"key": "domains_errors",
         "data": dashboard.get_domains_errors(project_id=projectId, **{**data, **args})},
        {"key": "errors_per_domains",
         "data": dashboard.get_errors_per_domains(project_id=projectId, **{**data, **args})},
        {"key": "calls_errors",
         "data": dashboard.get_calls_errors(project_id=projectId, **{**data, **args})},
        {"key": "errors_per_type",
         "data": dashboard.get_errors_per_type(project_id=projectId, **{**data, **args})},
        {"key": "impacted_sessions_by_js_errors",
         "data": dashboard.get_impacted_sessions_by_js_errors(project_id=projectId, **{**data, **args})}
    ]}


@app.route('/{projectId}/dashboard/resources', methods=['GET', 'POST'])
def get_dashboard_resources_group(projectId, context):
    data = app.current_request.json_body
    if data is None:
        data = {}
    params = app.current_request.query_params
    args = dashboard.dashboard_args(params)

    return {"data": [
        {"key": "slowest_images",
         "data": dashboard.get_slowest_images(project_id=projectId, **{**data, **args})},
        {"key": "missing_resources",
         "data": dashboard.get_missing_resources_trend(project_id=projectId, **{**data, **args})},
        {"key": "slowest_resources",
         "data": dashboard.get_slowest_resources(project_id=projectId, type='all', **{**data, **args})},
        {"key": "resources_loading_time",
         "data": dashboard.get_resources_loading_time(project_id=projectId, **{**data, **args})},
        {"key": "resources_by_party",
         "data": dashboard.get_resources_by_party(project_id=projectId, **{**data, **args})},
        {"key": "resource_type_vs_response_end",
         "data": dashboard.resource_type_vs_response_end(project_id=projectId, **{**data, **args})},
        {"key": "resources_vs_visually_complete",
         "data": dashboard.get_resources_vs_visually_complete(project_id=projectId, **{**data, **args})},
        {"key": "resources_count_by_type",
         "data": dashboard.get_resources_count_by_type(project_id=projectId, **{**data, **args})}
    ]}
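Every widget route above merges the JSON body with the parsed query-string arguments via {**data, **args}, so query parameters win on key collisions. A small sketch of that precedence, using a simplified stand-in for dashboard.dashboard_args (the real helper is not shown in this diff):

def dashboard_args(params):
    # Stand-in for dashboard.dashboard_args: normalize common query params.
    params = params or {}
    args = {}
    if "startDate" in params:
        args["startTimestamp"] = int(params["startDate"])
    if "endDate" in params:
        args["endTimestamp"] = int(params["endDate"])
    return args


data = {"startTimestamp": 0, "density": 7}              # JSON body
args = dashboard_args({"startDate": "1600000000000"})   # query string

merged = {**data, **args}
# Later dicts win: the query-string startTimestamp overrides the body's.
assert merged == {"startTimestamp": 1600000000000, "density": 7}
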
0
api/chalicelib/core/__init__.py
Normal file
168
api/chalicelib/core/alerts.py
Normal file
@@ -0,0 +1,168 @@
import time
from chalicelib.utils.helper import environ

from chalicelib.core import notifications
from chalicelib.utils import pg_client, helper, email_helper
from chalicelib.utils.TimeUTC import TimeUTC
import json

ALLOW_UPDATE = ["name", "description", "active", "detectionMethod", "query", "options"]


def get(id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                        SELECT *
                        FROM public.alerts
                        WHERE alert_id =%(id)s;""",
                        {"id": id})
        )
        a = helper.dict_to_camel_case(cur.fetchone())
    return __process_circular(a)


def get_all(project_id):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""\
                            SELECT *
                            FROM public.alerts
                            WHERE project_id =%(project_id)s AND deleted_at ISNULL
                            ORDER BY created_at;""",
                            {"project_id": project_id})
        cur.execute(query=query)
        all = helper.list_to_camel_case(cur.fetchall())
    for a in all:
        a = __process_circular(a)
    return all


SUPPORTED_THRESHOLD = [15, 30, 60, 120, 240, 1440]


def __transform_structure(data):
    if data.get("options") is None:
        return "Missing 'options'", None
    if data["options"].get("currentPeriod") not in SUPPORTED_THRESHOLD:
        return f"Unsupported currentPeriod, please provide one of these values {SUPPORTED_THRESHOLD}", None
    if data["options"].get("previousPeriod", 15) not in SUPPORTED_THRESHOLD:
        return f"Unsupported previousPeriod, please provide one of these values {SUPPORTED_THRESHOLD}", None
    if data["options"].get("renotifyInterval") is None:
        data["options"]["renotifyInterval"] = 720
    data["query"]["right"] = float(data["query"]["right"])
    data["query"] = json.dumps(data["query"])
    data["description"] = data["description"] if data.get("description") is not None and len(
        data["description"]) > 0 else None
    if data.get("options"):
        messages = []
        for m in data["options"].get("message", []):
            if m.get("value") is None:
                continue
            m["value"] = str(m["value"])
            messages.append(m)
        data["options"]["message"] = messages
        data["options"] = json.dumps(data["options"])
    return None, data


def __process_circular(alert):
    if alert is None:
        return None
    alert.pop("deletedAt")
    alert["createdAt"] = TimeUTC.datetime_to_timestamp(alert["createdAt"])
    return alert


def create(project_id, data):
    err, data = __transform_structure(data)
    if err is not None:
        return {"errors": [err]}
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                        INSERT INTO public.alerts(project_id, name, description, detection_method, query, options)
                        VALUES (%(project_id)s, %(name)s, %(description)s, %(detectionMethod)s, %(query)s, %(options)s::jsonb)
                        RETURNING *;""",
                        {"project_id": project_id, **data})
        )
        a = helper.dict_to_camel_case(cur.fetchone())
    return {"data": helper.dict_to_camel_case(__process_circular(a))}


def update(id, changes):
    changes = {k: changes[k] for k in changes.keys() if k in ALLOW_UPDATE}
    err, changes = __transform_structure(changes)
    if err is not None:
        return {"errors": [err]}
    updateq = []
    for k in changes.keys():
        updateq.append(f"{helper.key_to_snake_case(k)} = %({k})s")
    if len(updateq) == 0:
        return {"errors": ["nothing to update"]}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""\
                            UPDATE public.alerts
                            SET {", ".join(updateq)}
                            WHERE alert_id =%(id)s AND deleted_at ISNULL
                            RETURNING *;""",
                            {"id": id, **changes})
        cur.execute(query=query)
        a = helper.dict_to_camel_case(cur.fetchone())
    return {"data": __process_circular(a)}


def process_notifications(data):
    full = {}
    for n in data:
        if "message" in n["options"]:
            webhook_data = {}
            if "data" in n["options"]:
                webhook_data = n["options"].pop("data")
            for c in n["options"].pop("message"):
                if c["type"] not in full:
                    full[c["type"]] = []
                if c["type"] in ["slack", "email"]:
                    full[c["type"]].append({
                        "notification": n,
                        "destination": c["value"]
                    })
                elif c["type"] in ["webhook"]:
                    full[c["type"]].append({"data": webhook_data, "destination": c["value"]})
    notifications.create(data)
    BATCH_SIZE = 200
    for t in full.keys():
        for i in range(0, len(full[t]), BATCH_SIZE):
            helper.async_post(environ['alert_ntf'] % t, {"notifications": full[t][i:i + BATCH_SIZE]})


def send_by_email(notification, destination):
    if notification is None:
        return
    email_helper.alert_email(recipients=destination,
                             subject=f'"{notification["title"]}" has been triggered',
                             data={
                                 "message": f'"{notification["title"]}" {notification["description"]}',
                                 "project_id": notification["options"]["projectId"]})


def send_by_email_batch(notifications_list):
    if notifications_list is None or len(notifications_list) == 0:
        return
    for n in notifications_list:
        send_by_email(notification=n.get("notification"), destination=n.get("destination"))
        time.sleep(1)


def delete(project_id, alert_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                        UPDATE public.alerts
                        SET
                            deleted_at = timezone('utc'::text, now()),
                            active = FALSE
                        WHERE
                            alert_id = %(alert_id)s AND project_id=%(project_id)s;""",
                        {"alert_id": alert_id, "project_id": project_id})
        )
    return {"data": {"state": "success"}}
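__transform_structure expects an options object with a supported currentPeriod and a query whose right-hand side is numeric; create then serializes query and options to JSON before the ::jsonb insert. A sketch of a payload that passes validation, as called from inside this module (the field values, including detectionMethod, are illustrative, not a documented contract):

alert = {
    "name": "High error rate",
    "description": "Fires when errors spike",
    "detectionMethod": "threshold",  # illustrative value
    "query": {"left": "errors_count", "operator": ">", "right": 50},
    "options": {
        "currentPeriod": 15,   # must be one of SUPPORTED_THRESHOLD
        "previousPeriod": 15,
        "message": [{"type": "email", "value": "oncall@example.com"}],
    },
}

err, payload = __transform_structure(dict(alert))
assert err is None
# payload["query"] and payload["options"] are now JSON strings,
# ready for the %(query)s / %(options)s::jsonb parameters in create().
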
42
api/chalicelib/core/announcements.py
Normal file
@@ -0,0 +1,42 @@
from chalicelib.utils import pg_client
from chalicelib.utils import helper
from chalicelib.utils.helper import environ
from chalicelib.utils.TimeUTC import TimeUTC


def get_all(user_id):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""
            SELECT a.*, u.last >= (EXTRACT(EPOCH FROM a.created_at)*1000) AS viewed
            FROM public.announcements AS a,
                 (SELECT COALESCE(CAST(data ->> 'lastAnnouncementView' AS bigint), 0)
                  FROM public.users
                  WHERE user_id = %(userId)s
                  LIMIT 1) AS u(last)
            ORDER BY a.created_at DESC;""",
                            {"userId": user_id})
        cur.execute(query)
        announcements = helper.list_to_camel_case(cur.fetchall())
    for a in announcements:
        a["createdAt"] = TimeUTC.datetime_to_timestamp(a["createdAt"])
        if a["imageUrl"] is not None and len(a["imageUrl"]) > 0:
            a["imageUrl"] = environ["announcement_bucket"] + a["imageUrl"]
    return announcements


def view(user_id):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""
            UPDATE public.users
            SET data = data ||
                       ('{"lastAnnouncementView":' ||
                        -- keep a 20s margin so announcements created just before the view still show as unread
                        (EXTRACT(EPOCH FROM timezone('utc'::text, now())) * 1000)::bigint - 20 * 1000 ||
                        '}')::jsonb
            WHERE user_id = %(userId)s;""",
                            {"userId": user_id})
        cur.execute(query)
    return True
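The view() update relies on the jsonb || operator: the object built on the right is merged into users.data, overwriting the lastAnnouncementView key while preserving every other key. A sketch of that merge semantics, assuming a database reachable through the same pg_client helper this module uses:

from chalicelib.utils import pg_client

# `left || right` on jsonb: keys on the right replace keys on the left;
# untouched keys in the left object are preserved.
with pg_client.PostgresClient() as cur:
    cur.execute("""SELECT '{"a": 1, "lastAnnouncementView": 0}'::jsonb
                          || '{"lastAnnouncementView": 42}'::jsonb;""")
    print(cur.fetchone())  # -> {"a": 1, "lastAnnouncementView": 42}
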
61
api/chalicelib/core/authorizers.py
Normal file
@@ -0,0 +1,61 @@
from chalicelib.utils.helper import environ
import jwt
from chalicelib.utils import helper
from chalicelib.utils.TimeUTC import TimeUTC

from chalicelib.core import tenants
from chalicelib.core import users


def jwt_authorizer(token):
    token = token.split(" ")
    if len(token) != 2 or token[0].lower() != "bearer":
        return None
    try:
        payload = jwt.decode(
            token[1],
            environ["jwt_secret"],
            algorithms=environ["jwt_algorithm"],
            audience=[f"plugin:{helper.get_stage_name()}", f"front:{helper.get_stage_name()}"]
        )
    except jwt.ExpiredSignatureError:
        print("! JWT Expired signature")
        return None
    except BaseException:
        print("! JWT Base Exception")
        return None
    return payload


def jwt_context(context):
    user = users.get(user_id=context["userId"], tenant_id=context["tenantId"])
    if user is None:
        return None
    return {
        "tenantId": context["tenantId"],
        "userId": context["userId"],
        **user
    }


def generate_jwt(id, tenant_id, iat, aud):
    token = jwt.encode(
        payload={
            "userId": id,
            "tenantId": tenant_id,
            "exp": iat // 1000 + int(environ["jwt_exp_delta_seconds"]) + TimeUTC.get_utc_offset() // 1000,
            "iss": environ["jwt_issuer"],
            "iat": iat // 1000,
            "aud": aud
        },
        key=environ["jwt_secret"],
        algorithm=environ["jwt_algorithm"]
    )
    return token.decode("utf-8")


def api_key_authorizer(token):
    t = tenants.get_by_api_key(token)
    if t is not None:
        t["createdAt"] = TimeUTC.datetime_to_timestamp(t["createdAt"])
    return t
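generate_jwt and jwt_authorizer are two halves of the same round trip: the token is signed with jwt_secret / jwt_algorithm and carries iss, aud, iat and exp claims that decode() verifies. A self-contained sketch of that round trip with PyJWT, using hard-coded stand-ins for the environ values:

import time
import jwt  # PyJWT

SECRET, ALGO = "change-me", "HS512"  # stand-ins for environ values

token = jwt.encode(
    payload={"userId": 1, "tenantId": 1,
             "iat": int(time.time()),
             "exp": int(time.time()) + 3600,
             "iss": "asayer-default-fos",
             "aud": "front:default-fos"},
    key=SECRET, algorithm=ALGO)

# decode() rejects the token if the signature, exp, or audience don't match.
claims = jwt.decode(token, SECRET, algorithms=[ALGO],
                    audience=["front:default-fos", "plugin:default-fos"])
print(claims["userId"])
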
116
api/chalicelib/core/boarding.py
Normal file
@@ -0,0 +1,116 @@
from chalicelib.utils import pg_client
from chalicelib.core import projects, log_tool_datadog, log_tool_stackdriver, log_tool_sentry

from chalicelib.core import users


def get_state(tenant_id):
    my_projects = projects.get_projects(tenant_id=tenant_id, recording_state=False)
    pids = [s["projectId"] for s in my_projects]
    with pg_client.PostgresClient() as cur:
        recorded = False
        meta = False

        if len(pids) > 0:
            cur.execute(
                cur.mogrify("""\
                            SELECT COUNT(*)
                            FROM public.sessions AS s
                            WHERE s.project_id IN %(ids)s
                            LIMIT 1;""",
                            {"ids": tuple(pids)})
            )
            recorded = cur.fetchone()["count"] > 0
            meta = False
            if recorded:
                cur.execute("""SELECT SUM((SELECT COUNT(t.meta)
                                           FROM (VALUES (p.metadata_1), (p.metadata_2), (p.metadata_3), (p.metadata_4), (p.metadata_5),
                                                        (p.metadata_6), (p.metadata_7), (p.metadata_8), (p.metadata_9), (p.metadata_10),
                                                        (sessions.user_id)) AS t(meta)
                                           WHERE t.meta NOTNULL))
                               FROM public.projects AS p
                                        LEFT JOIN LATERAL ( SELECT 'defined'
                                                            FROM public.sessions
                                                            WHERE sessions.project_id = p.project_id AND sessions.user_id IS NOT NULL
                                                            LIMIT 1) AS sessions(user_id) ON (TRUE)
                               WHERE p.deleted_at ISNULL;""")

                meta = cur.fetchone()["sum"] > 0

    return [
        {"task": "Install Asayer",
         "done": recorded,
         "URL": "https://docs.asayer.io/getting-started/quick-start"},
        {"task": "Identify Users",
         "done": meta,
         "URL": "https://docs.asayer.io/data-privacy-security/metadata"},
        {"task": "Invite Team Members",
         "done": len(users.get_members(tenant_id=tenant_id)) > 1,
         "URL": "https://app.asayer.io/client/manage-users"},
        {"task": "Integrations",
         "done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0
                 or len(log_tool_sentry.get_all(tenant_id=tenant_id)) > 0
                 or len(log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0,
         "URL": "https://docs.asayer.io/integrations"}
    ]


def get_state_installing(tenant_id):
    my_projects = projects.get_projects(tenant_id=tenant_id, recording_state=False)
    pids = [s["projectId"] for s in my_projects]
    with pg_client.PostgresClient() as cur:
        recorded = False

        if len(pids) > 0:
            cur.execute(
                cur.mogrify("""\
                            SELECT COUNT(*)
                            FROM public.sessions AS s
                            WHERE s.project_id IN %(ids)s
                            LIMIT 1;""",
                            {"ids": tuple(pids)})
            )
            recorded = cur.fetchone()["count"] > 0

    return {"task": "Install Asayer",
            "done": recorded,
            "URL": "https://docs.asayer.io/getting-started/quick-start"}


def get_state_identify_users(tenant_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            """SELECT SUM((SELECT COUNT(t.meta)
                           FROM (VALUES (p.metadata_1), (p.metadata_2), (p.metadata_3), (p.metadata_4), (p.metadata_5),
                                        (p.metadata_6), (p.metadata_7), (p.metadata_8), (p.metadata_9), (p.metadata_10),
                                        (sessions.user_id)) AS t(meta)
                           WHERE t.meta NOTNULL))
               FROM public.projects AS p
                        LEFT JOIN LATERAL ( SELECT 'defined'
                                            FROM public.sessions
                                            WHERE sessions.project_id = p.project_id AND sessions.user_id IS NOT NULL
                                            LIMIT 1) AS sessions(user_id) ON (TRUE)
               WHERE p.deleted_at ISNULL;""")

        meta = cur.fetchone()["sum"] > 0

    return {"task": "Identify Users",
            "done": meta,
            "URL": "https://docs.asayer.io/data-privacy-security/metadata"}


def get_state_manage_users(tenant_id):
    return {"task": "Invite Team Members",
            "done": len(users.get_members(tenant_id=tenant_id)) > 1,
            "URL": "https://app.asayer.io/client/manage-users"}


def get_state_integrations(tenant_id):
    return {"task": "Integrations",
            "done": len(log_tool_datadog.get_all(tenant_id=tenant_id)) > 0
                    or len(log_tool_sentry.get_all(tenant_id=tenant_id)) > 0
                    or len(log_tool_stackdriver.get_all(tenant_id=tenant_id)) > 0,
            "URL": "https://docs.asayer.io/integrations"}
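get_state only runs its count query when pids is non-empty, because psycopg2 expands a Python tuple bound to %(ids)s into the SQL IN list, and an empty tuple would render as invalid SQL (IN ()). A minimal sketch of the expansion, assuming a psycopg2 connection to a hypothetical DSN:

import psycopg2

conn = psycopg2.connect("dbname=test")  # hypothetical DSN
cur = conn.cursor()

pids = [1, 2, 3]
sql = cur.mogrify(
    "SELECT COUNT(*) FROM public.sessions AS s WHERE s.project_id IN %(ids)s;",
    {"ids": tuple(pids)})
# psycopg2 renders the tuple inline: ... WHERE s.project_id IN (1, 2, 3);
print(sql)
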
126
api/chalicelib/core/collaboration_slack.py
Normal file
@@ -0,0 +1,126 @@
import requests
from chalicelib.utils.helper import environ
from datetime import datetime
from chalicelib.core import webhook


class Slack:
    @classmethod
    def add_integration(cls, tenant_id, **args):
        url = args["url"]
        name = args["name"]
        if cls.__say_hello(url):
            webhook.add(tenant_id=tenant_id,
                        endpoint=url,
                        webhook_type="slack",
                        name=name)
            return True
        return False

    @classmethod
    def __say_hello(cls, url):
        r = requests.post(
            url=url,
            json={
                "attachments": [
                    {
                        "text": "Welcome to OpenReplay",
                        "ts": datetime.now().timestamp(),
                    }
                ]
            })
        if r.status_code != 200:
            print("slack integration failed")
            print(r.text)
            return False
        return True

    @classmethod
    def send_text(cls, tenant_id, webhook_id, text, **args):
        integration = cls.__get(tenant_id=tenant_id, integration_id=webhook_id)
        if integration is None:
            return {"errors": ["slack integration not found"]}
        print("====> sending slack notification")
        r = requests.post(
            url=integration["endpoint"],
            json={
                "attachments": [
                    {
                        "text": text,
                        "ts": datetime.now().timestamp(),
                        **args
                    }
                ]
            })
        print(r)
        print(r.text)
        return {"data": r.text}

    @classmethod
    def send_batch(cls, tenant_id, webhook_id, attachments):
        integration = cls.__get(tenant_id=tenant_id, integration_id=webhook_id)
        if integration is None:
            return {"errors": ["slack integration not found"]}
        print(f"====> sending slack batch notification: {len(attachments)}")
        for i in range(0, len(attachments), 100):
            r = requests.post(
                url=integration["endpoint"],
                json={"attachments": attachments[i:i + 100]})
            if r.status_code != 200:
                print("!!!! something went wrong")
                print(r)
                print(r.text)

    @classmethod
    def __share_to_slack(cls, tenant_id, integration_id, fallback, pretext, title, title_link, text):
        integration = cls.__get(tenant_id=tenant_id, integration_id=integration_id)
        if integration is None:
            return {"errors": ["slack integration not found"]}
        r = requests.post(
            url=integration["endpoint"],
            json={
                "attachments": [
                    {
                        "fallback": fallback,
                        "pretext": pretext,
                        "title": title,
                        "title_link": title_link,
                        "text": text,
                        "ts": datetime.now().timestamp()
                    }
                ]
            })
        return r.text

    @classmethod
    def share_session(cls, tenant_id, project_id, session_id, user, comment, integration_id=None):
        args = {"fallback": f"{user} has shared the below session!",
                "pretext": f"{user} has shared the below session!",
                "title": f"{environ['SITE_URL']}/{project_id}/session/{session_id}",
                "title_link": f"{environ['SITE_URL']}/{project_id}/session/{session_id}",
                "text": comment}
        return {"data": cls.__share_to_slack(tenant_id, integration_id, **args)}

    @classmethod
    def share_error(cls, tenant_id, project_id, error_id, user, comment, integration_id=None):
        args = {"fallback": f"{user} has shared the below error!",
                "pretext": f"{user} has shared the below error!",
                "title": f"{environ['SITE_URL']}/{project_id}/errors/{error_id}",
                "title_link": f"{environ['SITE_URL']}/{project_id}/errors/{error_id}",
                "text": comment}
        return {"data": cls.__share_to_slack(tenant_id, integration_id, **args)}

    @classmethod
    def has_slack(cls, tenant_id):
        integration = cls.__get(tenant_id=tenant_id)
        return not (integration is None or len(integration) == 0)

    @classmethod
    def __get(cls, tenant_id, integration_id=None):
        if integration_id is not None:
            return webhook.get(tenant_id=tenant_id, webhook_id=integration_id)

        integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type="slack")
        if integrations is None or len(integrations) == 0:
            return None
        return integrations[0]
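send_batch above posts the attachments list in slices of at most 100 per webhook payload; the range(0, len, 100) loop is a plain chunking idiom. The same idiom in isolation:

def chunks(items, size=100):
    # Yield consecutive slices of at most `size` elements.
    for i in range(0, len(items), size):
        yield items[i:i + size]


attachments = [{"text": f"notification {n}"} for n in range(250)]
for batch in chunks(attachments):
    # each batch would become one webhook payload
    print(len(batch))  # 100, 100, 50
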
2034
api/chalicelib/core/dashboard.py
Normal file
777
api/chalicelib/core/errors.py
Normal file
@@ -0,0 +1,777 @@
|
|||
import json

from chalicelib.utils import pg_client, helper, dev
from chalicelib.core import sourcemaps, sessions
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.metrics_helper import __get_step_size


def get(error_id, family=False):
    if family:
        return get_batch([error_id])
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            "SELECT * FROM events.errors AS e INNER JOIN public.errors AS re USING(error_id) WHERE error_id = %(error_id)s;",
            {"error_id": error_id})
        cur.execute(query=query)
        result = cur.fetchone()
        if result is not None:
            result["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(result["stacktrace_parsed_at"])
        return helper.dict_to_camel_case(result)


def get_batch(error_ids):
    if len(error_ids) == 0:
        return []
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """
            WITH RECURSIVE error_family AS (
                SELECT *
                FROM public.errors
                WHERE error_id IN %(error_ids)s
                UNION
                SELECT child_errors.*
                FROM public.errors AS child_errors
                         INNER JOIN error_family
                                    ON error_family.error_id = child_errors.parent_error_id
                                        OR error_family.parent_error_id = child_errors.error_id
            )
            SELECT *
            FROM error_family;""",
            {"error_ids": tuple(error_ids)})
        cur.execute(query=query)
        return helper.list_to_camel_case(cur.fetchall())
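Note: get_batch expands the requested ids into their whole merge family via the recursive CTE: each pass pulls in rows linked through parent_error_id in either direction until the set stops growing. An equivalent in-memory traversal, for illustration only (the row shape mirrors public.errors):

def family_closure(rows, seed_ids):
    # rows: dicts with "error_id" and "parent_error_id"; seed_ids: requested ids.
    selected = {r["error_id"]: r for r in rows if r["error_id"] in seed_ids}
    grew = True
    while grew:  # fixed point, like the recursive UNION above
        grew = False
        for r in rows:
            if r["error_id"] in selected:
                continue
            linked = any(r.get("parent_error_id") == s_id
                         or selected[s_id].get("parent_error_id") == r["error_id"]
                         for s_id in list(selected))
            if linked:
                selected[r["error_id"]] = r
                grew = True
    return list(selected.values())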
def __flatten_sort_key_count_version(data, merge_nested=False):
    if data is None:
        return []
    return sorted(
        [
            {
                "name": f'{o["name"]}@{v["version"]}',
                "count": v["count"]
            } for o in data for v in o["partition"]
        ],
        key=lambda o: o["count"], reverse=True) if merge_nested else \
        [
            {
                "name": o["name"],
                "count": o["count"],
            } for o in data
        ]


def __process_tags(row):
    return [
        {"name": "browser", "partitions": __flatten_sort_key_count_version(data=row.get("browsers_partition"))},
        {"name": "browser.ver",
         "partitions": __flatten_sort_key_count_version(data=row.pop("browsers_partition"), merge_nested=True)},
        {"name": "OS", "partitions": __flatten_sort_key_count_version(data=row.get("os_partition"))},
        {"name": "OS.ver",
         "partitions": __flatten_sort_key_count_version(data=row.pop("os_partition"), merge_nested=True)},
        {"name": "device.family", "partitions": __flatten_sort_key_count_version(data=row.get("device_partition"))},
        {"name": "device",
         "partitions": __flatten_sort_key_count_version(data=row.pop("device_partition"), merge_nested=True)},
        {"name": "country", "partitions": row.pop("country_partition")}
    ]
def get_details(project_id, error_id, user_id, **data):
    pg_sub_query24 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size24")
    pg_sub_query24.append("error_id = %(error_id)s")
    pg_sub_query30 = __get_basic_constraints(time_constraint=False, chart=True, step_size_name="step_size30")
    pg_sub_query30.append("error_id = %(error_id)s")
    pg_basic_query = __get_basic_constraints(time_constraint=False)
    pg_basic_query.append("error_id = %(error_id)s")
    with pg_client.PostgresClient() as cur:
        data["startDate24"] = TimeUTC.now(-1)
        data["endDate24"] = TimeUTC.now()
        data["startDate30"] = TimeUTC.now(-30)
        data["endDate30"] = TimeUTC.now()
        density24 = int(data.get("density24", 24))
        step_size24 = __get_step_size(data["startDate24"], data["endDate24"], density24, factor=1)
        density30 = int(data.get("density30", 30))
        step_size30 = __get_step_size(data["startDate30"], data["endDate30"], density30, factor=1)
        params = {
            "startDate24": data['startDate24'],
            "endDate24": data['endDate24'],
            "startDate30": data['startDate30'],
            "endDate30": data['endDate30'],
            "project_id": project_id,
            "userId": user_id,
            "step_size24": step_size24,
            "step_size30": step_size30,
            "error_id": error_id}

        main_pg_query = f"""\
        SELECT error_id,
               name,
               message,
               users,
               sessions,
               last_occurrence,
               first_occurrence,
               last_session_id,
               browsers_partition,
               os_partition,
               device_partition,
               country_partition,
               chart24,
               chart30
        FROM (SELECT error_id,
                     name,
                     message,
                     COUNT(DISTINCT user_uuid)  AS users,
                     COUNT(DISTINCT session_id) AS sessions
              FROM public.errors
                       INNER JOIN events.errors AS s_errors USING (error_id)
                       INNER JOIN public.sessions USING (session_id)
              WHERE error_id = %(error_id)s
              GROUP BY error_id, name, message) AS details
                 INNER JOIN (SELECT error_id,
                                    MAX(timestamp) AS last_occurrence,
                                    MIN(timestamp) AS first_occurrence
                             FROM events.errors
                             WHERE error_id = %(error_id)s
                             GROUP BY error_id) AS time_details USING (error_id)
                 INNER JOIN (SELECT error_id,
                                    session_id AS last_session_id,
                                    user_os,
                                    user_os_version,
                                    user_browser,
                                    user_browser_version,
                                    user_device,
                                    user_device_type,
                                    user_uuid
                             FROM events.errors INNER JOIN public.sessions USING (session_id)
                             WHERE error_id = %(error_id)s
                             ORDER BY errors.timestamp DESC
                             LIMIT 1) AS last_session_details USING (error_id)
                 INNER JOIN (SELECT jsonb_agg(browser_details) AS browsers_partition
                             FROM (SELECT *
                                   FROM (SELECT user_browser      AS name,
                                                COUNT(session_id) AS count
                                         FROM events.errors
                                                  INNER JOIN sessions USING (session_id)
                                         WHERE {" AND ".join(pg_basic_query)}
                                         GROUP BY user_browser
                                         ORDER BY count DESC) AS count_per_browser_query
                                            INNER JOIN LATERAL (SELECT JSONB_AGG(version_details) AS partition
                                                                FROM (SELECT user_browser_version AS version,
                                                                             COUNT(session_id)    AS count
                                                                      FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                                      WHERE {" AND ".join(pg_basic_query)}
                                                                        AND sessions.user_browser = count_per_browser_query.name
                                                                      GROUP BY user_browser_version
                                                                      ORDER BY count DESC) AS version_details
                                       ) AS browser_version_details ON (TRUE)) AS browser_details) AS browser_details ON (TRUE)
                 INNER JOIN (SELECT jsonb_agg(os_details) AS os_partition
                             FROM (SELECT *
                                   FROM (SELECT user_os           AS name,
                                                COUNT(session_id) AS count
                                         FROM events.errors INNER JOIN public.sessions USING (session_id)
                                         WHERE {" AND ".join(pg_basic_query)}
                                         GROUP BY user_os
                                         ORDER BY count DESC) AS count_per_os_details
                                            INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition
                                                                FROM (SELECT COALESCE(user_os_version, 'unknown') AS version,
                                                                             COUNT(session_id)                    AS count
                                                                      FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                                      WHERE {" AND ".join(pg_basic_query)}
                                                                        AND sessions.user_os = count_per_os_details.name
                                                                      GROUP BY user_os_version
                                                                      ORDER BY count DESC) AS count_per_version_details
                                                                GROUP BY count_per_os_details.name) AS os_version_details
                                       ON (TRUE)) AS os_details) AS os_details ON (TRUE)
                 INNER JOIN (SELECT jsonb_agg(device_details) AS device_partition
                             FROM (SELECT *
                                   FROM (SELECT user_device_type  AS name,
                                                COUNT(session_id) AS count
                                         FROM events.errors INNER JOIN public.sessions USING (session_id)
                                         WHERE {" AND ".join(pg_basic_query)}
                                         GROUP BY user_device_type
                                         ORDER BY count DESC) AS count_per_device_details
                                            INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_v_details) AS partition
                                                                FROM (SELECT CASE
                                                                                 WHEN user_device = '' OR user_device ISNULL
                                                                                     THEN 'unknown'
                                                                                 ELSE user_device END AS version,
                                                                             COUNT(session_id)        AS count
                                                                      FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                                      WHERE {" AND ".join(pg_basic_query)}
                                                                        AND sessions.user_device_type = count_per_device_details.name
                                                                      GROUP BY user_device
                                                                      ORDER BY count DESC) AS count_per_device_v_details
                                                                GROUP BY count_per_device_details.name) AS device_version_details
                                       ON (TRUE)) AS device_details) AS device_details ON (TRUE)
                 INNER JOIN (SELECT jsonb_agg(count_per_country_details) AS country_partition
                             FROM (SELECT user_country      AS name,
                                          COUNT(session_id) AS count
                                   FROM events.errors INNER JOIN public.sessions USING (session_id)
                                   WHERE {" AND ".join(pg_basic_query)}
                                   GROUP BY user_country
                                   ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE)
                 INNER JOIN (SELECT jsonb_agg(chart_details) AS chart24
                             FROM (SELECT generated_timestamp AS timestamp,
                                          COUNT(session_id)   AS count
                                   FROM generate_series(%(startDate24)s, %(endDate24)s, %(step_size24)s) AS generated_timestamp
                                            LEFT JOIN LATERAL (SELECT DISTINCT session_id
                                                               FROM events.errors
                                                                        INNER JOIN public.sessions USING (session_id)
                                                               WHERE {" AND ".join(pg_sub_query24)}
                                       ) AS chart_details ON (TRUE)
                                   GROUP BY generated_timestamp
                                   ORDER BY generated_timestamp) AS chart_details) AS chart_details24 ON (TRUE)
                 INNER JOIN (SELECT jsonb_agg(chart_details) AS chart30
                             FROM (SELECT generated_timestamp AS timestamp,
                                          COUNT(session_id)   AS count
                                   FROM generate_series(%(startDate30)s, %(endDate30)s, %(step_size30)s) AS generated_timestamp
                                            LEFT JOIN LATERAL (SELECT DISTINCT session_id
                                                               FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                               WHERE {" AND ".join(pg_sub_query30)}) AS chart_details
                                                      ON (TRUE)
                                   GROUP BY timestamp
                                   ORDER BY timestamp) AS chart_details) AS chart_details30 ON (TRUE);
        """

        # print("--------------------")
        # print(cur.mogrify(main_pg_query, params))
        # print("--------------------")
        cur.execute(cur.mogrify(main_pg_query, params))
        row = cur.fetchone()
        if row is None:
            return {"errors": ["error doesn't exist"]}
        row["tags"] = __process_tags(row)

        query = cur.mogrify(
            f"""SELECT error_id, status, session_id, start_ts,
                       parent_error_id, session_id, user_anonymous_id,
                       user_id, user_uuid, user_browser, user_browser_version,
                       user_os, user_os_version, user_device, payload,
                       COALESCE((SELECT TRUE
                                 FROM public.user_favorite_errors AS fe
                                 WHERE pe.error_id = fe.error_id
                                   AND fe.user_id = %(user_id)s), FALSE) AS favorite,
                       True AS viewed
                FROM public.errors AS pe
                         INNER JOIN events.errors AS ee USING (error_id)
                         INNER JOIN public.sessions USING (session_id)
                WHERE pe.project_id = %(project_id)s
                  AND error_id = %(error_id)s
                ORDER BY start_ts DESC
                LIMIT 1;""",
            {"project_id": project_id, "error_id": error_id, "user_id": user_id})
        cur.execute(query=query)
        status = cur.fetchone()

    if status is not None:
        row["stack"] = format_first_stack_frame(status).pop("stack")
        row["status"] = status.pop("status")
        row["parent_error_id"] = status.pop("parent_error_id")
        row["favorite"] = status.pop("favorite")
        row["viewed"] = status.pop("viewed")
        row["last_hydrated_session"] = status
    else:
        row["stack"] = []
        row["last_hydrated_session"] = None
        row["status"] = "untracked"
        row["parent_error_id"] = None
        row["favorite"] = False
        row["viewed"] = False
    return {"data": helper.dict_to_camel_case(row)}
def get_details_chart(project_id, error_id, user_id, **data):
    pg_sub_query = __get_basic_constraints()
    pg_sub_query.append("error_id = %(error_id)s")
    pg_sub_query_chart = __get_basic_constraints(time_constraint=False, chart=True)
    pg_sub_query_chart.append("error_id = %(error_id)s")
    with pg_client.PostgresClient() as cur:
        if data.get("startDate") is None:
            data["startDate"] = TimeUTC.now(-7)
        else:
            data["startDate"] = int(data["startDate"])
        if data.get("endDate") is None:
            data["endDate"] = TimeUTC.now()
        else:
            data["endDate"] = int(data["endDate"])
        density = int(data.get("density", 7))
        step_size = __get_step_size(data["startDate"], data["endDate"], density, factor=1)
        params = {
            "startDate": data['startDate'],
            "endDate": data['endDate'],
            "project_id": project_id,
            "userId": user_id,
            "step_size": step_size,
            "error_id": error_id}

        main_pg_query = f"""\
        SELECT %(error_id)s AS error_id,
               browsers_partition,
               os_partition,
               device_partition,
               country_partition,
               chart
        FROM (SELECT jsonb_agg(browser_details) AS browsers_partition
              FROM (SELECT *
                    FROM (SELECT user_browser      AS name,
                                 COUNT(session_id) AS count
                          FROM events.errors INNER JOIN public.sessions USING (session_id)
                          WHERE {" AND ".join(pg_sub_query)}
                          GROUP BY user_browser
                          ORDER BY count DESC) AS count_per_browser_query
                             INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_details) AS partition
                                                 FROM (SELECT user_browser_version AS version,
                                                              COUNT(session_id)    AS count
                                                       FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                       WHERE {" AND ".join(pg_sub_query)}
                                                         AND user_browser = count_per_browser_query.name
                                                       GROUP BY user_browser_version
                                                       ORDER BY count DESC) AS count_per_version_details) AS browser_version_details
                        ON (TRUE)) AS browser_details) AS browser_details
                 INNER JOIN (SELECT jsonb_agg(os_details) AS os_partition
                             FROM (SELECT *
                                   FROM (SELECT user_os           AS name,
                                                COUNT(session_id) AS count
                                         FROM events.errors INNER JOIN public.sessions USING (session_id)
                                         WHERE {" AND ".join(pg_sub_query)}
                                         GROUP BY user_os
                                         ORDER BY count DESC) AS count_per_os_details
                                            INNER JOIN LATERAL (SELECT jsonb_agg(count_per_version_query) AS partition
                                                                FROM (SELECT COALESCE(user_os_version, 'unknown') AS version,
                                                                             COUNT(session_id)                    AS count
                                                                      FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                                      WHERE {" AND ".join(pg_sub_query)}
                                                                        AND user_os = count_per_os_details.name
                                                                      GROUP BY user_os_version
                                                                      ORDER BY count DESC) AS count_per_version_query
                                       ) AS os_version_query ON (TRUE)) AS os_details) AS os_details ON (TRUE)
                 INNER JOIN (SELECT jsonb_agg(device_details) AS device_partition
                             FROM (SELECT *
                                   FROM (SELECT user_device_type  AS name,
                                                COUNT(session_id) AS count
                                         FROM events.errors INNER JOIN public.sessions USING (session_id)
                                         WHERE {" AND ".join(pg_sub_query)}
                                         GROUP BY user_device_type
                                         ORDER BY count DESC) AS count_per_device_details
                                            INNER JOIN LATERAL (SELECT jsonb_agg(count_per_device_details) AS partition
                                                                FROM (SELECT CASE
                                                                                 WHEN user_device = '' OR user_device ISNULL
                                                                                     THEN 'unknown'
                                                                                 ELSE user_device END AS version,
                                                                             COUNT(session_id)        AS count
                                                                      FROM events.errors INNER JOIN public.sessions USING (session_id)
                                                                      WHERE {" AND ".join(pg_sub_query)}
                                                                        AND user_device_type = count_per_device_details.name
                                                                      GROUP BY user_device_type, user_device
                                                                      ORDER BY count DESC) AS count_per_device_details
                                       ) AS device_version_details ON (TRUE)) AS device_details) AS device_details ON (TRUE)
                 INNER JOIN (SELECT jsonb_agg(count_per_country_details) AS country_partition
                             FROM (SELECT user_country      AS name,
                                          COUNT(session_id) AS count
                                   FROM events.errors INNER JOIN public.sessions USING (session_id)
                                   WHERE {" AND ".join(pg_sub_query)}
                                   GROUP BY user_country
                                   ORDER BY count DESC) AS count_per_country_details) AS country_details ON (TRUE)
                 INNER JOIN (SELECT jsonb_agg(chart_details) AS chart
                             FROM (SELECT generated_timestamp AS timestamp,
                                          COUNT(session_id)   AS count
                                   FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
                                            LEFT JOIN LATERAL (SELECT DISTINCT session_id
                                                               FROM events.errors
                                                                        INNER JOIN public.sessions USING (session_id)
                                                               WHERE {" AND ".join(pg_sub_query_chart)}
                                       ) AS chart_details ON (TRUE)
                                   GROUP BY generated_timestamp
                                   ORDER BY generated_timestamp) AS chart_details) AS chart_details ON (TRUE);"""

        cur.execute(cur.mogrify(main_pg_query, params))
        row = cur.fetchone()
        if row is None:
            return {"errors": ["error doesn't exist"]}
        row["tags"] = __process_tags(row)
        return {"data": helper.dict_to_camel_case(row)}
def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_name="startDate",
                            endTime_arg_name="endDate", chart=False, step_size_name="step_size",
                            project_key="project_id"):
    ch_sub_query = [f"{project_key} =%(project_id)s"]
    if time_constraint:
        ch_sub_query += [f"timestamp >= %({startTime_arg_name})s",
                         f"timestamp < %({endTime_arg_name})s"]
    if chart:
        ch_sub_query += [f"timestamp >= generated_timestamp",
                         f"timestamp < generated_timestamp + %({step_size_name})s"]
    if platform == 'mobile':
        ch_sub_query.append("user_device_type = 'mobile'")
    elif platform == 'desktop':
        ch_sub_query.append("user_device_type = 'desktop'")
    return ch_sub_query


def __get_sort_key(key):
    return {
        "datetime": "max_datetime",
        "lastOccurrence": "max_datetime",
        "firstOccurrence": "min_datetime"
    }.get(key, 'max_datetime')
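Note: the constraint helper returns a plain list of SQL predicates; every query in this module then assembles its WHERE clause by joining such a list with " AND ". A small sketch of the pattern as used above:

constraints = __get_basic_constraints(platform="mobile")
constraints.append("error_id = %(error_id)s")
where_clause = " AND ".join(constraints)
# -> "project_id =%(project_id)s AND timestamp >= %(startDate)s AND timestamp < %(endDate)s
#     AND user_device_type = 'mobile' AND error_id = %(error_id)s"
sql = f"SELECT COUNT(*) FROM events.errors WHERE {where_clause};"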
@dev.timed
def search(data, project_id, user_id, flows=False, status="ALL", favorite_only=False):
    status = status.upper()
    if status.lower() not in ['all', 'unresolved', 'resolved', 'ignored']:
        return {"errors": ["invalid error status"]}
    pg_sub_query = __get_basic_constraints(data.get('platform'), project_key="sessions.project_id")
    pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
                     "pe.project_id=%(project_id)s"]
    pg_sub_query_chart = __get_basic_constraints(data.get('platform'), time_constraint=False, chart=True)
    pg_sub_query_chart.append("source ='js_exception'")
    pg_sub_query_chart.append("errors.error_id =details.error_id")
    statuses = []
    error_ids = None
    if data.get("startDate") is None:
        data["startDate"] = TimeUTC.now(-30)
    if data.get("endDate") is None:
        data["endDate"] = TimeUTC.now(1)
    if len(data.get("events", [])) > 0 or len(data.get("filters", [])) > 0 or status != "ALL" or favorite_only:
        statuses = sessions.search2_pg(data=data, project_id=project_id, user_id=user_id, errors_only=True,
                                       error_status=status, favorite_only=favorite_only)
        if len(statuses) == 0:
            return {"data": {
                'total': 0,
                'errors': []
            }}
        error_ids = [e["error_id"] for e in statuses]
    with pg_client.PostgresClient() as cur:
        if data.get("startDate") is None:
            data["startDate"] = TimeUTC.now(-7)
        if data.get("endDate") is None:
            data["endDate"] = TimeUTC.now()
        density = data.get("density", 7)
        step_size = __get_step_size(data["startDate"], data["endDate"], density, factor=1)
        sort = __get_sort_key('datetime')
        if data.get("sort") is not None:
            sort = __get_sort_key(data["sort"])
        order = "DESC"
        if data.get("order") is not None:
            order = data["order"]

        params = {
            "startDate": data['startDate'],
            "endDate": data['endDate'],
            "project_id": project_id,
            "userId": user_id,
            "step_size": step_size}
        if error_ids is not None:
            params["error_ids"] = tuple(error_ids)
            pg_sub_query.append("error_id IN %(error_ids)s")
        main_pg_query = f"""\
        SELECT error_id,
               name,
               message,
               users,
               sessions,
               last_occurrence,
               first_occurrence,
               chart
        FROM (SELECT error_id,
                     name,
                     message,
                     COUNT(DISTINCT user_uuid)  AS users,
                     COUNT(DISTINCT session_id) AS sessions,
                     MAX(timestamp)             AS max_datetime,
                     MIN(timestamp)             AS min_datetime
              FROM events.errors
                       INNER JOIN public.errors AS pe USING (error_id)
                       INNER JOIN public.sessions USING (session_id)
              WHERE {" AND ".join(pg_sub_query)}
              GROUP BY error_id, name, message
              ORDER BY {sort} {order}) AS details
                 INNER JOIN LATERAL (SELECT MAX(timestamp) AS last_occurrence,
                                            MIN(timestamp) AS first_occurrence
                                     FROM events.errors
                                     WHERE errors.error_id = details.error_id) AS time_details ON (TRUE)
                 INNER JOIN LATERAL (SELECT jsonb_agg(chart_details) AS chart
                                     FROM (SELECT generated_timestamp AS timestamp,
                                                  COUNT(session_id)   AS count
                                           FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp
                                                    LEFT JOIN LATERAL (SELECT DISTINCT session_id
                                                                       FROM events.errors INNER JOIN public.errors AS m_errors USING (error_id)
                                                                       WHERE {" AND ".join(pg_sub_query_chart)}
                                               ) AS sessions ON (TRUE)
                                           GROUP BY timestamp
                                           ORDER BY timestamp) AS chart_details) AS chart_details ON (TRUE);"""

        print("--------------------")
        print(cur.mogrify(main_pg_query, params))
        cur.execute(cur.mogrify(main_pg_query, params))
        total = cur.rowcount
        if flows:
            return {"data": {"count": total}}
        row = cur.fetchone()
        rows = []
        limit = 200
        while row is not None and len(rows) < limit:
            rows.append(row)
            row = cur.fetchone()
        if total == 0:
            rows = []
        else:
            if len(statuses) == 0:
                query = cur.mogrify(
                    """SELECT error_id, status, parent_error_id, payload,
                              COALESCE((SELECT TRUE
                                        FROM public.user_favorite_errors AS fe
                                        WHERE errors.error_id = fe.error_id
                                          AND fe.user_id = %(user_id)s LIMIT 1), FALSE) AS favorite,
                              COALESCE((SELECT TRUE
                                        FROM public.user_viewed_errors AS ve
                                        WHERE errors.error_id = ve.error_id
                                          AND ve.user_id = %(user_id)s LIMIT 1), FALSE) AS viewed
                       FROM public.errors
                       WHERE project_id = %(project_id)s AND error_id IN %(error_ids)s;""",
                    {"project_id": project_id, "error_ids": tuple([r["error_id"] for r in rows]),
                     "user_id": user_id})
                cur.execute(query=query)
                statuses = cur.fetchall()
            statuses = {
                s["error_id"]: s for s in statuses
            }

            for r in rows:
                if r["error_id"] in statuses:
                    r["status"] = statuses[r["error_id"]]["status"]
                    r["parent_error_id"] = statuses[r["error_id"]]["parent_error_id"]
                    r["favorite"] = statuses[r["error_id"]]["favorite"]
                    r["viewed"] = statuses[r["error_id"]]["viewed"]
                    r["stack"] = format_first_stack_frame(statuses[r["error_id"]])["stack"]
                else:
                    r["status"] = "untracked"
                    r["parent_error_id"] = None
                    r["favorite"] = False
                    r["viewed"] = False
                    r["stack"] = None

        offset = len(rows)
        rows = [r for r in rows if r["stack"] is None
                or (len(r["stack"]) == 0 or len(r["stack"]) > 1
                    or len(r["stack"]) > 0
                    and (r["message"].lower() != "script error." or len(r["stack"][0]["absPath"]) > 0))]
        offset -= len(rows)
        return {
            "data": {
                'total': total - offset,
                'errors': helper.list_to_camel_case(rows)
            }
        }
def __save_stacktrace(error_id, data):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """UPDATE public.errors
               SET stacktrace=%(data)s::jsonb, stacktrace_parsed_at=timezone('utc'::text, now())
               WHERE error_id = %(error_id)s;""",
            {"error_id": error_id, "data": json.dumps(data)})
        cur.execute(query=query)


def get_trace(project_id, error_id):
    error = get(error_id=error_id)
    if error is None:
        return {"errors": ["error not found"]}
    if error.get("source", "") != "js_exception":
        return {"errors": ["this source of errors doesn't have a sourcemap"]}
    if error.get("payload") is None:
        return {"errors": ["null payload"]}
    if error.get("stacktrace") is not None:
        return {"sourcemapUploaded": True,
                "trace": error.get("stacktrace"),
                "preparsed": True}
    trace, all_exists = sourcemaps.get_traces_group(project_id=project_id, payload=error["payload"])
    if all_exists:
        __save_stacktrace(error_id=error_id, data=trace)
    return {"sourcemapUploaded": all_exists,
            "trace": trace,
            "preparsed": False}
def get_sessions(start_date, end_date, project_id, user_id, error_id):
    extra_constraints = ["s.project_id = %(project_id)s",
                         "s.start_ts >= %(startDate)s",
                         "s.start_ts <= %(endDate)s",
                         "e.error_id = %(error_id)s"]
    if start_date is None:
        start_date = TimeUTC.now(-7)
    if end_date is None:
        end_date = TimeUTC.now()

    params = {
        "startDate": start_date,
        "endDate": end_date,
        "project_id": project_id,
        "userId": user_id,
        "error_id": error_id}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            f"""SELECT s.project_id,
                       s.session_id::text AS session_id,
                       s.user_uuid,
                       s.user_id,
                       s.user_agent,
                       s.user_os,
                       s.user_browser,
                       s.user_device,
                       s.user_country,
                       s.start_ts,
                       s.duration,
                       s.events_count,
                       s.pages_count,
                       s.errors_count,
                       s.issue_types,
                       COALESCE((SELECT TRUE
                                 FROM public.user_favorite_sessions AS fs
                                 WHERE s.session_id = fs.session_id
                                   AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS favorite,
                       COALESCE((SELECT TRUE
                                 FROM public.user_viewed_sessions AS fs
                                 WHERE s.session_id = fs.session_id
                                   AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
                FROM public.sessions AS s INNER JOIN events.errors AS e USING (session_id)
                WHERE {" AND ".join(extra_constraints)}
                ORDER BY s.start_ts DESC;""",
            params)
        cur.execute(query=query)
        sessions_list = []
        total = cur.rowcount
        row = cur.fetchone()
        while row is not None and len(sessions_list) < 100:
            sessions_list.append(row)
            row = cur.fetchone()

    return {
        'total': total,
        'sessions': helper.list_to_camel_case(sessions_list)
    }
ACTION_STATE = {
    "unsolve": 'unresolved',
    "solve": 'resolved',
    "ignore": 'ignored'
}


def change_state(project_id, user_id, error_id, action):
    errors = get(error_id, family=True)
    status = ACTION_STATE.get(action)
    if errors is None or len(errors) == 0:
        return {"errors": ["error not found"]}
    # debug trace; moved after the emptiness check so it cannot fail on None
    print(len(errors))
    if errors[0]["status"] == status:
        return {"errors": [f"error is already {status}"]}

    if errors[0]["status"] == ACTION_STATE["solve"] and status == ACTION_STATE["ignore"]:
        return {"errors": [f"state transition not permitted {errors[0]['status']} -> {status}"]}

    params = {
        "userId": user_id,
        "error_ids": tuple([e["errorId"] for e in errors]),
        "status": status}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """UPDATE public.errors
               SET status = %(status)s
               WHERE error_id IN %(error_ids)s
               RETURNING status""",
            params)
        cur.execute(query=query)
        row = cur.fetchone()
    if row is not None:
        for e in errors:
            e["status"] = row["status"]
    return {"data": errors}
MAX_RANK = 2


def __status_rank(status):
    return {
        'unresolved': MAX_RANK - 2,
        'ignored': MAX_RANK - 1,
        'resolved': MAX_RANK
    }.get(status)


def merge(error_ids):
    error_ids = list(set(error_ids))
    errors = get_batch(error_ids)
    if len(error_ids) <= 1 or len(error_ids) > len(errors):
        return {"errors": ["invalid list of ids"]}
    error_ids = [e["errorId"] for e in errors]
    parent_error_id = error_ids[0]
    status = "unresolved"
    for e in errors:
        if __status_rank(status) < __status_rank(e["status"]):
            status = e["status"]
            if __status_rank(status) == MAX_RANK:
                break
    params = {
        "error_ids": tuple(error_ids),
        "parent_error_id": parent_error_id,
        "status": status
    }
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """UPDATE public.errors
               SET parent_error_id = %(parent_error_id)s, status = %(status)s
               WHERE error_id IN %(error_ids)s OR parent_error_id IN %(error_ids)s;""",
            params)
        cur.execute(query=query)
        # row = cur.fetchone()

    return {"data": "success"}
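Note: when a family is merged, the surviving status is the highest-ranked one among its members, per __status_rank: unresolved (0) < ignored (1) < resolved (2). For illustration:

statuses = ["unresolved", "resolved", "ignored"]
winner = max(statuses, key=__status_rank)
# -> "resolved"; the loop in merge short-circuits as soon as MAX_RANK is reached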
def format_first_stack_frame(error):
    error["stack"] = sourcemaps.format_payload(error.pop("payload"), truncate_to_first=True)
    for s in error["stack"]:
        for c in s.get("context", []):
            for sci, sc in enumerate(c):
                if isinstance(sc, str) and len(sc) > 1000:
                    c[sci] = sc[:1000]
        # convert bytes to string:
        if isinstance(s["filename"], bytes):
            s["filename"] = s["filename"].decode("utf-8")
    return error


def stats(project_id, user_id, startTimestamp=None, endTimestamp=None):
    # defaults are computed per call; a TimeUTC.now() default in the signature
    # would be evaluated once at import time and frozen
    if startTimestamp is None:
        startTimestamp = TimeUTC.now(delta_days=-7)
    if endTimestamp is None:
        endTimestamp = TimeUTC.now()
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """
            SELECT COUNT(errors.*) AS unresolved_and_unviewed
            FROM public.errors
                     INNER JOIN (SELECT root_error.error_id
                                 FROM events.errors
                                          INNER JOIN public.errors AS root_error USING (error_id)
                                 WHERE project_id = %(project_id)s
                                   AND timestamp >= %(startTimestamp)s
                                   AND timestamp <= %(endTimestamp)s
                                   AND source = 'js_exception') AS timed_errors USING (error_id)
                     LEFT JOIN (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(user_id)s) AS user_viewed
                               USING (error_id)
            WHERE user_viewed.error_id ISNULL
              AND errors.project_id = %(project_id)s
              AND errors.status = 'unresolved'
              AND errors.source = 'js_exception';""",
            {"project_id": project_id, "user_id": user_id, "startTimestamp": startTimestamp,
             "endTimestamp": endTimestamp})
        cur.execute(query=query)
        row = cur.fetchone()

    return {
        "data": helper.dict_to_camel_case(row)
    }
91
api/chalicelib/core/errors_favorite_viewed.py
Normal file

@@ -0,0 +1,91 @@
from chalicelib.utils import pg_client


def add_favorite_error(project_id, user_id, error_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                INSERT INTO public.user_favorite_errors
                    (user_id, error_id)
                VALUES (%(userId)s, %(error_id)s);""",
                        {"userId": user_id, "error_id": error_id})
        )
    return {"errorId": error_id, "favorite": True}


def remove_favorite_error(project_id, user_id, error_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                DELETE FROM public.user_favorite_errors
                WHERE user_id = %(userId)s
                  AND error_id = %(error_id)s;""",
                        {"userId": user_id, "error_id": error_id})
        )
    return {"errorId": error_id, "favorite": False}


def favorite_error(project_id, user_id, error_id):
    exists, favorite = error_exists_and_favorite(user_id=user_id, error_id=error_id)
    if not exists:
        return {"errors": ["cannot bookmark non-rehydrated errors"]}
    if favorite:
        return remove_favorite_error(project_id=project_id, user_id=user_id, error_id=error_id)
    return add_favorite_error(project_id=project_id, user_id=user_id, error_id=error_id)


def error_exists_and_favorite(user_id, error_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """SELECT errors.error_id AS exists, ufe.error_id AS favorite
                   FROM public.errors
                            LEFT JOIN (SELECT error_id FROM public.user_favorite_errors WHERE user_id = %(userId)s) AS ufe USING (error_id)
                   WHERE error_id = %(error_id)s;""",
                {"userId": user_id, "error_id": error_id})
        )
        r = cur.fetchone()
    if r is None:
        return False, False
    return True, r.get("favorite") is not None


def add_viewed_error(project_id, user_id, error_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                INSERT INTO public.user_viewed_errors
                    (user_id, error_id)
                VALUES (%(userId)s, %(error_id)s);""",
                        {"userId": user_id, "error_id": error_id})
        )


def viewed_error_exists(user_id, error_id):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """SELECT errors.error_id AS hydrated,
                      COALESCE((SELECT TRUE
                                FROM public.user_viewed_errors AS ve
                                WHERE ve.error_id = %(error_id)s
                                  AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
               FROM public.errors
               WHERE error_id = %(error_id)s""",
            {"userId": user_id, "error_id": error_id})
        cur.execute(query=query)
        r = cur.fetchone()
    if r:
        return r.get("viewed")
    return True


def viewed_error(project_id, user_id, error_id):
    if viewed_error_exists(user_id=user_id, error_id=error_id):
        return None
    return add_viewed_error(project_id=project_id, user_id=user_id, error_id=error_id)
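Note: favorite_error acts as a toggle around the two helpers above. A hypothetical round trip (ids invented for illustration):

favorite_error(project_id=1, user_id=7, error_id="e-123")
# -> {"errorId": "e-123", "favorite": True}   (bookmark added)
favorite_error(project_id=1, user_id=7, error_id="e-123")
# -> {"errorId": "e-123", "favorite": False}  (bookmark removed)
# ids absent from public.errors short-circuit:
# -> {"errors": ["cannot bookmark non-rehydrated errors"]}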
422
api/chalicelib/core/events.py
Normal file

@@ -0,0 +1,422 @@
from chalicelib.utils import pg_client, helper
from chalicelib.core import sessions_metas, metadata
from chalicelib.core import issues
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.event_filter_definition import SupportedFilter, Event


def get_customs_by_sessionId2_pg(session_id, project_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify("""\
            SELECT c.*,
                   'CUSTOM' AS type
            FROM events_common.customs AS c
            WHERE c.session_id = %(session_id)s
            ORDER BY c.timestamp;""",
                                {"project_id": project_id, "session_id": session_id}))
        rows = cur.fetchall()
    return helper.dict_to_camel_case(rows)


def __merge_cells(rows, start, count, replacement):
    rows[start] = replacement
    rows = rows[:start + 1] + rows[start + count:]
    return rows


def __get_grouped_clickrage(rows, session_id):
    click_rage_issues = issues.get_by_session_id(session_id=session_id, issue_type="click_rage")
    if len(click_rage_issues) == 0:
        return rows

    for c in click_rage_issues:
        merge_count = c.get("payload")
        if merge_count is not None:
            merge_count = merge_count.get("count", 3)
        else:
            merge_count = 3
        for i in range(len(rows)):
            if rows[i]["timestamp"] == c["timestamp"]:
                rows = __merge_cells(rows=rows,
                                     start=i,
                                     count=merge_count,
                                     replacement={**rows[i], "type": "CLICKRAGE", "count": merge_count})
                break
    return rows
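Note: __merge_cells collapses `count` consecutive rows starting at `start` into a single replacement row, which __get_grouped_clickrage uses to fold a click-rage burst into one CLICKRAGE event. A worked example:

rows = [{"timestamp": t, "type": "CLICK"} for t in (10, 11, 12, 13)]
rows = __merge_cells(rows, start=1, count=2,
                     replacement={"timestamp": 11, "type": "CLICKRAGE", "count": 2})
# -> [{"timestamp": 10, "type": "CLICK"},
#     {"timestamp": 11, "type": "CLICKRAGE", "count": 2},
#     {"timestamp": 13, "type": "CLICK"}]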
def get_by_sessionId2_pg(session_id, project_id, group_clickrage=False):
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify("""\
            SELECT c.*,
                   'CLICK' AS type
            FROM events.clicks AS c
            WHERE c.session_id = %(session_id)s
            ORDER BY c.timestamp;""",
                                {"project_id": project_id, "session_id": session_id}))
        rows = cur.fetchall()
        if group_clickrage:
            rows = __get_grouped_clickrage(rows=rows, session_id=session_id)

        cur.execute(cur.mogrify("""
            SELECT i.*,
                   'INPUT' AS type
            FROM events.inputs AS i
            WHERE i.session_id = %(session_id)s
            ORDER BY i.timestamp;""",
                                {"project_id": project_id, "session_id": session_id}))
        rows += cur.fetchall()
        cur.execute(cur.mogrify("""\
            SELECT l.*,
                   l.path AS value,
                   l.path AS url,
                   'LOCATION' AS type
            FROM events.pages AS l
            WHERE l.session_id = %(session_id)s
            ORDER BY l.timestamp;""", {"project_id": project_id, "session_id": session_id}))
        rows += cur.fetchall()
    rows = helper.list_to_camel_case(rows)
    rows = sorted(rows, key=lambda k: k["messageId"])
    return rows


def __get_data_for_extend(data):
    if "errors" not in data:
        return data["data"]
def __pg_errors_query(source=None):
    return f"""((SELECT DISTINCT ON(lg.message)
                     lg.message AS value,
                     source,
                     '{event_type.ERROR.ui_type}' AS type
                 FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
                 WHERE s.project_id = %(project_id)s
                   AND lg.message ILIKE %(svalue)s
                   {"AND source = %(source)s" if source is not None else ""}
                 LIMIT 5)
                UNION ALL
                (SELECT DISTINCT ON(lg.name)
                     lg.name AS value,
                     source,
                     '{event_type.ERROR.ui_type}' AS type
                 FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
                 WHERE s.project_id = %(project_id)s
                   AND lg.name ILIKE %(svalue)s
                   {"AND source = %(source)s" if source is not None else ""}
                 LIMIT 5)
                UNION
                (SELECT DISTINCT ON(lg.message)
                     lg.message AS value,
                     source,
                     '{event_type.ERROR.ui_type}' AS type
                 FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
                 WHERE s.project_id = %(project_id)s
                   AND lg.message ILIKE %(value)s
                   {"AND source = %(source)s" if source is not None else ""}
                 LIMIT 5)
                UNION ALL
                (SELECT DISTINCT ON(lg.name)
                     lg.name AS value,
                     source,
                     '{event_type.ERROR.ui_type}' AS type
                 FROM {event_type.ERROR.table} INNER JOIN public.errors AS lg USING (error_id) LEFT JOIN public.sessions AS s USING(session_id)
                 WHERE s.project_id = %(project_id)s
                   AND lg.name ILIKE %(value)s
                   {"AND source = %(source)s" if source is not None else ""}
                 LIMIT 5));"""


def __search_pg_errors(project_id, value, key=None, source=None):
    now = TimeUTC.now()

    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(__pg_errors_query(source), {"project_id": project_id, "value": helper.string_to_sql_like(value),
                                                    "svalue": helper.string_to_sql_like("^" + value),
                                                    "source": source}))
        results = helper.list_to_camel_case(cur.fetchall())
    print(f"{TimeUTC.now() - now} : errors")
    return results


def __search_pg_errors_ios(project_id, value, key=None, source=None):
    now = TimeUTC.now()
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(f"""(SELECT DISTINCT ON(lg.reason)
                                 lg.reason AS value,
                                 '{event_type.ERROR_IOS.ui_type}' AS type
                             FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                             WHERE s.project_id = %(project_id)s
                               AND lg.reason ILIKE %(value)s
                             LIMIT 5)
                            UNION ALL
                            (SELECT DISTINCT ON(lg.name)
                                 lg.name AS value,
                                 '{event_type.ERROR_IOS.ui_type}' AS type
                             FROM {event_type.ERROR_IOS.table} INNER JOIN public.crashes_ios AS lg USING (crash_id) LEFT JOIN public.sessions AS s USING(session_id)
                             WHERE s.project_id = %(project_id)s
                               AND lg.name ILIKE %(value)s
                             LIMIT 5);""",
                        {"project_id": project_id, "value": helper.string_to_sql_like(value)}))
        results = helper.list_to_camel_case(cur.fetchall())
    print(f"{TimeUTC.now() - now} : errors")
    return results


def __search_pg_metadata(project_id, value, key=None, source=None):
    meta_keys = metadata.get(project_id=project_id)
    meta_keys = {m["key"]: m["index"] for m in meta_keys}
    if len(meta_keys) == 0 or key is not None and key not in meta_keys.keys():
        return []
    sub_from = []
    if key is not None:
        meta_keys = {key: meta_keys[key]}

    for k in meta_keys.keys():
        colname = metadata.index_to_colname(meta_keys[k])
        sub_from.append(
            f"(SELECT DISTINCT ON ({colname}) {colname} AS value, '{k}' AS key FROM public.sessions WHERE project_id = %(project_id)s AND {colname} ILIKE %(value)s LIMIT 5)")
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(f"""\
            SELECT key, value, 'METADATA' AS TYPE
            FROM ({" UNION ALL ".join(sub_from)}) AS all_metas
            LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value)}))
        results = helper.list_to_camel_case(cur.fetchall())
    return results


def __generic_query(typename):
    return f"""\
        (SELECT value, type
         FROM public.autocomplete
         WHERE project_id = %(project_id)s
           AND type='{typename}'
           AND value ILIKE %(svalue)s
         LIMIT 5)
        UNION
        (SELECT value, type
         FROM public.autocomplete
         WHERE project_id = %(project_id)s
           AND type='{typename}'
           AND value ILIKE %(value)s
         LIMIT 5)"""


def __generic_autocomplete(event: Event):
    def f(project_id, value, key=None, source=None):
        with pg_client.PostgresClient() as cur:
            cur.execute(cur.mogrify(__generic_query(event.ui_type),
                                    {"project_id": project_id, "value": helper.string_to_sql_like(value),
                                     "svalue": helper.string_to_sql_like("^" + value)}))
            return helper.list_to_camel_case(cur.fetchall())

    return f
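Note: __generic_query pairs a starts-with branch (%(svalue)s, the search text anchored with a leading "^" before going through string_to_sql_like) with a plain contains branch (%(value)s), five rows each, UNIONed so duplicates collapse. A usage sketch (project id invented; event_type is the class defined next):

click_suggest = __generic_autocomplete(event_type.CLICK)
rows = click_suggest(project_id=1, value="check")
# up to 5 values starting with "check", plus up to 5 merely containing it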
class event_type:
    CLICK = Event(ui_type="CLICK", table="events.clicks", column="label")
    INPUT = Event(ui_type="INPUT", table="events.inputs", column="label")
    LOCATION = Event(ui_type="LOCATION", table="events.pages", column="base_path")
    CUSTOM = Event(ui_type="CUSTOM", table="events_common.customs", column="name")
    REQUEST = Event(ui_type="REQUEST", table="events_common.requests", column="url")
    GRAPHQL = Event(ui_type="GRAPHQL", table="events.graphql", column="name")
    STATEACTION = Event(ui_type="STATEACTION", table="events.state_actions", column="name")
    ERROR = Event(ui_type="ERROR", table="events.errors",
                  column=None)  # column=None because errors are searched by name or message
    METADATA = Event(ui_type="METADATA", table="public.sessions", column=None)
    # IOS
    CLICK_IOS = Event(ui_type="CLICK_IOS", table="events_ios.clicks", column="label")
    INPUT_IOS = Event(ui_type="INPUT_IOS", table="events_ios.inputs", column="label")
    VIEW_IOS = Event(ui_type="VIEW_IOS", table="events_ios.views", column="name")
    CUSTOM_IOS = Event(ui_type="CUSTOM_IOS", table="events_common.customs", column="name")
    REQUEST_IOS = Event(ui_type="REQUEST_IOS", table="events_common.requests", column="url")
    ERROR_IOS = Event(ui_type="ERROR_IOS", table="events_ios.crashes",
                      column=None)  # column=None because errors are searched by name or message


SUPPORTED_TYPES = {
    event_type.CLICK.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK),
                                              query=__generic_query(typename=event_type.CLICK.ui_type),
                                              value_limit=3,
                                              starts_with="",
                                              starts_limit=3,
                                              ignore_if_starts_with=["/"]),
    event_type.INPUT.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT),
                                              query=__generic_query(typename=event_type.INPUT.ui_type),
                                              value_limit=3,
                                              starts_with="",
                                              starts_limit=3,
                                              ignore_if_starts_with=["/"]),
    event_type.LOCATION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.LOCATION),
                                                 query=__generic_query(typename=event_type.LOCATION.ui_type),
                                                 value_limit=3,
                                                 starts_with="/",
                                                 starts_limit=3,
                                                 ignore_if_starts_with=[]),
    event_type.CUSTOM.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM),
                                               query=__generic_query(typename=event_type.CUSTOM.ui_type),
                                               value_limit=3,
                                               starts_with="",
                                               starts_limit=3,
                                               ignore_if_starts_with=[""]),
    event_type.REQUEST.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST),
                                                query=__generic_query(typename=event_type.REQUEST.ui_type),
                                                value_limit=3,
                                                starts_with="/",
                                                starts_limit=3,
                                                ignore_if_starts_with=[""]),
    event_type.GRAPHQL.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.GRAPHQL),
                                                query=__generic_query(typename=event_type.GRAPHQL.ui_type),
                                                value_limit=3,
                                                starts_with="/",
                                                starts_limit=4,
                                                ignore_if_starts_with=[]),
    event_type.STATEACTION.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.STATEACTION),
                                                    query=__generic_query(typename=event_type.STATEACTION.ui_type),
                                                    value_limit=3,
                                                    starts_with="",
                                                    starts_limit=3,
                                                    ignore_if_starts_with=[]),
    event_type.ERROR.ui_type: SupportedFilter(get=__search_pg_errors,
                                              query=None,
                                              value_limit=4,
                                              starts_with="",
                                              starts_limit=4,
                                              ignore_if_starts_with=["/"]),
    event_type.METADATA.ui_type: SupportedFilter(get=__search_pg_metadata,
                                                 query=None,
                                                 value_limit=3,
                                                 starts_with="",
                                                 starts_limit=3,
                                                 ignore_if_starts_with=["/"]),
    # IOS
    event_type.CLICK_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CLICK_IOS),
                                                  query=__generic_query(typename=event_type.CLICK_IOS.ui_type),
                                                  value_limit=3,
                                                  starts_with="",
                                                  starts_limit=3,
                                                  ignore_if_starts_with=["/"]),
    event_type.INPUT_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.INPUT_IOS),
                                                  query=__generic_query(typename=event_type.INPUT_IOS.ui_type),
                                                  value_limit=3,
                                                  starts_with="",
                                                  starts_limit=3,
                                                  ignore_if_starts_with=["/"]),
    event_type.VIEW_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.VIEW_IOS),
                                                 query=__generic_query(typename=event_type.VIEW_IOS.ui_type),
                                                 value_limit=3,
                                                 starts_with="/",
                                                 starts_limit=3,
                                                 ignore_if_starts_with=[]),
    event_type.CUSTOM_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.CUSTOM_IOS),
                                                   query=__generic_query(typename=event_type.CUSTOM_IOS.ui_type),
                                                   value_limit=3,
                                                   starts_with="",
                                                   starts_limit=3,
                                                   ignore_if_starts_with=[""]),
    event_type.REQUEST_IOS.ui_type: SupportedFilter(get=__generic_autocomplete(event_type.REQUEST_IOS),
                                                    query=__generic_query(typename=event_type.REQUEST_IOS.ui_type),
                                                    value_limit=3,
                                                    starts_with="/",
                                                    starts_limit=3,
                                                    ignore_if_starts_with=[""]),
    event_type.ERROR_IOS.ui_type: SupportedFilter(get=__search_pg_errors,
                                                  query=None,
                                                  value_limit=4,
                                                  starts_with="",
                                                  starts_limit=4,
                                                  ignore_if_starts_with=["/"]),
}


def __get_merged_queries(queries, value, project_id):
    if len(queries) == 0:
        return []
    now = TimeUTC.now()
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify("(" + ")UNION ALL(".join(queries) + ")",
                                {"project_id": project_id, "value": helper.string_to_sql_like(value)}))
        results = helper.list_to_camel_case(cur.fetchall())
    print(f"{TimeUTC.now() - now} : merged-queries for len: {len(queries)}")
    return results


def __get_autocomplete_table(value, project_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify("""SELECT DISTINCT ON (value, type) project_id, value, type
                                   FROM (SELECT *
                                         FROM (SELECT *,
                                                      ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID
                                               FROM public.autocomplete
                                               WHERE project_id = %(project_id)s
                                                 AND value ILIKE %(svalue)s
                                               UNION
                                               SELECT *,
                                                      ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID
                                               FROM public.autocomplete
                                               WHERE project_id = %(project_id)s
                                                 AND value ILIKE %(value)s) AS u
                                         WHERE Row_ID <= 5) AS sfa
                                   ORDER BY sfa.type;""",
                                {"project_id": project_id, "value": helper.string_to_sql_like(value),
                                 "svalue": helper.string_to_sql_like("^" + value)}))
        results = helper.list_to_camel_case(cur.fetchall())
    return results


def search_pg2(text, event_type, project_id, source, key):
    if not event_type:
        return {"data": __get_autocomplete_table(text, project_id)}

    if event_type.upper() in SUPPORTED_TYPES.keys():
        rows = SUPPORTED_TYPES[event_type.upper()].get(project_id=project_id, value=text, key=key, source=source)
        if event_type.upper() + "_IOS" in SUPPORTED_TYPES.keys():
            rows += SUPPORTED_TYPES[event_type.upper() + "_IOS"].get(project_id=project_id, value=text, key=key,
                                                                     source=source)
    elif event_type.upper() + "_IOS" in SUPPORTED_TYPES.keys():
        rows = SUPPORTED_TYPES[event_type.upper() + "_IOS"].get(project_id=project_id, value=text, key=key,
                                                                source=source)
    elif event_type.upper() in sessions_metas.SUPPORTED_TYPES.keys():
        return sessions_metas.search(text, event_type, project_id)
    elif event_type.upper().endswith("_IOS") \
            and event_type.upper()[:-len("_IOS")] in sessions_metas.SUPPORTED_TYPES.keys():
        return sessions_metas.search(text, event_type, project_id)
    else:
        return {"errors": ["unsupported event"]}

    return {"data": rows}


def get_errors_by_session_id(session_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(f"""\
            SELECT er.*, ur.*, er.timestamp - s.start_ts AS time
            FROM {event_type.ERROR.table} AS er
                     INNER JOIN public.errors AS ur USING (error_id)
                     INNER JOIN public.sessions AS s USING (session_id)
            WHERE er.session_id = %(session_id)s
            ORDER BY timestamp;""", {"session_id": session_id}))
        errors = cur.fetchall()
        for e in errors:
            e["stacktrace_parsed_at"] = TimeUTC.datetime_to_timestamp(e["stacktrace_parsed_at"])
        return helper.list_to_camel_case(errors)
69
api/chalicelib/core/events_ios.py
Normal file

@@ -0,0 +1,69 @@
from chalicelib.utils import pg_client, helper
from chalicelib.core import events


def get_customs_by_sessionId(session_id, project_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(f"""\
            SELECT c.*,
                   '{events.event_type.CUSTOM_IOS.ui_type}' AS type
            FROM {events.event_type.CUSTOM_IOS.table} AS c
            WHERE c.session_id = %(session_id)s
            ORDER BY c.timestamp;""",
                                {"project_id": project_id, "session_id": session_id}))
        rows = cur.fetchall()
    return helper.dict_to_camel_case(rows)


def get_by_sessionId(session_id, project_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(f"""
            SELECT c.*,
                   '{events.event_type.CLICK_IOS.ui_type}' AS type
            FROM {events.event_type.CLICK_IOS.table} AS c
            WHERE c.session_id = %(session_id)s
            ORDER BY c.timestamp;""",
                                {"project_id": project_id, "session_id": session_id}))
        rows = cur.fetchall()

        cur.execute(cur.mogrify(f"""
            SELECT i.*,
                   '{events.event_type.INPUT_IOS.ui_type}' AS type
            FROM {events.event_type.INPUT_IOS.table} AS i
            WHERE i.session_id = %(session_id)s
            ORDER BY i.timestamp;""",
                                {"project_id": project_id, "session_id": session_id}))
        rows += cur.fetchall()
        cur.execute(cur.mogrify(f"""
            SELECT v.*,
                   '{events.event_type.VIEW_IOS.ui_type}' AS type
            FROM {events.event_type.VIEW_IOS.table} AS v
            WHERE v.session_id = %(session_id)s
            ORDER BY v.timestamp;""", {"project_id": project_id, "session_id": session_id}))
        rows += cur.fetchall()
    rows = helper.list_to_camel_case(rows)
    rows = sorted(rows, key=lambda k: k["timestamp"])
    return rows


def get_crashes_by_session_id(session_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(f"""
            SELECT cr.*, uc.*, cr.timestamp - s.start_ts AS time
            FROM {events.event_type.ERROR_IOS.table} AS cr
                     INNER JOIN public.crashes_ios AS uc USING (crash_id)
                     INNER JOIN public.sessions AS s USING (session_id)
            WHERE cr.session_id = %(session_id)s
            ORDER BY timestamp;""", {"session_id": session_id}))
        errors = cur.fetchall()
        return helper.list_to_camel_case(errors)
269
api/chalicelib/core/funnels.py
Normal file

@@ -0,0 +1,269 @@
import chalicelib.utils.helper
from chalicelib.core import events, significance, sessions
from chalicelib.utils.TimeUTC import TimeUTC

from chalicelib.utils import helper, pg_client
from chalicelib.utils import dev
import json

REMOVE_KEYS = ["key", "_key", "startDate", "endDate"]

ALLOW_UPDATE_FOR = ["name", "filter"]


def filter_stages(stages):
    ALLOW_TYPES = [events.event_type.CLICK.ui_type, events.event_type.INPUT.ui_type,
                   events.event_type.LOCATION.ui_type, events.event_type.CUSTOM.ui_type,
                   events.event_type.CLICK_IOS.ui_type, events.event_type.INPUT_IOS.ui_type,
                   events.event_type.VIEW_IOS.ui_type, events.event_type.CUSTOM_IOS.ui_type, ]
    return [s for s in stages if s["type"] in ALLOW_TYPES and s.get("value") is not None]


def create(project_id, user_id, name, filter, is_public):
    helper.delete_keys_from_dict(filter, REMOVE_KEYS)
    filter["events"] = filter_stages(stages=filter.get("events", []))
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""\
            INSERT INTO public.funnels (project_id, user_id, name, filter, is_public)
            VALUES (%(project_id)s, %(user_id)s, %(name)s, %(filter)s::jsonb, %(is_public)s)
            RETURNING *;""",
                            {"user_id": user_id, "project_id": project_id, "name": name,
                             "filter": json.dumps(filter), "is_public": is_public})

        cur.execute(query)
        r = cur.fetchone()
    r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
    r = helper.dict_to_camel_case(r)
    r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
    return {"data": r}


def update(funnel_id, user_id, name=None, filter=None, is_public=None):
    s_query = []
    if filter is not None:
        helper.delete_keys_from_dict(filter, REMOVE_KEYS)
        s_query.append("filter = %(filter)s::jsonb")
    if name is not None and len(name) > 0:
        s_query.append("name = %(name)s")
    if is_public is not None:
        s_query.append("is_public = %(is_public)s")
    if len(s_query) == 0:
        return {"errors": ["Nothing to update"]}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""\
            UPDATE public.funnels
            SET {" , ".join(s_query)}
            WHERE funnel_id = %(funnel_id)s
            RETURNING *;""",
                            {"user_id": user_id, "funnel_id": funnel_id, "name": name,
                             "filter": json.dumps(filter) if filter is not None else None,
                             "is_public": is_public})
        # print("--------------------")
        # print(query)
        # print("--------------------")
        cur.execute(query)
        r = cur.fetchone()
    r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"])
    r = helper.dict_to_camel_case(r)
    r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"])
    return {"data": r}


def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date=None, details=False):
    with pg_client.PostgresClient() as cur:
        team_query = ""
        cur.execute(
            cur.mogrify(
                f"""\
                SELECT DISTINCT ON (funnels.funnel_id) funnel_id, project_id, user_id, name, created_at, deleted_at, is_public
                       {",filter" if details else ""}
                FROM public.funnels {team_query}
                WHERE project_id = %(project_id)s
                  AND funnels.deleted_at IS NULL
                  AND (funnels.user_id = %(user_id)s OR funnels.is_public);""",
                {"project_id": project_id, "user_id": user_id}
            )
        )

        rows = cur.fetchall()
    rows = helper.list_to_camel_case(rows)
    for row in rows:
        row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"])
        if details:
            row["filter"]["events"] = filter_stages(row["filter"]["events"])
            get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date,
                               end_date=end_date)
            counts = sessions.search2_pg(data=row["filter"], project_id=project_id, user_id=None, count_only=True)
            row["sessionsCount"] = counts["countSessions"]
            row["usersCount"] = counts["countUsers"]
            overview = significance.get_overview(filter_d=row["filter"], project_id=project_id)
            row["stages"] = overview["stages"]
            row.pop("filter")
            row["stagesCount"] = len(row["stages"])
            # TODO: ask david to count it alone
            row["criticalIssuesCount"] = overview["criticalIssuesCount"]
            row["missedConversions"] = 0 if len(row["stages"]) < 2 \
                else row["stages"][0]["sessionsCount"] - row["stages"][-1]["sessionsCount"]
    return rows


def get_possible_issue_types(project_id):
    return [{"type": t, "title": chalicelib.utils.helper.get_issue_title(t)} for t in
            ['click_rage', 'dead_click', 'excessive_scrolling',
             'bad_request', 'missing_resource', 'memory', 'cpu',
             'slow_resource', 'slow_page_load', 'crash', 'custom_event_error',
             'js_error']]


def get_start_end_time(filter_d, range_value, start_date, end_date):
    if start_date is not None and end_date is not None:
        filter_d["startDate"], filter_d["endDate"] = start_date, end_date
    elif range_value is not None and len(range_value) > 0:
        filter_d["rangeValue"] = range_value
        filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(range_value)
    else:
        filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(filter_d["rangeValue"])


def delete(project_id, funnel_id, user_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                UPDATE public.funnels
                SET deleted_at = timezone('utc'::text, now())
                WHERE project_id = %(project_id)s
                  AND funnel_id = %(funnel_id)s;""",
                        {"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id})
        )

    return {"data": {"state": "success"}}


def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=None, end_date=None):
    f = get(funnel_id=funnel_id, project_id=project_id)
    if f is None:
        return {"errors": ["filter not found"]}
    get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
    return sessions.search2_pg(data=f["filter"], project_id=project_id, user_id=user_id)
|
||||
|
||||
|
||||
def get_sessions_on_the_fly(funnel_id, project_id, user_id, data):
|
||||
data["events"] = filter_stages(data.get("events", []))
|
||||
if len(data["events"]) == 0:
|
||||
f = get(funnel_id=funnel_id, project_id=project_id)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None),
|
||||
start_date=data.get('startDate', None),
|
||||
end_date=data.get('endDate', None))
|
||||
data = f["filter"]
|
||||
return sessions.search2_pg(data=data, project_id=project_id, user_id=user_id)
|
||||
|
||||
|
||||
def get_top_insights(project_id, funnel_id, range_value=None, start_date=None, end_date=None):
|
||||
f = get(funnel_id=funnel_id, project_id=project_id)
|
||||
if f is None:
|
||||
return {"errors": ["filter not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
|
||||
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id)
|
||||
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
|
||||
return {"stages": helper.list_to_camel_case(insights),
|
||||
"totalDropDueToIssues": total_drop_due_to_issues}
|
||||
|
||||
|
||||
def get_top_insights_on_the_fly(funnel_id, project_id, data):
|
||||
data["events"] = filter_stages(data.get("events", []))
|
||||
if len(data["events"]) == 0:
|
||||
f = get(funnel_id=funnel_id, project_id=project_id)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None),
|
||||
start_date=data.get('startDate', None),
|
||||
end_date=data.get('endDate', None))
|
||||
data = f["filter"]
|
||||
insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data, project_id=project_id)
|
||||
if len(insights) > 0:
|
||||
insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
|
||||
return {"stages": helper.list_to_camel_case(insights),
|
||||
"totalDropDueToIssues": total_drop_due_to_issues}
|
||||
|
||||
|
||||
def get_issues(project_id, funnel_id, range_value=None, start_date=None, end_date=None):
|
||||
f = get(funnel_id=funnel_id, project_id=project_id)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date)
|
||||
return {"data": {
|
||||
"issues": helper.dict_to_camel_case(significance.get_issues_list(filter_d=f["filter"], project_id=project_id))
|
||||
}}
|
||||
|
||||
|
||||
@dev.timed
|
||||
def get_issues_on_the_fly(funnel_id, project_id, data):
|
||||
first_stage = data.get("firstStage")
|
||||
last_stage = data.get("lastStage")
|
||||
data["events"] = filter_stages(data.get("events", []))
|
||||
if len(data["events"]) == 0:
|
||||
f = get(funnel_id=funnel_id, project_id=project_id)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=data.get("rangeValue", None),
|
||||
start_date=data.get('startDate', None),
|
||||
end_date=data.get('endDate', None))
|
||||
data = f["filter"]
|
||||
return {
|
||||
"issues": helper.dict_to_camel_case(
|
||||
significance.get_issues_list(filter_d=data, project_id=project_id, first_stage=first_stage,
|
||||
last_stage=last_stage))}
|
||||
|
||||
|
||||
def get(funnel_id, project_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
"""\
|
||||
SELECT
|
||||
*
|
||||
FROM public.funnels
|
||||
WHERE project_id = %(project_id)s
|
||||
AND deleted_at IS NULL
|
||||
AND funnel_id = %(funnel_id)s;""",
|
||||
{"funnel_id": funnel_id, "project_id": project_id}
|
||||
)
|
||||
)
|
||||
|
||||
f = helper.dict_to_camel_case(cur.fetchone())
|
||||
if f is None:
|
||||
return None
|
||||
|
||||
f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"])
|
||||
f["filter"]["events"] = filter_stages(stages=f["filter"]["events"])
|
||||
return f
|
||||
|
||||
|
||||
@dev.timed
|
||||
def search_by_issue(user_id, project_id, funnel_id, issue_id, data, range_value=None, start_date=None, end_date=None):
|
||||
if len(data.get("events", [])) == 0:
|
||||
f = get(funnel_id=funnel_id, project_id=project_id)
|
||||
if f is None:
|
||||
return {"errors": ["funnel not found"]}
|
||||
get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=data.get('startDate', start_date),
|
||||
end_date=data.get('endDate', end_date))
|
||||
data = f["filter"]
|
||||
|
||||
# insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data, project_id=project_id)
|
||||
issues = get_issues_on_the_fly(funnel_id=funnel_id, project_id=project_id, data=data).get("issues", {})
|
||||
issues = issues.get("significant", []) + issues.get("insignificant", [])
|
||||
issue = None
|
||||
for i in issues:
|
||||
if i.get("issueId", "") == issue_id:
|
||||
issue = i
|
||||
break
|
||||
return {"sessions": sessions.search2_pg(user_id=user_id, project_id=project_id, issue=issue,
|
||||
data=data) if issue is not None else {"total": 0, "sessions": []},
|
||||
# "stages": helper.list_to_camel_case(insights),
|
||||
# "totalDropDueToIssues": total_drop_due_to_issues,
|
||||
"issue": issue}
|
||||
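A note on get_start_end_time above: it resolves the date window with a fixed precedence — explicit start/end dates win, then an explicitly passed range key, then the rangeValue already stored in the filter. A minimal standalone sketch of that precedence; resolve_range is a stand-in for TimeUTC.get_start_end_from_range and the timestamps are illustrative only:

def resolve_range(range_value):
    return (0, 1000)  # illustrative timestamps, not real TimeUTC output

def pick_dates(filter_d, range_value=None, start_date=None, end_date=None):
    if start_date is not None and end_date is not None:
        filter_d["startDate"], filter_d["endDate"] = start_date, end_date
    elif range_value:
        filter_d["rangeValue"] = range_value
        filter_d["startDate"], filter_d["endDate"] = resolve_range(range_value)
    else:
        filter_d["startDate"], filter_d["endDate"] = resolve_range(filter_d["rangeValue"])
    return filter_d

# Explicit dates always win over a stored rangeValue:
print(pick_dates({"rangeValue": "LAST_7_DAYS"}, start_date=10, end_date=20))
# {'rangeValue': 'LAST_7_DAYS', 'startDate': 10, 'endDate': 20}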
53
api/chalicelib/core/integration_base.py
Normal file
@@ -0,0 +1,53 @@

from abc import ABC, abstractmethod

from chalicelib.utils import pg_client, helper


class BaseIntegration(ABC):

    def __init__(self, user_id, ISSUE_CLASS):
        self._user_id = user_id
        self.issue_handler = ISSUE_CLASS(self.integration_token)

    @property
    @abstractmethod
    def provider(self):
        pass

    @property
    def integration_token(self):
        integration = self.get()
        if integration is None:
            print("no token configured yet")
            return None
        return integration["token"]

    def get(self):
        with pg_client.PostgresClient() as cur:
            cur.execute(
                cur.mogrify(
                    """SELECT *
                       FROM public.oauth_authentication
                       WHERE user_id = %(user_id)s AND provider = %(provider)s;""",
                    {"user_id": self._user_id, "provider": self.provider.lower()})
            )
            return helper.dict_to_camel_case(cur.fetchone())

    @abstractmethod
    def get_obfuscated(self):
        pass

    @abstractmethod
    def update(self, changes):
        pass

    @abstractmethod
    def _add(self, data):
        pass

    @abstractmethod
    def delete(self):
        pass

    @abstractmethod
    def add_edit(self, data):
        pass
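The key pattern in BaseIntegration is the abstract read-only provider property: subclasses supply the name, and the base class lowercases it to look up the oauth_authentication row. A standalone sketch of that pattern, with a hypothetical "DEMO" provider (no database involved):

from abc import ABC, abstractmethod

class Base(ABC):
    @property
    @abstractmethod
    def provider(self):
        pass

    def row_filter(self):
        # provider.lower() drives the WHERE clause in BaseIntegration.get()
        return {"provider": self.provider.lower()}

class Demo(Base):
    @property
    def provider(self):
        return "DEMO"  # hypothetical provider name

print(Demo().row_filter())  # {'provider': 'demo'}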
41
api/chalicelib/core/integration_base_issue.py
Normal file
@@ -0,0 +1,41 @@

from abc import ABC, abstractmethod


class RequestException(Exception):
    pass


def proxy_issues_handler(e):
    print("=======__proxy_issues_handler=======")
    print(str(e))
    return {"errors": [str(e)]}


class BaseIntegrationIssue(ABC):
    def __init__(self, provider, integration_token):
        self.provider = provider
        self.integration_token = integration_token

    @abstractmethod
    def create_new_assignment(self, integration_project_id, title, description, assignee, issue_type):
        pass

    @abstractmethod
    def get_by_ids(self, saved_issues):
        pass

    @abstractmethod
    def get(self, integration_project_id, assignment_id):
        pass

    @abstractmethod
    def comment(self, integration_project_id, assignment_id, comment):
        pass

    @abstractmethod
    def get_metas(self, integration_project_id):
        pass

    @abstractmethod
    def get_projects(self):
        pass
78
api/chalicelib/core/integration_github.py
Normal file
@@ -0,0 +1,78 @@

from chalicelib.utils import pg_client, helper
from chalicelib.core.integration_github_issue import GithubIntegrationIssue
from chalicelib.core import integration_base

PROVIDER = "GITHUB"


class GitHubIntegration(integration_base.BaseIntegration):

    def __init__(self, tenant_id, user_id):
        self.__tenant_id = tenant_id
        super(GitHubIntegration, self).__init__(user_id=user_id, ISSUE_CLASS=GithubIntegrationIssue)

    @property
    def provider(self):
        return PROVIDER

    def get_obfuscated(self):
        integration = self.get()
        if integration is None:
            return None
        token = "*" * (len(integration["token"]) - 4) + integration["token"][-4:]
        return {"token": token, "provider": self.provider.lower()}

    def update(self, changes):
        with pg_client.PostgresClient() as cur:
            sub_query = [f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys()]
            cur.execute(
                cur.mogrify(f"""\
                    UPDATE public.oauth_authentication
                    SET {','.join(sub_query)}
                    WHERE user_id = %(user_id)s
                    RETURNING token;""",
                            {"user_id": self._user_id, **changes})
            )
            w = helper.dict_to_camel_case(cur.fetchone())
            return w

    def _add(self, data):
        pass

    def add(self, token):
        with pg_client.PostgresClient() as cur:
            cur.execute(
                cur.mogrify("""\
                    INSERT INTO public.oauth_authentication(user_id, provider, provider_user_id, token)
                    VALUES (%(user_id)s, 'github', '', %(token)s)
                    RETURNING token;""",
                            {"user_id": self._user_id, "token": token})
            )
            w = helper.dict_to_camel_case(cur.fetchone())
            return w

    # TODO: make a revoke token call
    def delete(self):
        with pg_client.PostgresClient() as cur:
            cur.execute(
                cur.mogrify("""\
                    DELETE FROM public.oauth_authentication
                    WHERE user_id = %(user_id)s AND provider = %(provider)s;""",
                            {"user_id": self._user_id, "provider": self.provider.lower()})
            )
            return {"state": "success"}

    def add_edit(self, data):
        s = self.get()
        if s is not None:
            return self.update(
                changes={
                    "token": data["token"]
                }
            )
        else:
            return self.add(token=data["token"])
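add_edit implements a read-then-write upsert: update when a row already exists, insert otherwise. A database-free sketch of the same flow, with a plain dict standing in for the oauth_authentication table:

_store = {}  # hypothetical in-memory stand-in for the table

def get_row(user_id):
    return _store.get(user_id)

def add_edit_token(user_id, token):
    if get_row(user_id) is not None:
        _store[user_id]["token"] = token    # update branch
    else:
        _store[user_id] = {"token": token}  # add branch
    return _store[user_id]

print(add_edit_token(1, "abc"))  # insert
print(add_edit_token(1, "xyz"))  # update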
100
api/chalicelib/core/integration_github_issue.py
Normal file
@@ -0,0 +1,100 @@

from chalicelib.core.integration_base_issue import BaseIntegrationIssue
from chalicelib.utils import github_client_v3
from chalicelib.utils.github_client_v3 import github_formatters as formatter


class GithubIntegrationIssue(BaseIntegrationIssue):
    def __init__(self, integration_token):
        self.__client = github_client_v3.githubV3Request(integration_token)
        super(GithubIntegrationIssue, self).__init__("GITHUB", integration_token)

    def get_current_user(self):
        return formatter.user(self.__client.get("/user"))

    def get_meta(self, repoId):
        current_user = self.get_current_user()
        try:
            users = self.__client.get(f"/repositories/{repoId}/collaborators")
        except Exception:
            users = []
        users = [formatter.user(u) for u in users]
        if current_user not in users:
            users.insert(0, current_user)
        meta = {
            'users': users,
            'issueTypes': [formatter.label(l) for l in
                           self.__client.get(f"/repositories/{repoId}/labels")]
        }

        return meta

    def create_new_assignment(self, integration_project_id, title, description, assignee,
                              issue_type):
        repoId = integration_project_id
        assignees = [assignee]
        labels = [str(issue_type)]

        metas = self.get_meta(repoId)
        # map assignee ids to the GitHub logins the API expects
        real_assignees = []
        for a in assignees:
            for u in metas["users"]:
                if a == str(u["id"]):
                    real_assignees.append(u["login"])
                    break
        # map label ids to label names, passing unknown values through as-is
        real_labels = ["Asayer"]
        for l in labels:
            found = False
            for ll in metas["issueTypes"]:
                if l == str(ll["id"]):
                    real_labels.append(ll["name"])
                    found = True
                    break
            if not found:
                real_labels.append(l)
        issue = self.__client.post(f"/repositories/{repoId}/issues",
                                   body={"title": title, "body": description,
                                         "assignees": real_assignees,
                                         "labels": real_labels})
        return formatter.issue(issue)

    def get_by_ids(self, saved_issues):
        results = []
        for i in saved_issues:
            results.append(self.get(integration_project_id=i["integrationProjectId"], assignment_id=i["id"]))
        return {"issues": results}

    def get(self, integration_project_id, assignment_id):
        repoId = integration_project_id
        issueNumber = assignment_id
        issue = self.__client.get(f"/repositories/{repoId}/issues/{issueNumber}")
        issue = formatter.issue(issue)
        if issue["commentsCount"] > 0:
            issue["comments"] = [formatter.comment(c) for c in
                                 self.__client.get(f"/repositories/{repoId}/issues/{issueNumber}/comments")]
        return issue

    def comment(self, integration_project_id, assignment_id, comment):
        repoId = integration_project_id
        issueNumber = assignment_id
        commentCreated = self.__client.post(f"/repositories/{repoId}/issues/{issueNumber}/comments",
                                            body={"body": comment})
        return formatter.comment(commentCreated)

    def get_metas(self, integration_project_id):
        current_user = self.get_current_user()
        try:
            users = self.__client.get(f"/repositories/{integration_project_id}/collaborators")
        except Exception:
            users = []
        users = [formatter.user(u) for u in users]
        if current_user not in users:
            users.insert(0, current_user)

        return {"provider": self.provider.lower(),
                'users': users,
                'issueTypes': [formatter.label(l) for l in
                               self.__client.get(f"/repositories/{integration_project_id}/labels")]
                }

    def get_projects(self):
        repos = self.__client.get("/user/repos")
        return [formatter.repo(r) for r in repos]
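create_new_assignment receives labels as ids and maps them to the names GitHub expects, falling back to the raw value for anything unknown. The mapping logic in isolation (label data here is made up):

def map_labels(labels, issue_types):
    real_labels = ["Asayer"]  # every issue created from the app carries this tag
    for l in labels:
        for ll in issue_types:
            if l == str(ll["id"]):
                real_labels.append(ll["name"])
                break
        else:
            real_labels.append(l)  # unknown ids pass through unchanged
    return real_labels

types = [{"id": 7, "name": "bug"}]
print(map_labels(["7", "urgent"], types))  # ['Asayer', 'bug', 'urgent']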
101
api/chalicelib/core/integration_jira_cloud.py
Normal file
@@ -0,0 +1,101 @@

from chalicelib.utils import pg_client, helper
from chalicelib.core.integration_jira_cloud_issue import JIRACloudIntegrationIssue
from chalicelib.core import integration_base

PROVIDER = "JIRA"


class JIRAIntegration(integration_base.BaseIntegration):
    def __init__(self, tenant_id, user_id):
        self.__tenant_id = tenant_id
        # TODO: enable super-constructor when OAuth is done
        # super(JIRAIntegration, self).__init__(jwt, user_id, JIRACloudIntegrationProxy)
        self._user_id = user_id
        i = self.get()
        if i is None:
            return
        self.issue_handler = JIRACloudIntegrationIssue(token=i["token"], username=i["username"], url=i["url"])

    @property
    def provider(self):
        return PROVIDER

    # TODO: remove this once jira-oauth is done
    def get(self):
        with pg_client.PostgresClient() as cur:
            cur.execute(
                cur.mogrify(
                    """SELECT username, token, url
                       FROM public.jira_cloud
                       WHERE user_id = %(user_id)s;""",
                    {"user_id": self._user_id})
            )
            return helper.dict_to_camel_case(cur.fetchone())

    def get_obfuscated(self):
        integration = self.get()
        if integration is None:
            return None
        integration["token"] = "*" * (len(integration["token"]) - 4) + integration["token"][-4:]
        integration["provider"] = self.provider.lower()
        return integration

    def update(self, changes):
        with pg_client.PostgresClient() as cur:
            sub_query = [f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys()]
            cur.execute(
                cur.mogrify(f"""\
                    UPDATE public.jira_cloud
                    SET {','.join(sub_query)}
                    WHERE user_id = %(user_id)s
                    RETURNING username, token, url;""",
                            {"user_id": self._user_id, **changes})
            )
            w = helper.dict_to_camel_case(cur.fetchone())
            return w

    # TODO: make this generic for all issue tracking integrations
    def _add(self, data):
        print("a pretty defined abstract method")
        return

    def add(self, username, token, url):
        with pg_client.PostgresClient() as cur:
            cur.execute(
                cur.mogrify("""\
                    INSERT INTO public.jira_cloud(username, token, user_id, url)
                    VALUES (%(username)s, %(token)s, %(user_id)s, %(url)s)
                    RETURNING username, token, url;""",
                            {"user_id": self._user_id, "username": username,
                             "token": token, "url": url})
            )
            w = helper.dict_to_camel_case(cur.fetchone())
            return w

    def delete(self):
        with pg_client.PostgresClient() as cur:
            cur.execute(
                cur.mogrify("""\
                    DELETE FROM public.jira_cloud
                    WHERE user_id = %(user_id)s;""",
                            {"user_id": self._user_id})
            )
            return {"state": "success"}

    def add_edit(self, data):
        s = self.get()
        if s is not None:
            return self.update(
                changes={
                    "username": data["username"],
                    "token": data["token"],
                    "url": data["url"]
                }
            )
        else:
            return self.add(
                username=data["username"],
                token=data["token"],
                url=data["url"]
            )
56
api/chalicelib/core/integration_jira_cloud_issue.py
Normal file
@@ -0,0 +1,56 @@

from chalicelib.utils import jira_client
from chalicelib.core.integration_base_issue import BaseIntegrationIssue


class JIRACloudIntegrationIssue(BaseIntegrationIssue):
    def __init__(self, token, username, url):
        self.username = username
        self.url = url
        self._client = jira_client.JiraManager(self.url, self.username, token, None)
        super(JIRACloudIntegrationIssue, self).__init__("JIRA", token)

    def create_new_assignment(self, integration_project_id, title, description, assignee, issue_type):
        self._client.set_jira_project_id(integration_project_id)
        data = {
            'summary': title,
            'description': description,
            'issuetype': {'id': issue_type},
            'assignee': {"id": assignee},
            "labels": ["Asayer"]
        }
        return self._client.create_issue(data)

    def get_by_ids(self, saved_issues):
        projects_map = {}
        for i in saved_issues:
            if i["integrationProjectId"] not in projects_map.keys():
                projects_map[i["integrationProjectId"]] = []
            projects_map[i["integrationProjectId"]].append(i["id"])

        results = []
        for integration_project_id in projects_map:
            self._client.set_jira_project_id(integration_project_id)
            jql = 'labels = Asayer'
            if len(projects_map[integration_project_id]) > 0:
                # ids are cast to str so the join never fails on integer ids
                jql += f" AND ID IN ({','.join(str(x) for x in projects_map[integration_project_id])})"
            issues = self._client.get_issues(jql, offset=0)
            results += [issues]
        return {"issues": results}

    def get(self, integration_project_id, assignment_id):
        self._client.set_jira_project_id(integration_project_id)
        return self._client.get_issue_v3(assignment_id)

    def comment(self, integration_project_id, assignment_id, comment):
        self._client.set_jira_project_id(integration_project_id)
        return self._client.add_comment_v3(assignment_id, comment)

    def get_metas(self, integration_project_id):
        meta = {}
        self._client.set_jira_project_id(integration_project_id)
        meta['issueTypes'] = self._client.get_issue_types()
        meta['users'] = self._client.get_assignable_users()
        return {"provider": self.provider.lower(), **meta}

    def get_projects(self):
        return self._client.get_projects()
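get_by_ids groups the saved issues by project so a single JQL query per project fetches everything at once. A sketch of the grouping and the resulting JQL strings (the ids and project keys are made up):

saved = [{"integrationProjectId": "P1", "id": 101},
         {"integrationProjectId": "P1", "id": 102},
         {"integrationProjectId": "P2", "id": 201}]

projects_map = {}
for i in saved:
    projects_map.setdefault(i["integrationProjectId"], []).append(i["id"])

for project_id, ids in projects_map.items():
    jql = 'labels = Asayer'
    if ids:
        jql += f" AND ID IN ({','.join(str(x) for x in ids)})"
    print(project_id, "->", jql)
# P1 -> labels = Asayer AND ID IN (101,102)
# P2 -> labels = Asayer AND ID IN (201)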
42
api/chalicelib/core/integrations_manager.py
Normal file
@@ -0,0 +1,42 @@

from chalicelib.core import integration_github, integration_jira_cloud
from chalicelib.utils import pg_client

SUPPORTED_TOOLS = [integration_github.PROVIDER, integration_jira_cloud.PROVIDER]


def get_available_integrations(user_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                SELECT EXISTS((SELECT 1
                               FROM public.oauth_authentication
                               WHERE user_id = %(user_id)s
                                 AND provider = 'github')) AS github,
                       EXISTS((SELECT 1
                               FROM public.jira_cloud
                               WHERE user_id = %(user_id)s)) AS jira;""",
                        {"user_id": user_id})
        )
        current_integrations = cur.fetchone()
    return dict(current_integrations)


def __get_default_integration(user_id):
    current_integrations = get_available_integrations(user_id)
    return integration_github.PROVIDER if current_integrations["github"] else integration_jira_cloud.PROVIDER if \
        current_integrations["jira"] else None


def get_integration(tenant_id, user_id, tool=None):
    if tool is None:
        tool = __get_default_integration(user_id=user_id)
    if tool is None:
        return {"errors": ["no issue tracking tool found"]}, None
    tool = tool.upper()
    if tool not in SUPPORTED_TOOLS:
        return {"errors": [f"issue tracking tool not supported yet, available: {SUPPORTED_TOOLS}"]}, None
    if tool == integration_jira_cloud.PROVIDER:
        return None, integration_jira_cloud.JIRAIntegration(tenant_id=tenant_id, user_id=user_id)
    elif tool == integration_github.PROVIDER:
        return None, integration_github.GitHubIntegration(tenant_id=tenant_id, user_id=user_id)
    return {"errors": ["lost integration"]}, None
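get_integration returns an (errors, integration) pair, so every caller destructures the tuple and checks the first element before using the second. A standalone sketch of that return convention, with a string standing in for the real integration object:

SUPPORTED = ["GITHUB", "JIRA"]

def get_tool(tool):
    tool = tool.upper()
    if tool not in SUPPORTED:
        return {"errors": [f"issue tracking tool not supported yet, available: {SUPPORTED}"]}, None
    return None, f"<{tool} integration>"  # stand-in for the real integration object

errors, integration = get_tool("jira")
print(errors, integration)   # None <JIRA integration>
errors, integration = get_tool("gitlab")
print(errors, integration)   # {'errors': [...]} None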
72
api/chalicelib/core/issues.py
Normal file
@@ -0,0 +1,72 @@

from chalicelib.utils import pg_client, helper

ISSUE_TYPES = ['click_rage', 'dead_click', 'excessive_scrolling', 'bad_request', 'missing_resource', 'memory', 'cpu',
               'slow_resource', 'slow_page_load', 'crash', 'ml_cpu', 'ml_memory', 'ml_dead_click', 'ml_click_rage',
               'ml_mouse_thrashing', 'ml_excessive_scrolling', 'ml_slow_resources', 'custom', 'js_exception',
               'custom_event_error', 'js_error']
ORDER_QUERY = """\
(CASE WHEN type = 'js_exception' THEN 0
      WHEN type = 'bad_request' THEN 1
      WHEN type = 'missing_resource' THEN 2
      WHEN type = 'click_rage' THEN 3
      WHEN type = 'dead_click' THEN 4
      WHEN type = 'memory' THEN 5
      WHEN type = 'cpu' THEN 6
      WHEN type = 'crash' THEN 7
      ELSE -1 END)::INTEGER
"""
NAME_QUERY = """\
(CASE WHEN type = 'js_exception' THEN 'Errors'
      WHEN type = 'bad_request' THEN 'Bad Requests'
      WHEN type = 'missing_resource' THEN 'Missing Images'
      WHEN type = 'click_rage' THEN 'Click Rage'
      WHEN type = 'dead_click' THEN 'Dead Clicks'
      WHEN type = 'memory' THEN 'High Memory'
      WHEN type = 'cpu' THEN 'High CPU'
      WHEN type = 'crash' THEN 'Crashes'
      ELSE type::text END)::text
"""


def get(project_id, issue_id):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            """\
            SELECT *
            FROM public.issues
            WHERE project_id = %(project_id)s
              AND issue_id = %(issue_id)s;""",
            {"project_id": project_id, "issue_id": issue_id}
        )
        cur.execute(query=query)
        data = cur.fetchone()
        return helper.dict_to_camel_case(data)


def get_by_session_id(session_id, issue_type=None):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(f"""\
                SELECT *
                FROM events_common.issues
                         INNER JOIN public.issues USING (issue_id)
                WHERE session_id = %(session_id)s {"AND type = %(type)s" if issue_type is not None else ""}
                ORDER BY timestamp;""",
                        {"session_id": session_id, "type": issue_type})
        )
        return helper.list_to_camel_case(cur.fetchall())


def get_types(project_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(f"""SELECT type,
                                   {ORDER_QUERY} >= 0 AS visible,
                                   {ORDER_QUERY} AS "order",
                                   {NAME_QUERY} AS name
                            FROM (SELECT DISTINCT type
                                  FROM public.issues
                                  WHERE project_id = %(project_id)s) AS types
                            ORDER BY "order";""", {"project_id": project_id}))
        return helper.list_to_camel_case(cur.fetchall())
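ORDER_QUERY is a SQL CASE that doubles as a sort key and, via >= 0, a visibility flag: ranked types sort first and are visible, everything else gets -1. The same ranking expressed in plain Python, for reference:

# Python equivalent of the ORDER_QUERY ranking used by get_types.
RANK = {'js_exception': 0, 'bad_request': 1, 'missing_resource': 2,
        'click_rage': 3, 'dead_click': 4, 'memory': 5, 'cpu': 6, 'crash': 7}

def order_key(issue_type):
    return RANK.get(issue_type, -1)  # -1 means "not visible", as in the SQL

types = ['crash', 'js_exception', 'ml_cpu']
types.sort(key=order_key)
print([(t, order_key(t) >= 0) for t in types])
# [('ml_cpu', False), ('js_exception', True), ('crash', True)]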
73
api/chalicelib/core/log_tool_bugsnag.py
Normal file
@@ -0,0 +1,73 @@

from chalicelib.core import log_tools
import requests

IN_TY = "bugsnag"


def list_projects(auth_token):
    r = requests.get(url="https://api.bugsnag.com/user/organizations",
                     params={"per_page": "100"},
                     headers={"Authorization": "token " + auth_token, "X-Version": "2"})
    if r.status_code != 200:
        print("=======> bugsnag get organizations: something went wrong")
        print(r)
        print(r.status_code)
        print(r.text)
        return []

    orgs = []
    for i in r.json():
        pr = requests.get(url="https://api.bugsnag.com/organizations/%s/projects" % i["id"],
                          params={"per_page": "100"},
                          headers={"Authorization": "token " + auth_token, "X-Version": "2"})
        if pr.status_code != 200:
            print("=======> bugsnag get projects: something went wrong")
            print(pr)
            print(pr.status_code)
            print(pr.text)
            continue
        orgs.append({"name": i["name"], "projects": [{"name": p["name"], "id": p["id"]} for p in pr.json()]})
    return orgs


def get_all(tenant_id):
    return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)


def get(project_id):
    return log_tools.get(project_id=project_id, integration=IN_TY)


def update(tenant_id, project_id, changes):
    options = {}
    if "authorizationToken" in changes:
        options["authorizationToken"] = changes.pop("authorizationToken")
    if "bugsnagProjectId" in changes:
        options["bugsnagProjectId"] = changes.pop("bugsnagProjectId")
    return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)


def add(tenant_id, project_id, authorization_token, bugsnag_project_id):
    options = {
        "bugsnagProjectId": bugsnag_project_id,
        "authorizationToken": authorization_token,
    }
    return log_tools.add(project_id=project_id, integration=IN_TY, options=options)


def delete(tenant_id, project_id):
    return log_tools.delete(project_id=project_id, integration=IN_TY)


def add_edit(tenant_id, project_id, data):
    s = get(project_id)
    if s is not None:
        return update(tenant_id=tenant_id, project_id=project_id,
                      changes={"authorizationToken": data["authorizationToken"],
                               "bugsnagProjectId": data["bugsnagProjectId"]})
    else:
        return add(tenant_id=tenant_id,
                   project_id=project_id,
                   authorization_token=data["authorizationToken"],
                   bugsnag_project_id=data["bugsnagProjectId"])
119
api/chalicelib/core/log_tool_cloudwatch.py
Normal file
@@ -0,0 +1,119 @@

import boto3
from chalicelib.core import log_tools

IN_TY = "cloudwatch"


def __find_groups(client, token):
    d_args = {
        "limit": 50
    }
    if token is not None:
        d_args["nextToken"] = token
    response = client.describe_log_groups(**d_args)
    response["logGroups"] = [i['logGroupName'] for i in response["logGroups"]]
    if "nextToken" not in response:
        return response["logGroups"]

    return response["logGroups"] + __find_groups(client, response["nextToken"])


def __make_stream_filter(start_time, end_time):
    def __valid_stream(stream):
        return "firstEventTimestamp" in stream and not (
                stream['firstEventTimestamp'] <= start_time and stream["lastEventTimestamp"] <= start_time
                or stream['firstEventTimestamp'] >= end_time and stream["lastEventTimestamp"] >= end_time
        )

    return __valid_stream


def __find_streams(project_id, log_group, client, token, stream_filter):
    d_args = {"logGroupName": log_group, "orderBy": 'LastEventTime', 'limit': 50}
    if token is not None and len(token) > 0:
        d_args["nextToken"] = token
    data = client.describe_log_streams(**d_args)
    streams = list(filter(stream_filter, data['logStreams']))
    if 'nextToken' not in data:
        save_new_token(project_id=project_id, token=token)
        return streams
    return streams + __find_streams(project_id, log_group, client, data['nextToken'], stream_filter)


def __find_events(client, log_group, streams, last_token, start_time, end_time):
    f_args = {
        "logGroupName": log_group,
        "logStreamNames": streams,
        "startTime": start_time,
        "endTime": end_time,
        "limit": 10000,
        "filterPattern": "asayer_session_id"
    }
    if last_token is not None:
        f_args["nextToken"] = last_token
    response = client.filter_log_events(**f_args)
    if "nextToken" not in response:
        return response["events"]

    return response["events"] + __find_events(client, log_group, streams, response["nextToken"], start_time, end_time)


def list_log_groups(aws_access_key_id, aws_secret_access_key, region):
    logs = boto3.client('logs', aws_access_key_id=aws_access_key_id,
                        aws_secret_access_key=aws_secret_access_key,
                        region_name=region)
    return __find_groups(logs, None)


def get_all(tenant_id):
    return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)


def get(project_id):
    return log_tools.get(project_id=project_id, integration=IN_TY)


def update(tenant_id, project_id, changes):
    options = {}
    if "authorization_token" in changes:
        options["authorization_token"] = changes.pop("authorization_token")
    if "project_id" in changes:
        options["project_id"] = changes.pop("project_id")
    if len(options.keys()) > 0:
        changes["options"] = options
    return log_tools.edit(project_id=project_id, integration=IN_TY, changes=changes)


def add(tenant_id, project_id, aws_access_key_id, aws_secret_access_key, log_group_name, region):
    return log_tools.add(project_id=project_id, integration=IN_TY,
                         options={"awsAccessKeyId": aws_access_key_id,
                                  "awsSecretAccessKey": aws_secret_access_key,
                                  "logGroupName": log_group_name, "region": region})


def save_new_token(project_id, token):
    update(tenant_id=None, project_id=project_id, changes={"last_token": token})


def delete(tenant_id, project_id):
    return log_tools.delete(project_id=project_id, integration=IN_TY)


def add_edit(tenant_id, project_id, data):
    s = get(project_id)
    if s is not None:
        return update(tenant_id=tenant_id, project_id=project_id,
                      changes={"awsAccessKeyId": data["awsAccessKeyId"],
                               "awsSecretAccessKey": data["awsSecretAccessKey"],
                               "logGroupName": data["logGroupName"],
                               "region": data["region"]})
    else:
        return add(tenant_id=tenant_id,
                   project_id=project_id,
                   aws_access_key_id=data["awsAccessKeyId"],
                   aws_secret_access_key=data["awsSecretAccessKey"],
                   log_group_name=data["logGroupName"],
                   region=data["region"])
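__make_stream_filter keeps a stream only when its [firstEventTimestamp, lastEventTimestamp] window overlaps the requested [start_time, end_time] window: a stream is dropped if it ends entirely before the start or begins entirely after the end. The predicate in isolation, with a quick check:

def make_stream_filter(start_time, end_time):
    def valid(stream):
        return "firstEventTimestamp" in stream and not (
                stream['firstEventTimestamp'] <= start_time and stream["lastEventTimestamp"] <= start_time
                or stream['firstEventTimestamp'] >= end_time and stream["lastEventTimestamp"] >= end_time
        )
    return valid

f = make_stream_filter(100, 200)
print(f({"firstEventTimestamp": 50, "lastEventTimestamp": 150}))  # True: overlaps the window
print(f({"firstEventTimestamp": 10, "lastEventTimestamp": 90}))   # False: ends before start_time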
43
api/chalicelib/core/log_tool_datadog.py
Normal file
@@ -0,0 +1,43 @@

from chalicelib.core import log_tools

IN_TY = "datadog"


def get_all(tenant_id):
    return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)


def get(project_id):
    return log_tools.get(project_id=project_id, integration=IN_TY)


def update(tenant_id, project_id, changes):
    options = {}
    if "apiKey" in changes:
        options["apiKey"] = changes["apiKey"]
    if "applicationKey" in changes:
        options["applicationKey"] = changes["applicationKey"]

    return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)


def add(tenant_id, project_id, api_key, application_key):
    options = {"apiKey": api_key, "applicationKey": application_key}
    return log_tools.add(project_id=project_id, integration=IN_TY, options=options)


def delete(tenant_id, project_id):
    return log_tools.delete(project_id=project_id, integration=IN_TY)


def add_edit(tenant_id, project_id, data):
    s = get(project_id)
    if s is not None:
        return update(tenant_id=tenant_id, project_id=project_id,
                      changes={"apiKey": data["apiKey"],
                               "applicationKey": data["applicationKey"]})
    else:
        return add(tenant_id=tenant_id,
                   project_id=project_id,
                   api_key=data["apiKey"],
                   application_key=data["applicationKey"])
94
api/chalicelib/core/log_tool_elasticsearch.py
Normal file
@@ -0,0 +1,94 @@

from elasticsearch import Elasticsearch, RequestsHttpConnection
from chalicelib.core import log_tools
import base64
import logging

logging.getLogger('elasticsearch').level = logging.ERROR

IN_TY = "elasticsearch"


def get_all(tenant_id):
    return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)


def get(project_id):
    return log_tools.get(project_id=project_id, integration=IN_TY)


def update(tenant_id, project_id, changes):
    options = {}

    if "host" in changes:
        options["host"] = changes["host"]
    if "apiKeyId" in changes:
        options["apiKeyId"] = changes["apiKeyId"]
    if "apiKey" in changes:
        options["apiKey"] = changes["apiKey"]
    if "indexes" in changes:
        options["indexes"] = changes["indexes"]
    if "port" in changes:
        options["port"] = changes["port"]

    return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)


def add(tenant_id, project_id, host, api_key_id, api_key, indexes, port):
    options = {
        "host": host, "apiKeyId": api_key_id, "apiKey": api_key, "indexes": indexes, "port": port
    }
    return log_tools.add(project_id=project_id, integration=IN_TY, options=options)


def delete(tenant_id, project_id):
    return log_tools.delete(project_id=project_id, integration=IN_TY)


def add_edit(tenant_id, project_id, data):
    s = get(project_id)
    if s is not None:
        return update(tenant_id=tenant_id, project_id=project_id,
                      changes={"host": data["host"], "apiKeyId": data["apiKeyId"], "apiKey": data["apiKey"],
                               "indexes": data["indexes"], "port": data["port"]})
    else:
        return add(tenant_id=tenant_id,
                   project_id=project_id,
                   host=data["host"],
                   api_key_id=data["apiKeyId"],
                   api_key=data["apiKey"],
                   indexes=data["indexes"],
                   port=data["port"])


def __get_es_client(host, port, api_key_id, api_key, use_ssl=False, timeout=29):
    host = host.replace("http://", "").replace("https://", "")
    try:
        args = {
            "hosts": [{"host": host, "port": port}],
            "use_ssl": use_ssl,
            "verify_certs": False,
            "ca_certs": False,
            "connection_class": RequestsHttpConnection,
            "timeout": timeout
        }
        if api_key_id is not None and len(api_key_id) > 0:
            # args["http_auth"] = (username, password)
            token = "ApiKey " + base64.b64encode(f"{api_key_id}:{api_key}".encode("utf-8")).decode("utf-8")
            args["headers"] = {"Authorization": token}
        es = Elasticsearch(**args)
        r = es.ping()
        if not r and not use_ssl:
            # retry over SSL before giving up
            return __get_es_client(host, port, api_key_id, api_key, use_ssl=True, timeout=timeout)
        if not r:
            return None
    except Exception as err:
        print("================exception connecting to ES host:")
        print(err)
        return None
    return es


def ping(tenant_id, host, port, apiKeyId, apiKey):
    es = __get_es_client(host, port, apiKeyId, apiKey, timeout=3)
    if es is None:
        return {"state": False}
    return {"state": es.ping()}
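__get_es_client authenticates with an ApiKey header: base64 of "id:key", prefixed with "ApiKey ". The encoding step in isolation (the credentials are dummies):

import base64

api_key_id, api_key = "my-id", "my-secret"  # dummy credentials
token = "ApiKey " + base64.b64encode(f"{api_key_id}:{api_key}".encode("utf-8")).decode("utf-8")
print(token)  # ApiKey bXktaWQ6bXktc2VjcmV0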
50
api/chalicelib/core/log_tool_newrelic.py
Normal file
@@ -0,0 +1,50 @@

from chalicelib.core import log_tools

IN_TY = "newrelic"


def get_all(tenant_id):
    return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)


def get(project_id):
    return log_tools.get(project_id=project_id, integration=IN_TY)


def update(tenant_id, project_id, changes):
    options = {}
    if "region" in changes:
        # default to the US region when an empty value is submitted
        options["region"] = changes["region"] if len(changes["region"]) > 0 else "US"
    if "applicationId" in changes:
        options["applicationId"] = changes["applicationId"]
    if "xQueryKey" in changes:
        options["xQueryKey"] = changes["xQueryKey"]

    return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)


def add(tenant_id, project_id, application_id, x_query_key, region):
    if region is None or len(region) == 0:
        region = "US"
    options = {"applicationId": application_id, "xQueryKey": x_query_key, "region": region}
    return log_tools.add(project_id=project_id, integration=IN_TY, options=options)


def delete(tenant_id, project_id):
    return log_tools.delete(project_id=project_id, integration=IN_TY)


def add_edit(tenant_id, project_id, data):
    s = get(project_id)
    if s is not None:
        return update(tenant_id=tenant_id, project_id=project_id,
                      changes={"applicationId": data["applicationId"],
                               "xQueryKey": data["xQueryKey"],
                               "region": data["region"]})
    else:
        return add(tenant_id=tenant_id,
                   project_id=project_id,
                   application_id=data["applicationId"],
                   x_query_key=data["xQueryKey"],
                   region=data["region"])
38
api/chalicelib/core/log_tool_rollbar.py
Normal file
@@ -0,0 +1,38 @@

from chalicelib.core import log_tools

IN_TY = "rollbar"


def get_all(tenant_id):
    return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)


def get(project_id):
    return log_tools.get(project_id=project_id, integration=IN_TY)


def update(tenant_id, project_id, changes):
    options = {}
    if "accessToken" in changes:
        options["accessToken"] = changes["accessToken"]
    return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)


def add(tenant_id, project_id, access_token):
    options = {"accessToken": access_token}
    return log_tools.add(project_id=project_id, integration=IN_TY, options=options)


def delete(tenant_id, project_id):
    return log_tools.delete(project_id=project_id, integration=IN_TY)


def add_edit(tenant_id, project_id, data):
    s = get(project_id)
    if s is not None:
        return update(tenant_id=tenant_id, project_id=project_id,
                      changes={"accessToken": data["accessToken"]})
    else:
        return add(tenant_id=tenant_id,
                   project_id=project_id,
                   access_token=data["accessToken"])
65
api/chalicelib/core/log_tool_sentry.py
Normal file
@@ -0,0 +1,65 @@

import requests
from chalicelib.core import log_tools

IN_TY = "sentry"


def get_all(tenant_id):
    return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)


def get(project_id):
    return log_tools.get(project_id=project_id, integration=IN_TY)


def update(tenant_id, project_id, changes):
    options = {}
    if "organizationSlug" in changes:
        options["organizationSlug"] = changes["organizationSlug"]
    if "projectSlug" in changes:
        options["projectSlug"] = changes["projectSlug"]
    if "token" in changes:
        options["token"] = changes["token"]

    return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)


def add(tenant_id, project_id, project_slug, organization_slug, token):
    options = {
        "organizationSlug": organization_slug, "projectSlug": project_slug, "token": token
    }
    return log_tools.add(project_id=project_id, integration=IN_TY, options=options)


def delete(tenant_id, project_id):
    return log_tools.delete(project_id=project_id, integration=IN_TY)


def add_edit(tenant_id, project_id, data):
    s = get(project_id)
    if s is not None:
        return update(tenant_id=tenant_id, project_id=project_id,
                      changes={"projectSlug": data["projectSlug"],
                               "organizationSlug": data["organizationSlug"],
                               "token": data["token"]})
    else:
        return add(tenant_id=tenant_id,
                   project_id=project_id,
                   project_slug=data["projectSlug"],
                   organization_slug=data["organizationSlug"], token=data["token"])


def proxy_get(tenant_id, project_id, event_id):
    i = get(project_id)
    if i is None:
        return {}
    r = requests.get(
        url="https://sentry.io/api/0/projects/%(organization_slug)s/%(project_slug)s/events/%(event_id)s/" % {
            "organization_slug": i["organizationSlug"], "project_slug": i["projectSlug"], "event_id": event_id},
        headers={"Authorization": "Bearer " + i["token"]})
    if r.status_code != 200:
        print("=======> sentry get: something went wrong")
        print(r)
        print(r.status_code)
        print(r.text)
    return r.json()
42
api/chalicelib/core/log_tool_stackdriver.py
Normal file
@@ -0,0 +1,42 @@

from chalicelib.core import log_tools

IN_TY = "stackdriver"


def get_all(tenant_id):
    return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)


def get(project_id):
    return log_tools.get(project_id=project_id, integration=IN_TY)


def update(tenant_id, project_id, changes):
    options = {}
    if "serviceAccountCredentials" in changes:
        options["serviceAccountCredentials"] = changes["serviceAccountCredentials"]
    if "logName" in changes:
        options["logName"] = changes["logName"]
    return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)


def add(tenant_id, project_id, service_account_credentials, log_name):
    options = {"serviceAccountCredentials": service_account_credentials, "logName": log_name}
    return log_tools.add(project_id=project_id, integration=IN_TY, options=options)


def delete(tenant_id, project_id):
    return log_tools.delete(project_id=project_id, integration=IN_TY)


def add_edit(tenant_id, project_id, data):
    s = get(project_id)
    if s is not None:
        return update(tenant_id=tenant_id, project_id=project_id,
                      changes={"serviceAccountCredentials": data["serviceAccountCredentials"],
                               "logName": data["logName"]})
    else:
        return add(tenant_id=tenant_id,
                   project_id=project_id,
                   service_account_credentials=data["serviceAccountCredentials"],
                   log_name=data["logName"])
53
api/chalicelib/core/log_tool_sumologic.py
Normal file
@@ -0,0 +1,53 @@

from chalicelib.core import log_tools

IN_TY = "sumologic"


def get_all(tenant_id):
    return log_tools.get_all_by_tenant(tenant_id=tenant_id, integration=IN_TY)


def get(project_id):
    return log_tools.get(project_id=project_id, integration=IN_TY)


def update(tenant_id, project_id, changes):
    options = {}

    if "region" in changes:
        options["region"] = changes["region"]

    if "accessId" in changes:
        options["accessId"] = changes["accessId"]

    if "accessKey" in changes:
        options["accessKey"] = changes["accessKey"]
    return log_tools.edit(project_id=project_id, integration=IN_TY, changes=options)


def add(tenant_id, project_id, access_id, access_key, region):
    options = {
        "accessId": access_id,
        "accessKey": access_key,
        "region": region
    }
    return log_tools.add(project_id=project_id, integration=IN_TY, options=options)


def delete(tenant_id, project_id):
    return log_tools.delete(project_id=project_id, integration=IN_TY)


def add_edit(tenant_id, project_id, data):
    s = get(project_id)
    if s is not None:
        return update(tenant_id=tenant_id, project_id=project_id,
                      changes={"accessId": data["accessId"],
                               "accessKey": data["accessKey"],
                               "region": data["region"]})
    else:
        return add(tenant_id=tenant_id,
                   project_id=project_id,
                   access_id=data["accessId"],
                   access_key=data["accessKey"],
                   region=data["region"])
118
api/chalicelib/core/log_tools.py
Normal file
@@ -0,0 +1,118 @@

from chalicelib.utils import pg_client, helper
import json

EXCEPT = ["jira_server", "jira_cloud"]


def search(project_id):
    result = []
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """\
                SELECT supported_integrations.name,
                       (SELECT COUNT(*)
                        FROM public.integrations
                                 INNER JOIN public.projects USING (project_id)
                        WHERE provider = supported_integrations.name
                          AND project_id = %(project_id)s
                          AND projects.deleted_at ISNULL
                        LIMIT 1) AS count
                FROM unnest(enum_range(NULL::integration_provider)) AS supported_integrations(name);""",
                {"project_id": project_id})
        )
        r = cur.fetchall()
    for k in r:
        if k["count"] > 0 and k["name"] not in EXCEPT:
            result.append({"value": helper.key_to_camel_case(k["name"]), "type": "logTool"})
    return {"data": result}


def add(project_id, integration, options):
    options = json.dumps(options)
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """\
                INSERT INTO public.integrations(project_id, provider, options)
                VALUES (%(project_id)s, %(provider)s, %(options)s::jsonb)
                RETURNING *;""",
                {"project_id": project_id, "provider": integration, "options": options})
        )
        r = cur.fetchone()
        return helper.dict_to_camel_case(helper.flatten_nested_dicts(r))


def get(project_id, integration):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """\
                SELECT integrations.*
                FROM public.integrations INNER JOIN public.projects USING (project_id)
                WHERE provider = %(provider)s
                  AND project_id = %(project_id)s
                  AND projects.deleted_at ISNULL
                LIMIT 1;""",
                {"project_id": project_id, "provider": integration})
        )
        r = cur.fetchone()
        return helper.dict_to_camel_case(helper.flatten_nested_dicts(r))


def get_all_by_type(integration):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """\
                SELECT integrations.*
                FROM public.integrations INNER JOIN public.projects USING (project_id)
                WHERE provider = %(provider)s AND projects.deleted_at ISNULL;""",
                {"provider": integration})
        )
        r = cur.fetchall()
        return helper.list_to_camel_case(r, flatten=True)


def edit(project_id, integration, changes):
    if "projectId" in changes:
        changes.pop("projectId")
    if "integration" in changes:
        changes.pop("integration")
    if len(changes.keys()) == 0:
        return None
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                UPDATE public.integrations
                SET options = options || %(changes)s
                WHERE project_id = %(project_id)s AND provider = %(provider)s
                RETURNING *;""",
                        {"project_id": project_id, "provider": integration, "changes": json.dumps(changes)})
        )
        return helper.dict_to_camel_case(helper.flatten_nested_dicts(cur.fetchone()))


def delete(project_id, integration):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                DELETE FROM public.integrations
                WHERE project_id = %(project_id)s AND provider = %(provider)s;""",
                        {"project_id": project_id, "provider": integration})
        )
        return {"state": "success"}


def get_all_by_tenant(tenant_id, integration):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """SELECT integrations.*
                   FROM public.integrations INNER JOIN public.projects USING (project_id)
                   WHERE provider = %(provider)s
                     AND projects.deleted_at ISNULL;""",
                {"provider": integration})
        )
        r = cur.fetchall()
        return helper.list_to_camel_case(r, flatten=True)
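edit relies on Postgres jsonb concatenation (options || changes) to merge new keys over old ones, which is why every log_tool_*.update only needs to pass the keys that changed. The merge semantics mirror a Python dict update:

# Python mirror of the jsonb merge in log_tools.edit:
#   SET options = options || %(changes)s
options = {"accessKey": "old", "region": "US"}
changes = {"accessKey": "new", "accessId": "abc"}
merged = {**options, **changes}  # right-hand side wins, like jsonb ||
print(merged)  # {'accessKey': 'new', 'region': 'US', 'accessId': 'abc'}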
262
api/chalicelib/core/metadata.py
Normal file
|
|
@ -0,0 +1,262 @@
from chalicelib.utils import pg_client, helper, dev

from chalicelib.core import projects
import re

MAX_INDEXES = 10


def _get_column_names():
    return [f"metadata_{i}" for i in range(1, MAX_INDEXES + 1)]


def get(project_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                f"""\
                SELECT {",".join(_get_column_names())}
                FROM public.projects
                WHERE project_id = %(project_id)s AND deleted_at ISNULL
                LIMIT 1;""", {"project_id": project_id})
        )
        metas = cur.fetchone()
    results = []
    for i, k in enumerate(metas.keys()):
        if metas[k] is not None:
            results.append({"key": metas[k], "index": i + 1})
    return results


regex = re.compile(r'^[a-z0-9_-]+$', re.IGNORECASE)


def index_to_colname(index):
    if index <= 0 or index > MAX_INDEXES:
        raise Exception("metadata index out of bound")
    return f"metadata_{index}"


def __get_available_index(project_id):
    used_indexes = get(project_id)
    used_indexes = [i["index"] for i in used_indexes]
    if len(used_indexes) >= MAX_INDEXES:
        return -1
    i = 1
    while i in used_indexes:
        i += 1
    return i


def __edit(project_id, col_index, colname, new_name):
    if new_name is None or len(new_name) == 0:
        return {"errors": ["key value invalid"]}
    old_metas = get(project_id)
    old_metas = {k["index"]: k for k in old_metas}
    if col_index not in list(old_metas.keys()):
        return {"errors": ["custom field doesn't exist"]}

    with pg_client.PostgresClient() as cur:
        if old_metas[col_index]["key"].lower() != new_name:
            cur.execute(cur.mogrify(f"""UPDATE public.projects
                                        SET {colname} = %(value)s
                                        WHERE project_id = %(project_id)s AND deleted_at ISNULL
                                        RETURNING {colname};""",
                                    {"project_id": project_id, "value": new_name}))
            new_name = cur.fetchone()[colname]
            old_metas[col_index]["key"] = new_name
    return {"data": old_metas[col_index]}


def edit(tenant_id, project_id, index: int, new_name: str):
    return __edit(project_id=project_id, col_index=index, colname=index_to_colname(index), new_name=new_name)


def delete(tenant_id, project_id, index: int):
    index = int(index)
    old_segments = get(project_id)
    old_segments = [k["index"] for k in old_segments]
    if index not in old_segments:
        return {"errors": ["custom field doesn't exist"]}

    with pg_client.PostgresClient() as cur:
        colname = index_to_colname(index)
        query = cur.mogrify(f"""UPDATE public.projects
                                SET {colname}= NULL
                                WHERE project_id = %(project_id)s AND deleted_at ISNULL;""",
                            {"project_id": project_id})
        cur.execute(query=query)
        query = cur.mogrify(f"""UPDATE public.sessions
                                SET {colname}= NULL
                                WHERE project_id = %(project_id)s""",
                            {"project_id": project_id})
        cur.execute(query=query)

    return {"data": get(project_id)}


def add(tenant_id, project_id, new_name):
    index = __get_available_index(project_id=project_id)
    if index < 1:
        return {"errors": ["maximum allowed metadata reached"]}
    with pg_client.PostgresClient() as cur:
        colname = index_to_colname(index)
        cur.execute(
            cur.mogrify(
                f"""UPDATE public.projects SET {colname}= %(key)s WHERE project_id =%(project_id)s RETURNING {colname};""",
                {"key": new_name, "project_id": project_id}))
        col_val = cur.fetchone()[colname]
    return {"data": {"key": col_val, "index": index}}


def search(tenant_id, project_id, key, value):
    value = value + "%"
    s_query = []
    for f in _get_column_names():
        s_query.append(f"CASE WHEN {f}=%(key)s THEN TRUE ELSE FALSE END AS {f}")

    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                f"""\
                SELECT {",".join(s_query)}
                FROM public.projects
                WHERE project_id = %(project_id)s AND deleted_at ISNULL
                LIMIT 1;""",
                {"key": key, "project_id": project_id})
        )
        all_metas = cur.fetchone()
        key = None
        for c in all_metas:
            if all_metas[c]:
                key = c
                break
        if key is None:
            return {"errors": ["key does not exist"]}
        cur.execute(
            cur.mogrify(
                f"""\
                SELECT DISTINCT "{key}" AS "{key}"
                FROM public.sessions
                {f'WHERE "{key}"::text ILIKE %(value)s' if value is not None and len(value) > 0 else ""}
                ORDER BY "{key}"
                LIMIT 20;""",
                {"value": value, "project_id": project_id})
        )
        value = cur.fetchall()
    return {"data": [k[key] for k in value]}


def get_available_keys(project_id):
    all_metas = get(project_id=project_id)
    return [k["key"] for k in all_metas]


def get_by_session_id(project_id, session_id):
    all_metas = get(project_id=project_id)
    if len(all_metas) == 0:
        return []
    keys = {index_to_colname(k["index"]): k["key"] for k in all_metas}
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                f"""\
                SELECT {",".join(keys.keys())}
                FROM public.sessions
                WHERE project_id= %(project_id)s AND session_id=%(session_id)s;""",
                {"session_id": session_id, "project_id": project_id})
        )
        session_metas = cur.fetchall()
    results = []
    for m in session_metas:
        r = {}
        for k in m.keys():
            r[keys[k]] = m[k]
        results.append(r)
    return results


def get_keys_by_projects(project_ids):
    if project_ids is None or len(project_ids) == 0:
        return {}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(
            f"""\
            SELECT project_id,
                   {",".join(_get_column_names())}
            FROM public.projects
            WHERE project_id IN %(project_ids)s AND deleted_at ISNULL;""",
            {"project_ids": tuple(project_ids)})

        cur.execute(query)
        rows = cur.fetchall()
    results = {}
    for r in rows:
        project_id = r.pop("project_id")
        results[project_id] = {}
        for m in r:
            if r[m] is not None:
                results[project_id][m] = r[m]
    return results


def add_edit_delete(tenant_id, project_id, new_metas):
    old_metas = get(project_id)
    old_indexes = [k["index"] for k in old_metas]
    new_indexes = [k["index"] for k in new_metas if "index" in k]
    new_keys = [k["key"] for k in new_metas]

    add_metas = [k["key"] for k in new_metas if "index" not in k]
    new_metas = {k["index"]: {"key": k["key"]} for k in new_metas if "index" in k}
    old_metas = {k["index"]: {"key": k["key"]} for k in old_metas}

    if len(new_keys) > 20:
        return {"errors": ["you cannot add more than 20 keys"]}
    for k in new_metas.keys():
        if re.match(regex, new_metas[k]["key"]) is None:
            return {"errors": [f"invalid key {k}"]}
    for k in add_metas:
        if re.match(regex, k) is None:
            return {"errors": [f"invalid key {k}"]}
    if len(new_indexes) > len(set(new_indexes)):
        return {"errors": ["duplicate indexes"]}
    if len(new_keys) > len(set(new_keys)):
        return {"errors": ["duplicate keys"]}
    to_delete = list(set(old_indexes) - set(new_indexes))

    with pg_client.PostgresClient() as cur:
        for d in to_delete:
            delete(tenant_id=tenant_id, project_id=project_id, index=d)

        for k in add_metas:
            add(tenant_id=tenant_id, project_id=project_id, new_name=k)

        for k in new_metas.keys():
            if new_metas[k]["key"].lower() != old_metas[k]["key"]:
                edit(tenant_id=tenant_id, project_id=project_id, index=k, new_name=new_metas[k]["key"])

    return {"data": get(project_id)}


@dev.timed
def get_remaining_metadata_with_count(tenant_id):
    all_projects = projects.get_projects(tenant_id=tenant_id)
    results = []
    for p in all_projects:
        used_metas = get(p["projectId"])
        if MAX_INDEXES < 0:
            remaining = -1
        else:
            remaining = MAX_INDEXES - len(used_metas)
        results.append({**p, "limit": MAX_INDEXES, "remaining": remaining, "count": len(used_metas)})

    return results
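A hypothetical walk-through of the metadata API above (tenant and project ids are illustrative; assumes the project still has free metadata slots):

from chalicelib.core import metadata

r = metadata.add(tenant_id=1, project_id=1, new_name="plan")   # {"data": {"key": "plan", "index": 1}}
metadata.edit(tenant_id=1, project_id=1, index=1, new_name="tier")
print(metadata.get_available_keys(project_id=1))               # ["tier"]
metadata.delete(tenant_id=1, project_id=1, index=1)            # clears metadata_1 on projects and sessions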
80
api/chalicelib/core/notifications.py
Normal file
@ -0,0 +1,80 @@
import json

from chalicelib.utils import pg_client, helper
from chalicelib.utils.TimeUTC import TimeUTC


def get_all(tenant_id, user_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                SELECT notifications.*,
                       user_viewed_notifications.notification_id NOTNULL AS viewed
                FROM public.notifications
                     LEFT JOIN (SELECT notification_id
                                FROM public.user_viewed_notifications
                                WHERE user_viewed_notifications.user_id = %(user_id)s) AS user_viewed_notifications USING (notification_id)
                WHERE notifications.user_id IS NULL OR notifications.user_id =%(user_id)s
                ORDER BY created_at DESC
                LIMIT 100;""",
                        {"user_id": user_id})
        )
        rows = helper.list_to_camel_case(cur.fetchall())
    for r in rows:
        r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"])
    return rows


def view_notification(user_id, notification_ids=[], tenant_id=None, startTimestamp=None, endTimestamp=None):
    if (notification_ids is None or len(notification_ids) == 0) and endTimestamp is None:
        return False
    if startTimestamp is None:
        startTimestamp = 0
    notification_ids = [(user_id, id) for id in notification_ids]
    with pg_client.PostgresClient() as cur:
        if len(notification_ids) > 0:
            cur.executemany(
                "INSERT INTO public.user_viewed_notifications(user_id, notification_id) VALUES (%s,%s) ON CONFLICT DO NOTHING;",
                notification_ids)
        else:
            query = """INSERT INTO public.user_viewed_notifications(user_id, notification_id)
                       SELECT %(user_id)s AS user_id, notification_id
                       FROM public.notifications
                       WHERE (user_id IS NULL OR user_id =%(user_id)s)
                         AND EXTRACT(EPOCH FROM created_at)*1000>=(%(startTimestamp)s)
                         AND EXTRACT(EPOCH FROM created_at)*1000<=(%(endTimestamp)s+1000)
                       ON CONFLICT DO NOTHING;"""
            params = {"user_id": user_id, "startTimestamp": startTimestamp,
                      "endTimestamp": endTimestamp}
            # print('-------------------')
            # print(cur.mogrify(query, params))
            cur.execute(cur.mogrify(query, params))
    return True


def create(notifications):
    if len(notifications) == 0:
        return []
    with pg_client.PostgresClient() as cur:
        values = []
        for n in notifications:
            clone = dict(n)
            if "userId" not in clone:
                clone["userId"] = None
            if "options" not in clone:
                clone["options"] = '{}'
            else:
                clone["options"] = json.dumps(clone["options"])
            values.append(
                cur.mogrify(
                    "(%(userId)s, %(title)s, %(description)s, %(buttonText)s, %(buttonUrl)s, %(imageUrl)s,%(options)s)",
                    clone).decode('UTF-8')
            )
        cur.execute(
            f"""INSERT INTO public.notifications(user_id, title, description, button_text, button_url, image_url, options)
                VALUES {",".join(values)} RETURNING *;""")
        rows = helper.list_to_camel_case(cur.fetchall())
    for r in rows:
        r["createdAt"] = TimeUTC.datetime_to_timestamp(r["createdAt"])
        r["viewed"] = False
    return rows
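Illustrative payloads for the notifications API above (all field values are hypothetical; userId may be omitted to target every user of the tenant):

from chalicelib.core import notifications

created = notifications.create([{"title": "New release",
                                 "description": "Replay quality improved.",
                                 "buttonText": "Read more",
                                 "buttonUrl": "https://example.com/changelog",  # hypothetical URL
                                 "imageUrl": None,
                                 "options": {"source": "release-notes"}}])
# Mark everything created up to that notification as viewed for user 42:
notifications.view_notification(user_id=42, startTimestamp=0,
                                endTimestamp=created[0]["createdAt"])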
229
api/chalicelib/core/projects.py
Normal file
@ -0,0 +1,229 @@
import json

from chalicelib.core import users
from chalicelib.utils import pg_client, helper, dev
from chalicelib.utils.TimeUTC import TimeUTC


def __update(tenant_id, project_id, changes):
    if len(changes.keys()) == 0:
        return None

    sub_query = []
    for key in changes.keys():
        sub_query.append(f"{helper.key_to_snake_case(key)} = %({key})s")
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(f"""\
                UPDATE public.projects
                SET {" ,".join(sub_query)}
                WHERE project_id = %(project_id)s
                  AND deleted_at ISNULL
                RETURNING project_id,name,gdpr;""",
                        {"project_id": project_id, **changes})
        )
        return helper.dict_to_camel_case(cur.fetchone())


def __create(tenant_id, name):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                INSERT INTO public.projects (name, active)
                VALUES (%(name)s,TRUE)
                RETURNING project_id;""",
                        {"name": name})
        )
        project_id = cur.fetchone()["project_id"]
    return get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True)


@dev.timed
def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, stack_integrations=False):
    with pg_client.PostgresClient() as cur:
        cur.execute(f"""\
            SELECT s.project_id, s.name, s.project_key
                   {',s.gdpr' if gdpr else ''}
                   {',COALESCE((SELECT TRUE FROM public.sessions WHERE sessions.project_id = s.project_id LIMIT 1), FALSE) AS recorded' if recorded else ''}
                   {',stack_integrations.count>0 AS stack_integrations' if stack_integrations else ''}
            FROM public.projects AS s
                 {'LEFT JOIN LATERAL (SELECT COUNT(*) AS count FROM public.integrations WHERE s.project_id = integrations.project_id LIMIT 1) AS stack_integrations ON TRUE' if stack_integrations else ''}
            WHERE s.deleted_at IS NULL
            ORDER BY s.project_id;""")
        rows = cur.fetchall()
        if recording_state:
            for r in rows:
                query = cur.mogrify(
                    "SELECT COALESCE(MAX(start_ts),0) AS last FROM public.sessions WHERE project_id=%(project_id)s;",
                    {"project_id": r["project_id"]})
                cur.execute(query=query)
                status = cur.fetchone()
                if status["last"] < TimeUTC.now(-2):
                    r["status"] = "red"
                elif status["last"] < TimeUTC.now(-1):
                    r["status"] = "yellow"
                else:
                    r["status"] = "green"

        return helper.list_to_camel_case(rows)


def get_project(tenant_id, project_id, include_last_session=False, include_gdpr=None):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""\
            SELECT s.project_id,
                   s.project_key,
                   s.name
                   {",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_id = %(project_id)s) AS last_recorded_session_at" if include_last_session else ""}
                   {',s.gdpr' if include_gdpr else ''}
            FROM public.projects AS s
            WHERE s.project_id =%(project_id)s
              AND s.deleted_at IS NULL
            LIMIT 1;""",
                            {"project_id": project_id})

        cur.execute(query=query)
        row = cur.fetchone()
    return helper.dict_to_camel_case(row)


def create(tenant_id, user_id, data):
    admin = users.get(user_id=user_id, tenant_id=tenant_id)
    if not admin["admin"] and not admin["superAdmin"]:
        return {"errors": ["unauthorized"]}
    return {"data": __create(tenant_id=tenant_id, name=data.get("name", "my first project"))}


def edit(tenant_id, user_id, project_id, data):
    admin = users.get(user_id=user_id, tenant_id=tenant_id)
    if not admin["admin"] and not admin["superAdmin"]:
        return {"errors": ["unauthorized"]}
    return {"data": __update(tenant_id=tenant_id, project_id=project_id,
                             changes={"name": data.get("name", "my first project")})}


def delete(tenant_id, user_id, project_id):
    admin = users.get(user_id=user_id, tenant_id=tenant_id)

    if not admin["admin"] and not admin["superAdmin"]:
        return {"errors": ["unauthorized"]}
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                UPDATE public.projects
                SET deleted_at = timezone('utc'::text, now()),
                    active = FALSE
                WHERE project_id = %(project_id)s;""",
                        {"project_id": project_id})
        )
    return {"data": {"state": "success"}}


def count_by_tenant(tenant_id):
    with pg_client.PostgresClient() as cur:
        cur.execute("""\
            SELECT count(s.project_id)
            FROM public.projects AS s
            WHERE s.deleted_at IS NULL;""")
        return cur.fetchone()["count"]


def get_gdpr(project_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                SELECT gdpr
                FROM public.projects AS s
                WHERE s.project_id =%(project_id)s
                  AND s.deleted_at IS NULL;""",
                        {"project_id": project_id})
        )
        return cur.fetchone()["gdpr"]


def edit_gdpr(project_id, gdpr):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                UPDATE public.projects
                SET gdpr = gdpr|| %(gdpr)s
                WHERE project_id = %(project_id)s
                  AND deleted_at ISNULL
                RETURNING gdpr;""",
                        {"project_id": project_id, "gdpr": json.dumps(gdpr)})
        )
        return cur.fetchone()["gdpr"]


def get_internal_project_id(project_key):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                SELECT project_id
                FROM public.projects
                WHERE project_key =%(project_key)s AND deleted_at ISNULL;""",
                        {"project_key": project_key})
        )
        return cur.fetchone()["project_id"]


def get_project_key(project_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                SELECT project_key
                FROM public.projects
                WHERE project_id =%(project_id)s AND deleted_at ISNULL;""",
                        {"project_id": project_id})
        )
        return cur.fetchone()["project_key"]


def get_capture_status(project_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                SELECT sample_rate AS rate, sample_rate=100 AS capture_all
                FROM public.projects
                WHERE project_id =%(project_id)s AND deleted_at ISNULL;""",
                        {"project_id": project_id})
        )
        return helper.dict_to_camel_case(cur.fetchone())


def update_capture_status(project_id, changes):
    if "rate" not in changes and "captureAll" not in changes:
        return {"errors": ["please provide 'rate' and/or 'captureAll' attributes to update."]}
    sample_rate = 0
    if "rate" in changes:
        if int(changes["rate"]) < 0 or int(changes["rate"]) > 100:
            return {"errors": ["'rate' must be between 0..100."]}
        sample_rate = int(changes["rate"])
    if changes.get("captureAll"):
        sample_rate = 100
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                UPDATE public.projects
                SET sample_rate= %(sample_rate)s
                WHERE project_id =%(project_id)s AND deleted_at ISNULL;""",
                        {"project_id": project_id, "sample_rate": sample_rate})
        )

    return changes
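A sketch of the capture-status semantics above (ids illustrative): captureAll forces sample_rate to 100, otherwise rate is applied as a 0..100 percentage.

from chalicelib.core import projects

projects.update_capture_status(project_id=1, changes={"rate": 30})           # record ~30% of sessions
projects.update_capture_status(project_id=1, changes={"captureAll": True})   # record everything
print(projects.get_capture_status(project_id=1))                             # {"rate": 100, "captureAll": True}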
66
api/chalicelib/core/reset_password.py
Normal file
@ -0,0 +1,66 @@
import secrets

import chalicelib.utils.TimeUTC
from chalicelib.utils import email_helper, captcha, helper
from chalicelib.utils import pg_client

from chalicelib.core import users


def step1(data):
    print("====================== reset password 1 ===============")
    print(data)
    if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]):
        print("error: Invalid captcha.")
        return {"errors": ["Invalid captcha."]}
    if "email" not in data:
        return {"errors": ["email not found in body"]}

    a_users = users.get_by_email_only(data["email"])
    if len(a_users) > 1:
        print(f"multiple users found for [{data['email']}] please contact our support")
        return {"errors": ["please contact our support"]}
    elif len(a_users) == 1:
        a_users = a_users[0]
        reset_token = secrets.token_urlsafe(6)
        users.update(tenant_id=a_users["tenantId"], user_id=a_users["id"],
                     changes={"token": reset_token})
        email_helper.send_reset_code(recipient=data["email"], reset_code=reset_token)
    else:
        print(f"invalid email address [{data['email']}]")
        return {"errors": ["invalid email address"]}
    return {"data": {"state": "success"}}


def step2(data):
    print("====================== change password 2 ===============")
    user = users.get_by_email_reset(data["email"], data["code"])
    if not user:
        print("error: wrong email or reset code")
        return {"errors": ["wrong email or reset code"]}
    users.update(tenant_id=user["tenantId"], user_id=user["id"],
                 changes={"token": None, "password": data["password"], "generatedPassword": False,
                          "verifiedEmail": True})
    return {"data": {"state": "success"}}


def cron():
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify("""\
                SELECT user_id
                FROM public.basic_authentication
                WHERE token NOTNULL
                  AND (token_requested_at ISNULL OR (EXTRACT(EPOCH FROM token_requested_at)*1000)::BIGINT < %(time)s);""",
                        {"time": chalicelib.utils.TimeUTC.TimeUTC.now(delta_days=-1)})
        )
        results = cur.fetchall()
        if len(results) == 0:
            return
        results = tuple([r["user_id"] for r in results])
        cur.execute(
            cur.mogrify("""\
                UPDATE public.basic_authentication
                SET token = NULL, token_requested_at = NULL
                WHERE user_id IN %(ids)s;""",
                        {"ids": results})
        )
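The two-step flow above, end to end (email and code values are hypothetical):

from chalicelib.core import reset_password

reset_password.step1({"email": "jane@example.com",
                      "g-recaptcha-response": "..."})    # emails a urlsafe reset code
reset_password.step2({"email": "jane@example.com",
                      "code": "the-emailed-code",
                      "password": "new-password"})       # clears the token and sets the password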
22
api/chalicelib/core/resources.py
Normal file
@ -0,0 +1,22 @@
from chalicelib.utils import helper, pg_client


def get_by_session_id(session_id):
    with pg_client.PostgresClient() as cur:
        ch_query = """\
            SELECT timestamp AS datetime,
                   url,
                   type,
                   duration,
                   ttfb,
                   header_size,
                   encoded_body_size,
                   decoded_body_size,
                   success
            FROM events.resources
            WHERE session_id = %(session_id)s;"""
        params = {"session_id": session_id}
        cur.execute(cur.mogrify(ch_query, params))
        rows = cur.fetchall()
        return helper.list_to_camel_case(rows)
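For reference, a sketch of the row shape get_by_session_id returns after camel-casing (keys follow the SELECT above; values are hypothetical):

[{"datetime": 1600000000000, "url": "https://app.example.com/main.css",
  "type": "stylesheet", "duration": 12, "ttfb": 3, "headerSize": 310,
  "encodedBodySize": 5120, "decodedBodySize": 20480, "success": True}]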
620
api/chalicelib/core/sessions.py
Normal file
@ -0,0 +1,620 @@
from chalicelib.utils import pg_client, helper
from chalicelib.utils import dev
from chalicelib.core import events, sessions_metas, socket_ios, metadata, events_ios, sessions_mobs

if helper.is_free_open_source_edition():
    from chalicelib.core import projects, errors
else:
    from chalicelib.ee import projects, errors

from chalicelib.core import resources

SESSION_PROJECTION_COLS = """s.project_id,
s.session_id::text AS session_id,
s.user_uuid,
s.user_id,
s.user_agent,
s.user_os,
s.user_browser,
s.user_device,
s.user_device_type,
s.user_country,
s.start_ts,
s.duration,
s.events_count,
s.pages_count,
s.errors_count,
s.user_anonymous_id,
s.platform,
s.issue_score,
s.issue_types::text[] AS issue_types,
favorite_sessions.session_id NOTNULL AS favorite,
COALESCE((SELECT TRUE
          FROM public.user_viewed_sessions AS fs
          WHERE s.session_id = fs.session_id
            AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
"""


def __group_metadata(session, project_metadata):
    meta = []
    for m in project_metadata.keys():
        if project_metadata[m] is not None and session.get(m) is not None:
            meta.append({project_metadata[m]: session[m]})
        session.pop(m)
    return meta


def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_viewed=False, group_metadata=False):
    with pg_client.PostgresClient() as cur:
        extra_query = []
        if include_fav_viewed:
            extra_query.append("""COALESCE((SELECT TRUE
                                            FROM public.user_favorite_sessions AS fs
                                            WHERE s.session_id = fs.session_id
                                              AND fs.user_id = %(userId)s), FALSE) AS favorite""")
            extra_query.append("""COALESCE((SELECT TRUE
                                            FROM public.user_viewed_sessions AS fs
                                            WHERE s.session_id = fs.session_id
                                              AND fs.user_id = %(userId)s), FALSE) AS viewed""")
        query = cur.mogrify(
            f"""\
            SELECT s.*,
                   s.session_id::text AS session_id
                   {"," if len(extra_query) > 0 else ""}{",".join(extra_query)}
                   {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata._get_column_names()]) + ") AS project_metadata") if group_metadata else ''}
            FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""}
            WHERE s.project_id = %(project_id)s
              AND s.session_id = %(session_id)s;""",
            {"project_id": project_id, "session_id": session_id, "userId": user_id}
        )
        # print("===============")
        # print(query)
        cur.execute(query=query)

        data = cur.fetchone()
        if data is not None:
            data = helper.dict_to_camel_case(data)
            if full_data:
                if data["platform"] == 'ios':
                    data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id)
                    for e in data['events']:
                        if e["type"].endswith("_IOS"):
                            e["type"] = e["type"][:-len("_IOS")]
                    data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id)
                    data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id,
                                                                             session_id=session_id)
                    data['mobsUrl'] = sessions_mobs.get_ios(sessionId=session_id)
                    data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
                    data["socket"] = socket_ios.start_replay(project_id=project_id, session_id=session_id,
                                                             device=data["userDevice"],
                                                             os_version=data["userOsVersion"],
                                                             mob_url=data["mobsUrl"])
                else:
                    data['events'] = events.get_by_sessionId2_pg(project_id=project_id, session_id=session_id,
                                                                 group_clickrage=True)
                    all_errors = events.get_errors_by_session_id(session_id=session_id)
                    data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"]
                    # keep only the first stack, and limit the number of errors
                    # to reduce the response-body size
                    data['errors'] = [errors.format_first_stack_frame(e) for e in all_errors
                                      if e['source'] == "js_exception"][:500]
                    data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
                                                                             session_id=session_id)
                    data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id)
                    data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
                    data['resources'] = resources.get_by_session_id(session_id=session_id)

            return data
    return None


def sessions_args(args, params):
    if params is not None:
        for key in ['userOs', 'userBrowser', 'userCountry', 'path', 'path_in_order', 'after', 'minDuration',
                    'maxDuration', 'sortSessions', 'eventsCount', 'consoleLogCount', 'startDate', 'endDate',
                    'consoleLog', 'location']:
            args[key] = params.get(key)


new_line = "\n"


def __get_sql_operator(op):
    op = op.lower()
    return "=" if op == "is" or op == "on" \
        else "!=" if op == "isnot" \
        else "ILIKE" if op == "contains" \
        else "NOT ILIKE" if op == "notcontains" \
        else "="


def __is_negation_operator(op):
    return op in ("!=", "NOT ILIKE")


def __reverse_sql_operator(op):
    return "=" if op == "!=" else "!=" if op == "=" else "ILIKE" if op == "NOT ILIKE" else "NOT ILIKE"


def __get_sql_operator_multiple(op):
    op = op.lower()
    return " IN " if op == "is" else " NOT IN "


def __get_sql_operator_boolean(op):
    return op.lower() == "true"


def __get_sql_value_multiple(values):
    if isinstance(values, tuple):
        return values
    return tuple(values)
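How the operator helpers above map UI operators to SQL, shown as assertions (callable as-is from within this module):

assert __get_sql_operator("is") == "="
assert __get_sql_operator("notContains") == "NOT ILIKE"
assert __is_negation_operator("NOT ILIKE")
assert __reverse_sql_operator("NOT ILIKE") == "ILIKE"
assert __get_sql_operator_multiple("isNot") == " NOT IN "
assert __get_sql_value_multiple(["Chrome", "Firefox"]) == ("Chrome", "Firefox")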
@dev.timed
def search2_pg(data, project_id, user_id, favorite_only=False, errors_only=False, error_status="ALL",
               count_only=False, issue=None):
    sessions = []
    generic_args = {"startDate": data['startDate'], "endDate": data['endDate'],
                    "projectId": project_id,
                    "userId": user_id}
    with pg_client.PostgresClient() as cur:
        extra_constraints = [
            cur.mogrify("s.project_id = %(project_id)s", {"project_id": project_id}),
            cur.mogrify("s.duration IS NOT NULL", {})
        ]
        extra_from = ""
        fav_only_join = ""
        if favorite_only and not errors_only:
            fav_only_join = "LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id"
            extra_constraints.append(cur.mogrify("fs.user_id = %(userId)s", {"userId": user_id}))
        events_query_part = ""
        strict = True

        if len(data.get("events", [])) > 0:
            events_query_from = []
            event_index = 0

            for event in data["events"]:
                # TODO: remove this when message_id is removed
                seq_id = False
                event_type = event["type"].upper()
                if event.get("operator") is None:
                    event["operator"] = "is"
                op = __get_sql_operator(event["operator"])
                is_not = False
                if __is_negation_operator(op) and event_index > 0:
                    is_not = True
                    op = __reverse_sql_operator(op)
                event_from = "%s INNER JOIN public.sessions AS ms USING (session_id)"
                event_where = ["ms.project_id = %(projectId)s", "main.timestamp >= %(startDate)s",
                               "main.timestamp <= %(endDate)s", "ms.start_ts >= %(startDate)s",
                               "ms.start_ts <= %(endDate)s"]
                event_args = {"value": helper.string_to_sql_like_with_op(event['value'], op)}
                # skip unsupported event types, and skip empty values except for
                # error events, which may filter on source alone
                if event_type not in list(events.SUPPORTED_TYPES.keys()) \
                        or event.get("value") in [None, "", "*"] \
                        and (event_type != events.event_type.ERROR.ui_type
                             and event_type != events.event_type.ERROR_IOS.ui_type):
                    continue
                if event_type == events.event_type.CLICK.ui_type:
                    event_from = event_from % f"{events.event_type.CLICK.table} AS main "
                    event_where.append(f"main.{events.event_type.CLICK.column} {op} %(value)s")

                elif event_type == events.event_type.INPUT.ui_type:
                    event_from = event_from % f"{events.event_type.INPUT.table} AS main "
                    event_where.append(f"main.{events.event_type.INPUT.column} {op} %(value)s")
                    if len(event.get("custom", "")) > 0:
                        event_where.append("main.value ILIKE %(custom)s")
                        event_args["custom"] = helper.string_to_sql_like_with_op(event['custom'], "ILIKE")
                elif event_type == events.event_type.LOCATION.ui_type:
                    event_from = event_from % f"{events.event_type.LOCATION.table} AS main "
                    event_where.append(f"main.{events.event_type.LOCATION.column} {op} %(value)s")
                elif event_type == events.event_type.CUSTOM.ui_type:
                    seq_id = True
                    event_from = event_from % f"{events.event_type.CUSTOM.table} AS main "
                    event_where.append(f"main.{events.event_type.CUSTOM.column} {op} %(value)s")
                elif event_type == events.event_type.REQUEST.ui_type:
                    seq_id = True
                    event_from = event_from % f"{events.event_type.REQUEST.table} AS main "
                    event_where.append(f"main.{events.event_type.REQUEST.column} {op} %(value)s")
                elif event_type == events.event_type.GRAPHQL.ui_type:
                    event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main "
                    event_where.append(f"main.{events.event_type.GRAPHQL.column} {op} %(value)s")
                elif event_type == events.event_type.STATEACTION.ui_type:
                    event_from = event_from % f"{events.event_type.STATEACTION.table} AS main "
                    event_where.append(f"main.{events.event_type.STATEACTION.column} {op} %(value)s")
                elif event_type == events.event_type.ERROR.ui_type:
                    if event.get("source") in [None, "*", ""]:
                        event["source"] = "js_exception"
                    event_from = event_from % f"{events.event_type.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)"
                    if event.get("value") not in [None, "*", ""]:
                        event_where.append(f"(main1.message {op} %(value)s OR main1.name {op} %(value)s)")
                        if event.get("source") not in [None, "*", ""]:
                            event_where.append("main1.source = %(source)s")
                            event_args["source"] = event["source"]
                    elif event.get("source") not in [None, "*", ""]:
                        event_where.append("main1.source = %(source)s")
                        event_args["source"] = event["source"]

                # ----- IOS
                elif event_type == events.event_type.CLICK_IOS.ui_type:
                    seq_id = True
                    event_from = event_from % f"{events.event_type.CLICK_IOS.table} AS main "
                    event_where.append(f"main.{events.event_type.CLICK_IOS.column} {op} %(value)s")

                elif event_type == events.event_type.INPUT_IOS.ui_type:
                    seq_id = True
                    event_from = event_from % f"{events.event_type.INPUT_IOS.table} AS main "
                    event_where.append(f"main.{events.event_type.INPUT_IOS.column} {op} %(value)s")

                    if len(event.get("custom", "")) > 0:
                        event_where.append("main.value ILIKE %(custom)s")
                        event_args["custom"] = helper.string_to_sql_like_with_op(event['custom'], "ILIKE")
                elif event_type == events.event_type.VIEW_IOS.ui_type:
                    seq_id = True
                    event_from = event_from % f"{events.event_type.VIEW_IOS.table} AS main "
                    event_where.append(f"main.{events.event_type.VIEW_IOS.column} {op} %(value)s")
                elif event_type == events.event_type.CUSTOM_IOS.ui_type:
                    seq_id = True
                    event_from = event_from % f"{events.event_type.CUSTOM_IOS.table} AS main "
                    event_where.append(f"main.{events.event_type.CUSTOM_IOS.column} {op} %(value)s")
                elif event_type == events.event_type.REQUEST_IOS.ui_type:
                    seq_id = True
                    event_from = event_from % f"{events.event_type.REQUEST_IOS.table} AS main "
                    event_where.append(f"main.{events.event_type.REQUEST_IOS.column} {op} %(value)s")
                elif event_type == events.event_type.ERROR_IOS.ui_type:
                    seq_id = True
                    event_from = event_from % f"{events.event_type.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)"
                    if event.get("value") not in [None, "*", ""]:
                        event_where.append(f"(main1.reason {op} %(value)s OR main1.name {op} %(value)s)")

                else:
                    continue

                event_index += 1
                if is_not:
                    event_from += f""" LEFT JOIN (SELECT session_id FROM {event_from} WHERE {" AND ".join(event_where)}) AS left_not USING (session_id)"""
                    event_where[-1] = "left_not.session_id ISNULL"
                events_query_from.append(cur.mogrify(f"""\
                    (SELECT main.session_id,
                            {'seq_index' if seq_id else 'message_id %%%% 2147483647 AS seq_index'},
                            timestamp,
                            {event_index} AS funnel_step
                     FROM {event_from}
                     WHERE {" AND ".join(event_where)}
                    )""", {**generic_args, **event_args}).decode('UTF-8'))

            if len(events_query_from) > 0:
                events_query_part = f"""\
                    SELECT session_id,
                           MIN(timestamp) AS first_event_ts,
                           MAX(timestamp) AS last_event_ts
                    FROM ({" UNION ALL ".join(events_query_from)}) AS f_query
                    GROUP BY 1
                    {"" if event_index < 2 else f"HAVING events.funnel(array_agg(funnel_step ORDER BY timestamp,seq_index ASC), {event_index})" if strict else f"HAVING array_length(array_agg(DISTINCT funnel_step), 1) = {len(data['events'])}"}
                    {fav_only_join}
                    """
            else:
                data["events"] = []

        # ---------------------------------------------------------------------------
        if "filters" in data:
            meta_keys = metadata.get(project_id=project_id)
            meta_keys = {m["key"]: m["index"] for m in meta_keys}
            for f in data["filters"]:
                if not isinstance(f.get("value"), list):
                    f["value"] = [f.get("value")]
                if len(f["value"]) == 0 or f["value"][0] is None:
                    continue
                filter_type = f["type"].upper()
                f["value"] = __get_sql_value_multiple(f["value"])
                if filter_type == sessions_metas.meta_type.USERBROWSER:
                    op = __get_sql_operator_multiple(f["operator"])
                    extra_constraints.append(
                        cur.mogrify(f's.user_browser {op} %(value)s', {"value": f["value"]}))

                elif filter_type in [sessions_metas.meta_type.USEROS, sessions_metas.meta_type.USEROS_IOS]:
                    op = __get_sql_operator_multiple(f["operator"])
                    extra_constraints.append(cur.mogrify(f's.user_os {op} %(value)s', {"value": f["value"]}))

                elif filter_type in [sessions_metas.meta_type.USERDEVICE, sessions_metas.meta_type.USERDEVICE_IOS]:
                    op = __get_sql_operator_multiple(f["operator"])
                    extra_constraints.append(cur.mogrify(f's.user_device {op} %(value)s', {"value": f["value"]}))

                elif filter_type in [sessions_metas.meta_type.USERCOUNTRY, sessions_metas.meta_type.USERCOUNTRY_IOS]:
                    op = __get_sql_operator_multiple(f["operator"])
                    extra_constraints.append(cur.mogrify(f's.user_country {op} %(value)s', {"value": f["value"]}))
                elif filter_type == "DURATION":
                    if len(f["value"]) > 0 and f["value"][0] is not None:
                        extra_constraints.append(
                            cur.mogrify("s.duration >= %(minDuration)s", {"minDuration": f["value"][0]}))
                    if len(f["value"]) > 1 and f["value"][1] is not None and f["value"][1] > 0:
                        extra_constraints.append(
                            cur.mogrify("s.duration <= %(maxDuration)s", {"maxDuration": f["value"][1]}))
                elif filter_type == sessions_metas.meta_type.REFERRER:
                    # events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)"
                    extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)"
                    op = __get_sql_operator_multiple(f["operator"])
                    extra_constraints.append(
                        cur.mogrify(f"p.base_referrer {op} %(referrer)s", {"referrer": f["value"]}))
                elif filter_type == events.event_type.METADATA.ui_type:
                    op = __get_sql_operator(f["operator"])
                    if f.get("key") in meta_keys.keys():
                        extra_constraints.append(
                            cur.mogrify(f"s.{metadata.index_to_colname(meta_keys[f['key']])} {op} %(value)s",
                                        {"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
                        )
                elif filter_type in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
                    op = __get_sql_operator(f["operator"])
                    extra_constraints.append(
                        cur.mogrify(f"s.user_id {op} %(value)s",
                                    {"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
                    )
                elif filter_type in [sessions_metas.meta_type.USERANONYMOUSID,
                                     sessions_metas.meta_type.USERANONYMOUSID_IOS]:
                    op = __get_sql_operator(f["operator"])
                    extra_constraints.append(
                        cur.mogrify(f"s.user_anonymous_id {op} %(value)s",
                                    {"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
                    )
                elif filter_type in [sessions_metas.meta_type.REVID, sessions_metas.meta_type.REVID_IOS]:
                    op = __get_sql_operator(f["operator"])
                    extra_constraints.append(
                        cur.mogrify(f"s.rev_id {op} %(value)s",
                                    {"value": helper.string_to_sql_like_with_op(f["value"][0], op)})
                    )

        # ---------------------------------------------------------------------------

        if data.get("startDate") is not None:
            extra_constraints.append(cur.mogrify("s.start_ts >= %(startDate)s", {"startDate": data['startDate']}))
        else:
            data['startDate'] = None
        if data.get("endDate") is not None:
            extra_constraints.append(cur.mogrify("s.start_ts <= %(endDate)s", {"endDate": data['endDate']}))
        else:
            data['endDate'] = None

        if data.get('platform') is not None:
            if data['platform'] == 'mobile':
                extra_constraints.append(b"s.user_os in ('Android','BlackBerry OS','iOS','Tizen','Windows Phone')")
            elif data['platform'] == 'desktop':
                extra_constraints.append(
                    b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')")

        order = "DESC"
        if data.get("order") is not None:
            order = data["order"]
        sort = 'session_id'
        if data.get("sort") is not None and data["sort"] != "session_id":
            sort += " " + order + "," + helper.key_to_snake_case(data["sort"])

        if errors_only:
            extra_from += f" INNER JOIN {events.event_type.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)"
            extra_constraints.append(b"ser.source = 'js_exception'")
            if error_status != "ALL":
                extra_constraints.append(cur.mogrify("ser.status = %(status)s", {"status": error_status.lower()}))
            if favorite_only:
                extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)"
                extra_constraints.append(cur.mogrify("ufe.user_id = %(user_id)s", {"user_id": user_id}))

        extra_constraints = [extra.decode('UTF-8') + "\n" for extra in extra_constraints]
        if not favorite_only and not errors_only:
            extra_from += """LEFT JOIN (SELECT user_id, session_id
                                        FROM public.user_favorite_sessions
                                        WHERE user_id = %(userId)s) AS favorite_sessions
                                       USING (session_id)"""
        extra_join = ""
        if issue is not None:
            extra_join = cur.mogrify("""
                INNER JOIN LATERAL(SELECT TRUE
                                   FROM events_common.issues
                                        INNER JOIN public.issues AS p_issues USING (issue_id)
                                   WHERE issues.session_id=f.session_id
                                     AND p_issues.type=%(type)s
                                     AND p_issues.context_string=%(contextString)s
                                     AND timestamp >= f.first_event_ts
                                     AND timestamp <= f.last_event_ts) AS issues ON(TRUE)
                """, {"contextString": issue["contextString"], "type": issue["type"]}).decode('UTF-8')

        query_part = f"""\
            FROM {f"({events_query_part}) AS f" if len(events_query_part) > 0 else "public.sessions AS s"}
            {extra_join}
            {"INNER JOIN public.sessions AS s USING(session_id)" if len(events_query_part) > 0 else ""}
            {extra_from}
            WHERE
            {" AND ".join(extra_constraints)}"""

        if errors_only:
            main_query = cur.mogrify(f"""\
                SELECT DISTINCT er.error_id, ser.status, ser.parent_error_id, ser.payload,
                       COALESCE((SELECT TRUE
                                 FROM public.user_favorite_sessions AS fs
                                 WHERE s.session_id = fs.session_id
                                   AND fs.user_id = %(userId)s), FALSE) AS favorite,
                       COALESCE((SELECT TRUE
                                 FROM public.user_viewed_errors AS ve
                                 WHERE er.error_id = ve.error_id
                                   AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed
                {query_part};""",
                                     generic_args)

        elif count_only:
            main_query = cur.mogrify(f"""\
                SELECT COUNT(DISTINCT s.session_id) AS count_sessions,
                       COUNT(DISTINCT s.user_uuid) AS count_users
                {query_part};""",
                                     generic_args)
        else:
            main_query = cur.mogrify(f"""\
                SELECT *
                FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS}
                      {query_part}
                      ORDER BY s.session_id desc) AS filtered_sessions
                ORDER BY favorite DESC, issue_score DESC, {sort} {order};""",
                                     generic_args)

        # print("--------------------")
        # print(main_query)

        cur.execute(main_query)

        if count_only:
            return helper.dict_to_camel_case(cur.fetchone())
        sessions = []
        total = cur.rowcount
        row = cur.fetchone()
        limit = 200
        while row is not None and len(sessions) < limit:
            if row.get("favorite"):
                limit += 1
            sessions.append(row)
            row = cur.fetchone()

    if errors_only:
        return sessions
    if data.get("sort") is not None and data["sort"] != "session_id":
        sessions = sorted(sessions, key=lambda s: s[helper.key_to_snake_case(data["sort"])],
                          reverse=data.get("order", "DESC").upper() == "DESC")
    return {
        'total': total,
        'sessions': helper.list_to_camel_case(sessions)
    }
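A hypothetical search2_pg call: sessions in a time window where the user clicked "Checkout", filtered to Chrome (assumes the sessions_metas.meta_type constants resolve to the upper-cased UI type strings):

result = search2_pg(
    data={"startDate": 1600000000000, "endDate": 1600086400000,
          "events": [{"type": "CLICK", "operator": "is", "value": "Checkout"}],
          "filters": [{"type": "USERBROWSER", "operator": "is", "value": ["Chrome"]}],
          "sort": "duration", "order": "DESC"},
    project_id=1, user_id=42)
# -> {"total": <matched rows>, "sessions": [<camelCased session projections>]}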
def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None):
|
||||
if project_id is None:
|
||||
all_projects = projects.get_projects(tenant_id=tenant_id, recording_state=False)
|
||||
else:
|
||||
all_projects = [
|
||||
projects.get_project(tenant_id=tenant_id, project_id=int(project_id), include_last_session=False,
|
||||
include_gdpr=False)]
|
||||
|
||||
all_projects = {int(p["projectId"]): p["name"] for p in all_projects}
|
||||
project_ids = list(all_projects.keys())
|
||||
|
||||
available_keys = metadata.get_keys_by_projects(project_ids)
|
||||
for i in available_keys:
|
||||
available_keys[i]["user_id"] = sessions_metas.meta_type.USERID
|
||||
available_keys[i]["user_anonymous_id"] = sessions_metas.meta_type.USERANONYMOUSID
|
||||
results = {}
|
||||
for i in project_ids:
|
||||
if m_key not in available_keys[i].values():
|
||||
available_keys.pop(i)
|
||||
results[i] = {"total": 0, "sessions": [], "missingMetadata": True}
|
||||
project_ids = list(available_keys.keys())
|
||||
if len(project_ids) > 0:
|
||||
with pg_client.PostgresClient() as cur:
|
||||
sub_queries = []
|
||||
for i in project_ids:
|
||||
col_name = list(available_keys[i].keys())[list(available_keys[i].values()).index(m_key)]
|
||||
sub_queries.append(cur.mogrify(
|
||||
f"(SELECT COALESCE(COUNT(s.*)) AS count FROM public.sessions AS s WHERE s.project_id = %(id)s AND s.{col_name} = %(value)s) AS \"{i}\"",
|
||||
{"id": i, "value": m_value}).decode('UTF-8'))
|
||||
query = f"""SELECT {", ".join(sub_queries)};"""
|
||||
cur.execute(query=query)
|
||||
|
||||
rows = cur.fetchone()
|
||||
|
||||
sub_queries = []
|
||||
for i in rows.keys():
|
||||
results[i] = {"total": rows[i], "sessions": [], "missingMetadata": False, "name": all_projects[int(i)]}
|
||||
if rows[i] > 0:
|
||||
col_name = list(available_keys[int(i)].keys())[list(available_keys[int(i)].values()).index(m_key)]
|
||||
sub_queries.append(
|
||||
cur.mogrify(
|
||||
f"""(
|
||||
SELECT *
|
||||
FROM (
|
||||
SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS}
|
||||
FROM public.sessions AS s LEFT JOIN (SELECT session_id
|
||||
FROM public.user_favorite_sessions
|
||||
WHERE user_favorite_sessions.user_id = %(userId)s
|
||||
) AS favorite_sessions USING (session_id)
|
||||
WHERE s.project_id = %(id)s AND s.duration IS NOT NULL AND s.{col_name} = %(value)s
|
||||
) AS full_sessions
|
||||
ORDER BY favorite DESC, issue_score DESC
|
||||
LIMIT 10
|
||||
)""",
|
||||
{"id": i, "value": m_value, "userId": user_id}).decode('UTF-8'))
|
||||
if len(sub_queries) > 0:
|
||||
cur.execute("\nUNION\n".join(sub_queries))
|
||||
rows = cur.fetchall()
|
||||
for i in rows:
|
||||
results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i))
|
||||
return results
|
||||
|
||||
|
||||
def search_by_issue(user_id, issue, project_id, start_date, end_date):
|
||||
constraints = ["s.project_id = %(projectId)s",
|
||||
"p_issues.context_string = %(issueContextString)s",
|
||||
"p_issues.type = %(issueType)s"]
|
||||
if start_date is not None:
|
||||
constraints.append("start_ts >= %(startDate)s")
|
||||
if end_date is not None:
|
||||
constraints.append("start_ts <= %(endDate)s")
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
f"""SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS}
|
||||
FROM public.sessions AS s
|
||||
INNER JOIN events_common.issues USING (session_id)
|
||||
INNER JOIN public.issues AS p_issues USING (issue_id)
|
||||
LEFT JOIN (SELECT user_id, session_id
|
||||
FROM public.user_favorite_sessions
|
||||
WHERE user_id = %(userId)s) AS favorite_sessions
|
||||
USING (session_id)
|
||||
WHERE {" AND ".join(constraints)}
|
||||
ORDER BY s.session_id DESC;""",
|
||||
{
|
||||
"issueContextString": issue["contextString"],
|
||||
"issueType": issue["type"], "userId": user_id,
|
||||
"projectId": project_id,
|
||||
"startDate": start_date,
|
||||
"endDate": end_date
|
||||
}))
|
||||
|
||||
rows = cur.fetchall()
|
||||
return helper.list_to_camel_case(rows)
|
||||
|
||||
|
||||
def get_favorite_sessions(project_id, user_id, include_viewed=False):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query_part = cur.mogrify(f"""\
|
||||
FROM public.sessions AS s
|
||||
LEFT JOIN public.user_favorite_sessions AS fs ON fs.session_id = s.session_id
|
||||
WHERE fs.user_id = %(userId)s""",
|
||||
{"projectId": project_id, "userId": user_id}
|
||||
)
|
||||
|
||||
extra_query = b""
|
||||
if include_viewed:
|
||||
extra_query = cur.mogrify(""",\
|
||||
COALESCE((SELECT TRUE
|
||||
FROM public.user_viewed_sessions AS fs
|
||||
WHERE s.session_id = fs.session_id
|
||||
AND fs.user_id = %(userId)s), FALSE) AS viewed""",
|
||||
{"projectId": project_id, "userId": user_id})
|
||||
|
||||
cur.execute(f"""\
|
||||
SELECT s.project_id,
|
||||
s.session_id::text AS session_id,
|
||||
s.user_uuid,
|
||||
s.user_id,
|
||||
s.user_agent,
|
||||
s.user_os,
|
||||
s.user_browser,
|
||||
s.user_device,
|
||||
s.user_country,
|
||||
s.start_ts,
|
||||
s.duration,
|
||||
s.events_count,
|
||||
s.pages_count,
|
||||
s.errors_count,
|
||||
TRUE AS favorite
|
||||
{extra_query.decode('UTF-8')}
|
||||
{query_part.decode('UTF-8')}
|
||||
ORDER BY s.session_id
|
||||
LIMIT 50;""")
|
||||
|
||||
sessions = cur.fetchall()
|
||||
return helper.list_to_camel_case(sessions)
|
||||
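Sketch of a cross-project metadata lookup with search_by_metadata (ids hypothetical); projects that do not define the key come back with missingMetadata=True and an empty session list:

by_meta = search_by_metadata(tenant_id=1, user_id=42, m_key="plan", m_value="enterprise")
for pid, block in by_meta.items():
    print(pid, block["total"], block["missingMetadata"], len(block["sessions"]))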
158
api/chalicelib/core/sessions_assignments.py
Normal file
@ -0,0 +1,158 @@
import json

from chalicelib.utils.helper import environ as env
from chalicelib.utils import helper
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils import pg_client
from chalicelib.core import integrations_manager, integration_base_issue


def __get_saved_data(project_id, session_id, issue_id, tool):
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""\
            SELECT *
            FROM public.assigned_sessions
            WHERE session_id = %(session_id)s
              AND issue_id = %(issue_id)s
              AND provider = %(provider)s;""",
                            {"session_id": session_id, "issue_id": issue_id, "provider": tool.lower()})
        cur.execute(query)
        return helper.dict_to_camel_case(cur.fetchone())


def create_new_assignment(tenant_id, project_id, session_id, creator_id, assignee, description, title, issue_type,
                          integration_project_id):
    error, integration = integrations_manager.get_integration(tenant_id=tenant_id, user_id=creator_id)
    if error is not None:
        return error

    i = integration.get()

    if i is None:
        return {"errors": ["integration not found"]}
    link = env["SITE_URL"] + f"/{project_id}/session/{session_id}"
    description += f"\n> {link}"
    try:
        issue = integration.issue_handler.create_new_assignment(title=title, assignee=assignee,
                                                                description=description,
                                                                issue_type=issue_type,
                                                                integration_project_id=integration_project_id)
    except integration_base_issue.RequestException as e:
        return integration_base_issue.proxy_issues_handler(e)
    if issue is not None and "id" not in issue:
        return {"errors": ["something went wrong while creating the issue"]}
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify("""\
            INSERT INTO public.assigned_sessions(session_id, issue_id, created_by, provider, provider_data)
            VALUES (%(session_id)s, %(issue_id)s, %(creator_id)s, %(provider)s, %(provider_data)s);""",
                            {"session_id": session_id, "creator_id": creator_id,
                             "issue_id": issue["id"], "provider": integration.provider.lower(),
                             "provider_data": json.dumps({"integrationProjectId": integration_project_id})})
        cur.execute(query)
    issue["provider"] = integration.provider.lower()
    return issue


def get_all(project_id, user_id):
    available_integrations = integrations_manager.get_available_integrations(user_id=user_id)
    no_integration = not any(available_integrations.values())
    if no_integration:
        return []
    all_integrations = all(available_integrations.values())
    extra_query = ["sessions.project_id = %(project_id)s"]
    if not all_integrations:
        extra_query.append("provider IN %(providers)s")
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""\
            SELECT assigned_sessions.*
            FROM public.assigned_sessions
                 INNER JOIN public.sessions USING (session_id)
            WHERE {" AND ".join(extra_query)};""",
                            {"project_id": project_id,
                             "providers": tuple(d for d in available_integrations if available_integrations[d])})
        cur.execute(query)
        assignments = helper.list_to_camel_case(cur.fetchall())
    for a in assignments:
        a["createdAt"] = TimeUTC.datetime_to_timestamp(a["createdAt"])
    return assignments


def get_by_session(tenant_id, user_id, project_id, session_id):
    available_integrations = integrations_manager.get_available_integrations(user_id=user_id)
    if not any(available_integrations.values()):
        return []
    extra_query = ["session_id = %(session_id)s", "provider IN %(providers)s"]
    with pg_client.PostgresClient() as cur:
        query = cur.mogrify(f"""\
            SELECT *
            FROM public.assigned_sessions
            WHERE {" AND ".join(extra_query)};""",
                            {"session_id": session_id,
                             "providers": tuple([k for k in available_integrations if available_integrations[k]])})
        cur.execute(query)
        results = cur.fetchall()
    issues = {}
    for i in results:
        if i["provider"] not in issues.keys():
            issues[i["provider"]] = []

        issues[i["provider"]].append({"integrationProjectId": i["provider_data"]["integrationProjectId"],
                                      "id": i["issue_id"]})
    results = []
    for tool in issues.keys():
        error, integration = integrations_manager.get_integration(tool=tool, tenant_id=tenant_id, user_id=user_id)
        if error is not None:
            return error

        i = integration.get()
        if i is None:
            print("integration not found")
            continue

        r = integration.issue_handler.get_by_ids(saved_issues=issues[tool])
        print(r)
        for i in r["issues"]:
            i["provider"] = tool
        results += r["issues"]
    return results


def get(tenant_id, user_id, project_id, session_id, assignment_id):
    error, integration = integrations_manager.get_integration(tenant_id=tenant_id, user_id=user_id)
    if error is not None:
        return error
    saved = __get_saved_data(project_id, session_id, assignment_id, tool=integration.provider)
    if saved is None:
        return {"errors": ["issue not found"]}
    i = integration.get()
    if i is None:
        return {"errors": ["integration not found"]}
    r = integration.issue_handler.get(integration_project_id=saved["providerData"]["integrationProjectId"],
                                      assignment_id=assignment_id)

    r["provider"] = integration.provider.lower()
    return r


def comment(tenant_id, user_id, project_id, session_id, assignment_id, message):
    error, integration = integrations_manager.get_integration(tenant_id=tenant_id, user_id=user_id)
    if error is not None:
        return error
    i = integration.get()

    if i is None:
        return {"errors": ["integration not found"]}
    saved = __get_saved_data(project_id, session_id, assignment_id, tool=integration.provider)

    return integration.issue_handler.comment(integration_project_id=saved["providerData"]["integrationProjectId"],
                                             assignment_id=assignment_id,
                                             comment=message)
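A hypothetical end-to-end assignment using the module above (ids and names illustrative; requires a configured issue-tracker integration):

issue = create_new_assignment(tenant_id=1, project_id=1, session_id=100,
                              creator_id=42, assignee="jane",
                              description="Player crashes at 01:23",
                              title="Crash during replay", issue_type="bug",
                              integration_project_id="PROJ")
fetched = get(tenant_id=1, user_id=42, project_id=1, session_id=100,
              assignment_id=issue["id"])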
89
api/chalicelib/core/sessions_favorite_viewed.py
Normal file
@ -0,0 +1,89 @@
|
|||
from chalicelib.utils import pg_client
|
||||
from chalicelib.core import sessions
|
||||
|
||||
|
||||
def add_favorite_session(project_id, user_id, session_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(f"""\
|
||||
INSERT INTO public.user_favorite_sessions
|
||||
(user_id, session_id)
|
||||
VALUES
|
||||
(%(userId)s,%(sessionId)s);""",
|
||||
{"userId": user_id, "sessionId": session_id})
|
||||
)
|
||||
return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False,
|
||||
include_fav_viewed=True)
|
||||
|
||||
|
||||
def remove_favorite_session(project_id, user_id, session_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(f"""\
|
||||
DELETE FROM public.user_favorite_sessions
|
||||
WHERE
|
||||
user_id = %(userId)s
|
||||
AND session_id = %(sessionId)s;""",
|
||||
{"userId": user_id, "sessionId": session_id})
|
||||
)
|
||||
return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, user_id=user_id, full_data=False,
|
||||
include_fav_viewed=True)
|
||||
|
||||
|
||||
def add_viewed_session(project_id, user_id, session_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify("""\
|
||||
INSERT INTO public.user_viewed_sessions
|
||||
(user_id, session_id)
|
||||
VALUES
|
||||
(%(userId)s,%(sessionId)s);""",
|
||||
{"userId": user_id, "sessionId": session_id})
|
||||
)
|
||||
|
||||
|
||||
def favorite_session(project_id, user_id, session_id):
|
||||
if favorite_session_exists(user_id=user_id, session_id=session_id):
|
||||
return remove_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id)
|
||||
|
||||
return add_favorite_session(project_id=project_id, user_id=user_id, session_id=session_id)
|
||||
|
||||
|
||||
def view_session(project_id, user_id, session_id):
|
||||
if viewed_session_exists(user_id=user_id, session_id=session_id):
|
||||
return None
|
||||
return add_viewed_session(project_id=project_id, user_id=user_id, session_id=session_id)
|
||||
|
||||
|
||||
def favorite_session_exists(user_id, session_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
"""SELECT
|
||||
session_id
|
||||
FROM public.user_favorite_sessions
|
||||
WHERE
|
||||
user_id = %(userId)s
|
||||
AND session_id = %(sessionId)s""",
|
||||
{"userId": user_id, "sessionId": session_id})
|
||||
)
|
||||
r = cur.fetchone()
|
||||
return r is not None
|
||||
|
||||
|
||||
def viewed_session_exists(user_id, session_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
"""SELECT
|
||||
session_id
|
||||
FROM public.user_viewed_sessions
|
||||
WHERE
|
||||
user_id = %(userId)s
|
||||
AND session_id = %(sessionId)s""",
|
||||
{"userId": user_id, "sessionId": session_id})
|
||||
)
|
||||
r = cur.fetchone()
|
||||
if r:
|
||||
return True
|
||||
return False
|
||||
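A minimal usage sketch of the toggle semantics above (the caller names are hypothetical, not part of this diff): favorite_session flips the favorite flag and returns the refreshed session payload, while view_session is idempotent and returns None once the session is already marked as viewed.

    from chalicelib.core import sessions_favorite_viewed

    # Hypothetical route handlers; the ids would come from the request context.
    def toggle_favorite(project_id, user_id, session_id):
        # returns the session serialized by sessions.get_by_id2_pg,
        # with favorite/viewed flags included (include_fav_viewed=True)
        return sessions_favorite_viewed.favorite_session(project_id, user_id, session_id)

    def mark_viewed(project_id, user_id, session_id):
        # no-op (returns None) when the user has already viewed this session
        return sessions_favorite_viewed.view_session(project_id, user_id, session_id)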
226
api/chalicelib/core/sessions_metas.py
Normal file
@ -0,0 +1,226 @@
from chalicelib.utils import pg_client, helper
from chalicelib.utils.event_filter_definition import SupportedFilter


def get_key_values(project_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                f"""\
                SELECT ARRAY_AGG(DISTINCT s.user_os
                                 ORDER BY s.user_os) FILTER ( WHERE s.user_os IS NOT NULL AND s.platform='web') AS {meta_type.USEROS},
                       ARRAY_AGG(DISTINCT s.user_browser
                                 ORDER BY s.user_browser)
                       FILTER ( WHERE s.user_browser IS NOT NULL AND s.platform='web') AS {meta_type.USERBROWSER},
                       ARRAY_AGG(DISTINCT s.user_device
                                 ORDER BY s.user_device)
                       FILTER ( WHERE s.user_device IS NOT NULL AND s.user_device != '' AND s.platform='web') AS {meta_type.USERDEVICE},
                       ARRAY_AGG(DISTINCT s.user_country
                                 ORDER BY s.user_country)
                       FILTER ( WHERE s.user_country IS NOT NULL AND s.platform='web')::text[] AS {meta_type.USERCOUNTRY},
                       ARRAY_AGG(DISTINCT s.user_id
                                 ORDER BY s.user_id) FILTER ( WHERE s.user_id IS NOT NULL AND s.user_id != 'none' AND s.user_id != '' AND s.platform='web') AS {meta_type.USERID},
                       ARRAY_AGG(DISTINCT s.user_anonymous_id
                                 ORDER BY s.user_anonymous_id) FILTER ( WHERE s.user_anonymous_id IS NOT NULL AND s.user_anonymous_id != 'none' AND s.user_anonymous_id != '' AND s.platform='web') AS {meta_type.USERANONYMOUSID},
                       ARRAY_AGG(DISTINCT s.rev_id
                                 ORDER BY s.rev_id) FILTER ( WHERE s.rev_id IS NOT NULL AND s.platform='web') AS {meta_type.REVID},
                       ARRAY_AGG(DISTINCT p.referrer
                                 ORDER BY p.referrer)
                       FILTER ( WHERE p.referrer != '' ) AS {meta_type.REFERRER},

                       ARRAY_AGG(DISTINCT s.user_os
                                 ORDER BY s.user_os) FILTER ( WHERE s.user_os IS NOT NULL AND s.platform='ios' ) AS {meta_type.USEROS_IOS},
                       ARRAY_AGG(DISTINCT s.user_device
                                 ORDER BY s.user_device)
                       FILTER ( WHERE s.user_device IS NOT NULL AND s.user_device != '' AND s.platform='ios') AS {meta_type.USERDEVICE_IOS},
                       ARRAY_AGG(DISTINCT s.user_country
                                 ORDER BY s.user_country)
                       FILTER ( WHERE s.user_country IS NOT NULL AND s.platform='ios')::text[] AS {meta_type.USERCOUNTRY_IOS},
                       ARRAY_AGG(DISTINCT s.user_id
                                 ORDER BY s.user_id) FILTER ( WHERE s.user_id IS NOT NULL AND s.user_id != 'none' AND s.user_id != '' AND s.platform='ios') AS {meta_type.USERID_IOS},
                       ARRAY_AGG(DISTINCT s.user_anonymous_id
                                 ORDER BY s.user_anonymous_id) FILTER ( WHERE s.user_anonymous_id IS NOT NULL AND s.user_anonymous_id != 'none' AND s.user_anonymous_id != '' AND s.platform='ios') AS {meta_type.USERANONYMOUSID_IOS},
                       ARRAY_AGG(DISTINCT s.rev_id
                                 ORDER BY s.rev_id) FILTER ( WHERE s.rev_id IS NOT NULL AND s.platform='ios') AS {meta_type.REVID_IOS}
                FROM public.sessions AS s
                         LEFT JOIN events.pages AS p USING (session_id)
                WHERE s.project_id = %(site_id)s;""",
                {"site_id": project_id}
            )
        )

        row = cur.fetchone()
        for k in row.keys():
            if row[k] is None:
                row[k] = []
            elif len(row[k]) > 500:
                row[k] = row[k][:500]
    return helper.dict_to_CAPITAL_keys(row)


def get_top_key_values(project_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                f"""\
                SELECT {",".join([f"ARRAY((SELECT value FROM public.autocomplete WHERE project_id = %(site_id)s AND type='{k}' GROUP BY value ORDER BY COUNT(*) DESC LIMIT %(limit)s)) AS {k}" for k in SUPPORTED_TYPES.keys()])};""",
                {"site_id": project_id, "limit": 5}
            )
        )

        row = cur.fetchone()
    return helper.dict_to_CAPITAL_keys(row)


def __generic_query(typename):
    return f"""\
    SELECT value, type
    FROM ((SELECT value, type
           FROM public.autocomplete
           WHERE project_id = %(project_id)s
             AND type = '{typename}'
             AND value ILIKE %(svalue)s
           ORDER BY value
           LIMIT 5)
          UNION
          (SELECT value, type
           FROM public.autocomplete
           WHERE project_id = %(project_id)s
             AND type = '{typename}'
             AND value ILIKE %(value)s
           ORDER BY value
           LIMIT 5)) AS met"""


def __generic_autocomplete(typename):
    def f(project_id, text):
        with pg_client.PostgresClient() as cur:
            query = cur.mogrify(__generic_query(typename),
                                {"project_id": project_id, "value": helper.string_to_sql_like(text),
                                 "svalue": helper.string_to_sql_like("^" + text)})

            cur.execute(query)
            rows = cur.fetchall()
            return rows

    return f


class meta_type:
    USEROS = "USEROS"
    USERBROWSER = "USERBROWSER"
    USERDEVICE = "USERDEVICE"
    USERCOUNTRY = "USERCOUNTRY"
    USERID = "USERID"
    USERANONYMOUSID = "USERANONYMOUSID"
    REFERRER = "REFERRER"
    REVID = "REVID"
    # IOS
    USEROS_IOS = "USEROS_IOS"
    USERDEVICE_IOS = "USERDEVICE_IOS"
    USERCOUNTRY_IOS = "USERCOUNTRY_IOS"
    USERID_IOS = "USERID_IOS"
    USERANONYMOUSID_IOS = "USERANONYMOUSID_IOS"
    REVID_IOS = "REVID_IOS"


SUPPORTED_TYPES = {
    meta_type.USEROS: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USEROS),
                                      query=__generic_query(typename=meta_type.USEROS),
                                      value_limit=0, starts_with="", starts_limit=0,
                                      ignore_if_starts_with=["/"]),
    meta_type.USERBROWSER: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERBROWSER),
                                           query=__generic_query(typename=meta_type.USERBROWSER),
                                           value_limit=0, starts_with="", starts_limit=0,
                                           ignore_if_starts_with=["/"]),
    meta_type.USERDEVICE: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERDEVICE),
                                          query=__generic_query(typename=meta_type.USERDEVICE),
                                          value_limit=3, starts_with="", starts_limit=3,
                                          ignore_if_starts_with=["/"]),
    meta_type.USERCOUNTRY: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERCOUNTRY),
                                           query=__generic_query(typename=meta_type.USERCOUNTRY),
                                           value_limit=2, starts_with="", starts_limit=2,
                                           ignore_if_starts_with=["/"]),
    meta_type.USERID: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERID),
                                      query=__generic_query(typename=meta_type.USERID),
                                      value_limit=2, starts_with="", starts_limit=2,
                                      ignore_if_starts_with=["/"]),
    meta_type.USERANONYMOUSID: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERANONYMOUSID),
                                               query=__generic_query(typename=meta_type.USERANONYMOUSID),
                                               value_limit=3, starts_with="", starts_limit=3,
                                               ignore_if_starts_with=["/"]),
    meta_type.REVID: SupportedFilter(get=__generic_autocomplete(typename=meta_type.REVID),
                                     query=__generic_query(typename=meta_type.REVID),
                                     value_limit=0, starts_with="", starts_limit=0,
                                     ignore_if_starts_with=["/"]),
    meta_type.REFERRER: SupportedFilter(get=__generic_autocomplete(typename=meta_type.REFERRER),
                                        query=__generic_query(typename=meta_type.REFERRER),
                                        value_limit=5, starts_with="/", starts_limit=5,
                                        ignore_if_starts_with=[]),
    # IOS
    meta_type.USEROS_IOS: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USEROS_IOS),
                                          query=__generic_query(typename=meta_type.USEROS_IOS),
                                          value_limit=0, starts_with="", starts_limit=0,
                                          ignore_if_starts_with=["/"]),
    meta_type.USERDEVICE_IOS: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERDEVICE_IOS),
                                              query=__generic_query(typename=meta_type.USERDEVICE_IOS),
                                              value_limit=3, starts_with="", starts_limit=3,
                                              ignore_if_starts_with=["/"]),
    meta_type.USERCOUNTRY_IOS: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERCOUNTRY_IOS),
                                               query=__generic_query(typename=meta_type.USERCOUNTRY_IOS),
                                               value_limit=2, starts_with="", starts_limit=2,
                                               ignore_if_starts_with=["/"]),
    meta_type.USERID_IOS: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERID_IOS),
                                          query=__generic_query(typename=meta_type.USERID_IOS),
                                          value_limit=2, starts_with="", starts_limit=2,
                                          ignore_if_starts_with=["/"]),
    meta_type.USERANONYMOUSID_IOS: SupportedFilter(get=__generic_autocomplete(typename=meta_type.USERANONYMOUSID_IOS),
                                                   query=__generic_query(typename=meta_type.USERANONYMOUSID_IOS),
                                                   value_limit=3, starts_with="", starts_limit=3,
                                                   ignore_if_starts_with=["/"]),
    meta_type.REVID_IOS: SupportedFilter(get=__generic_autocomplete(typename=meta_type.REVID_IOS),
                                         query=__generic_query(typename=meta_type.REVID_IOS),
                                         value_limit=0, starts_with="", starts_limit=0,
                                         ignore_if_starts_with=["/"]),
}


def search(text, meta_type, project_id):
    rows = []
    if meta_type.upper() not in SUPPORTED_TYPES:
        return {"errors": ["unsupported type"]}
    rows += SUPPORTED_TYPES[meta_type.upper()].get(project_id=project_id, text=text)
    if meta_type.upper() + "_IOS" in SUPPORTED_TYPES:
        rows += SUPPORTED_TYPES[meta_type.upper() + "_IOS"].get(project_id=project_id, text=text)
    return {"data": rows}
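For illustration, a hedged sketch of how the autocomplete entry point above behaves (the values are hypothetical): search() unions up to five matches against the "^"-prefixed pattern with up to five matches against the plain pattern, and transparently appends results from the _IOS variant of the same type when one exists.

    result = search(text="Chrom", meta_type="userBrowser", project_id=1)
    # -> {"data": [{"value": "Chrome", "type": "USERBROWSER"}, ...]}
    # unsupported types short-circuit:
    search(text="x", meta_type="nope", project_id=1)  # -> {"errors": ["unsupported type"]}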
25
api/chalicelib/core/sessions_mobs.py
Normal file
@ -0,0 +1,25 @@
from chalicelib.utils.helper import environ

import boto3


def get_web(sessionId):
    return boto3.client('s3', region_name=environ["sessions_region"]).generate_presigned_url(
        'get_object',
        Params={
            'Bucket': environ["sessions_bucket"],
            'Key': sessionId
        },
        ExpiresIn=100000
    )


def get_ios(sessionId):
    return boto3.client('s3', region_name=environ["ios_region"]).generate_presigned_url(
        'get_object',
        Params={
            'Bucket': environ["ios_bucket"],
            'Key': sessionId
        },
        ExpiresIn=100000
    )
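The presigned URLs above are plain time-limited GET links, so any HTTP client can fetch the replay file without AWS credentials; a minimal consumer sketch (the session id is hypothetical):

    import requests

    url = get_web(sessionId="1234")   # presigned S3 GET, valid for ExpiresIn seconds
    mob = requests.get(url).content   # downloads the replay (.mob) payload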
627
api/chalicelib/core/significance.py
Normal file
@ -0,0 +1,627 @@
__author__ = "AZNAUROV David"
__maintainer__ = "KRAIEM Taha Yassine"

from chalicelib.core import events, sessions_metas, metadata, sessions
from chalicelib.utils import dev

"""
todo: remove LIMIT from the query
"""

from typing import List
import math
import warnings
from collections import defaultdict

from psycopg2.extras import RealDictRow
from chalicelib.utils import pg_client, helper

SIGNIFICANCE_THRSH = 0.4

# two-sided t critical values (95%) indexed by degrees of freedom;
# the original table skipped df=24 (a KeyError for n=24) and shifted the
# following entries by one, so the standard values are restored here
T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.365, 8: 2.306, 9: 2.262, 10: 2.228,
            11: 2.201, 12: 2.179, 13: 2.160, 14: 2.145, 15: 2.131, 16: 2.120, 17: 2.110, 18: 2.101, 19: 2.093,
            20: 2.086, 21: 2.080, 22: 2.074, 23: 2.069, 24: 2.064, 25: 2.060, 26: 2.056, 27: 2.052, 28: 2.048,
            29: 2.045, 30: 2.042}


@dev.timed
def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]:
    """
    Build and run the multi-stage funnel query, anchored at the minimal timestamp.
    :param filter_d: dict containing events (the ordered stages), filters,
                     startDate/endDate and optional issueTypes
    :return: one row per (session, issue) pair, with one timestamp column per stage
    """
    stages = filter_d["events"]
    filters = filter_d.get("filters", [])
    filter_issues = filter_d.get("issueTypes")
    if filter_issues is None or len(filter_issues) == 0:
        filter_issues = []
    stage_constraints = ["main.timestamp <= %(endTimestamp)s"]
    first_stage_extra_constraints = ["s.project_id=%(project_id)s", "s.start_ts >= %(startTimestamp)s",
                                     "s.start_ts <= %(endTimestamp)s"]
    # joins that must ride along with the first stage (e.g. the referrer filter below);
    # previously this was a string that the stage loop silently overwrote
    first_stage_extra_from = []
    n_stages_query = []
    values = {}
    if len(filters) > 0:
        meta_keys = metadata.get(project_id=project_id)
        meta_keys = {m["key"]: m["index"] for m in meta_keys}
        for i, f in enumerate(filters):
            if not isinstance(f.get("value"), list):
                if isinstance(f.get("value"), tuple):
                    f["value"] = list(f.get("value"))
                else:
                    f["value"] = [f.get("value")]
            if len(f["value"]) == 0 or f["value"][0] is None:
                continue
            filter_type = f["type"].upper()
            values[f"f_value_{i}"] = sessions.__get_sql_value_multiple(f["value"])
            if filter_type == sessions_metas.meta_type.USERBROWSER:
                op = sessions.__get_sql_operator_multiple(f["operator"])
                first_stage_extra_constraints.append(f's.user_browser {op} %(f_value_{i})s')
            elif filter_type in [sessions_metas.meta_type.USEROS, sessions_metas.meta_type.USEROS_IOS]:
                op = sessions.__get_sql_operator_multiple(f["operator"])
                first_stage_extra_constraints.append(f's.user_os {op} %(f_value_{i})s')
            elif filter_type in [sessions_metas.meta_type.USERDEVICE, sessions_metas.meta_type.USERDEVICE_IOS]:
                op = sessions.__get_sql_operator_multiple(f["operator"])
                first_stage_extra_constraints.append(f's.user_device {op} %(f_value_{i})s')
            elif filter_type in [sessions_metas.meta_type.USERCOUNTRY, sessions_metas.meta_type.USERCOUNTRY_IOS]:
                op = sessions.__get_sql_operator_multiple(f["operator"])
                first_stage_extra_constraints.append(f's.user_country {op} %(f_value_{i})s')
            elif filter_type == "DURATION":
                # distinct keys for the min/max bounds so they don't overwrite each other
                if len(f["value"]) > 0 and f["value"][0] is not None:
                    first_stage_extra_constraints.append(f's.duration >= %(f_value_{i}_min)s')
                    values[f"f_value_{i}_min"] = f["value"][0]
                if len(f["value"]) > 1 and f["value"][1] is not None and f["value"][1] > 0:
                    first_stage_extra_constraints.append(f's.duration <= %(f_value_{i}_max)s')
                    values[f"f_value_{i}_max"] = f["value"][1]
            elif filter_type == sessions_metas.meta_type.REFERRER:
                first_stage_extra_from.append(f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)")
                op = sessions.__get_sql_operator_multiple(f["operator"])
                first_stage_extra_constraints.append(f"p.base_referrer {op} %(f_value_{i})s")
            elif filter_type == events.event_type.METADATA.ui_type:
                op = sessions.__get_sql_operator(f["operator"])
                if f.get("key") in meta_keys.keys():
                    first_stage_extra_constraints.append(
                        f's.{metadata.index_to_colname(meta_keys[f["key"]])} {op} %(f_value_{i})s')
                    values[f"f_value_{i}"] = helper.string_to_sql_like_with_op(f["value"][0], op)
            elif filter_type in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
                op = sessions.__get_sql_operator(f["operator"])
                first_stage_extra_constraints.append(f's.user_id {op} %(f_value_{i})s')
                values[f"f_value_{i}"] = helper.string_to_sql_like_with_op(f["value"][0], op)
            elif filter_type in [sessions_metas.meta_type.USERANONYMOUSID,
                                 sessions_metas.meta_type.USERANONYMOUSID_IOS]:
                op = sessions.__get_sql_operator(f["operator"])
                first_stage_extra_constraints.append(f's.user_anonymous_id {op} %(f_value_{i})s')
                values[f"f_value_{i}"] = helper.string_to_sql_like_with_op(f["value"][0], op)
            elif filter_type in [sessions_metas.meta_type.REVID, sessions_metas.meta_type.REVID_IOS]:
                op = sessions.__get_sql_operator(f["operator"])
                first_stage_extra_constraints.append(f's.rev_id {op} %(f_value_{i})s')
                values[f"f_value_{i}"] = helper.string_to_sql_like_with_op(f["value"][0], op)

    for i, s in enumerate(stages):
        if i == 0:
            extra_from = ["INNER JOIN public.sessions AS s USING (session_id)"] + first_stage_extra_from
        else:
            extra_from = []
        if s.get("operator") is None:
            s["operator"] = "is"
        op = sessions.__get_sql_operator(s["operator"])
        event_type = s["type"].upper()
        next_label = s["value"]
        if event_type == events.event_type.CLICK.ui_type:
            next_table = events.event_type.CLICK.table
            next_col_name = events.event_type.CLICK.column
        elif event_type == events.event_type.INPUT.ui_type:
            next_table = events.event_type.INPUT.table
            next_col_name = events.event_type.INPUT.column
        elif event_type == events.event_type.LOCATION.ui_type:
            next_table = events.event_type.LOCATION.table
            next_col_name = events.event_type.LOCATION.column
        elif event_type == events.event_type.CUSTOM.ui_type:
            next_table = events.event_type.CUSTOM.table
            next_col_name = events.event_type.CUSTOM.column
        # IOS --------------
        elif event_type == events.event_type.CLICK_IOS.ui_type:
            next_table = events.event_type.CLICK_IOS.table
            next_col_name = events.event_type.CLICK_IOS.column
        elif event_type == events.event_type.INPUT_IOS.ui_type:
            next_table = events.event_type.INPUT_IOS.table
            next_col_name = events.event_type.INPUT_IOS.column
        elif event_type == events.event_type.VIEW_IOS.ui_type:
            next_table = events.event_type.VIEW_IOS.table
            next_col_name = events.event_type.VIEW_IOS.column
        elif event_type == events.event_type.CUSTOM_IOS.ui_type:
            next_table = events.event_type.CUSTOM_IOS.table
            next_col_name = events.event_type.CUSTOM_IOS.column
        else:
            print("=================UNDEFINED")
            continue

        values[f"value{i + 1}"] = helper.string_to_sql_like_with_op(next_label, op)
        if sessions.__is_negation_operator(op) and i > 0:
            op = sessions.__reverse_sql_operator(op)
            main_condition = "left_not.session_id ISNULL"
            extra_from.append(f"""LEFT JOIN LATERAL (SELECT session_id
                                                     FROM {next_table} AS s_main
                                                     WHERE s_main.{next_col_name} {op} %(value{i + 1})s
                                                       AND s_main.timestamp >= T{i}.stage{i}_timestamp
                                                       AND s_main.session_id = T1.session_id) AS left_not ON (TRUE)""")
        else:
            main_condition = f"main.{next_col_name} {op} %(value{i + 1})s"
        n_stages_query.append(f"""
        (SELECT main.session_id,
                {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp,
                '{event_type}' AS type,
                '{s["operator"]}' AS operator
         FROM {next_table} AS main {" ".join(extra_from)}
         WHERE main.timestamp >= {f"T{i}.stage{i}_timestamp" if i > 0 else "%(startTimestamp)s"}
               {"AND main.session_id=T1.session_id" if i > 0 else ""}
           AND {main_condition}
               {(" AND " + " AND ".join(stage_constraints)) if len(stage_constraints) > 0 else ""}
               {(" AND " + " AND ".join(first_stage_extra_constraints)) if len(first_stage_extra_constraints) > 0 and i == 0 else ""}
         GROUP BY main.session_id)
        AS T{i + 1} {"USING (session_id)" if i > 0 else ""}
        """)
    if len(n_stages_query) == 0:
        return []
    n_stages_query = " LEFT JOIN LATERAL ".join(n_stages_query)
    n_stages_query += ") AS stages_t"

    n_stages_query = f"""
    SELECT stages_and_issues_t.*, sessions.session_id, sessions.user_uuid FROM (
        SELECT * FROM (
            SELECT * FROM
            {n_stages_query}
            LEFT JOIN LATERAL
            (
                SELECT * FROM
                (SELECT ISE.session_id,
                        ISS.type           AS issue_type,
                        ISE.timestamp      AS issue_timestamp,
                        ISS.context_string AS issue_context,
                        ISS.issue_id       AS issue_id
                 FROM events_common.issues AS ISE INNER JOIN issues AS ISS USING (issue_id)
                 WHERE ISE.timestamp >= stages_t.stage1_timestamp
                   AND ISE.timestamp <= stages_t.stage{len(stages)}_timestamp
                   AND ISS.project_id = %(project_id)s
                   {"AND ISS.type IN %(issueTypes)s" if len(filter_issues) > 0 else ""}) AS base_t
            ) AS issues_t
            USING (session_id)) AS stages_and_issues_t
        INNER JOIN sessions USING (session_id);
    """

    # LIMIT 10000
    params = {"project_id": project_id, "startTimestamp": filter_d["startDate"], "endTimestamp": filter_d["endDate"],
              "issueTypes": tuple(filter_issues), **values}
    with pg_client.PostgresClient() as cur:
        # print("---------------------------------------------------")
        # print(cur.mogrify(n_stages_query, params))
        # print("---------------------------------------------------")
        cur.execute(cur.mogrify(n_stages_query, params))
        rows = cur.fetchall()
    return rows
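For reference, a sketch of the filter_d dict that get_stages_and_events consumes (illustrative values, not from this diff): events holds the ordered funnel stages, filters narrows the first stage, and issueTypes restricts which issues get joined in.

    filter_d = {
        "startDate": 1609459200000,   # ms timestamps
        "endDate": 1612137600000,
        "events": [                   # ordered funnel stages
            {"type": "CLICK", "value": "Sign Up", "operator": "is"},
            {"type": "LOCATION", "value": "/checkout", "operator": "is"},
        ],
        "filters": [{"type": "USERBROWSER", "value": ["Chrome"], "operator": "is"}],
        "issueTypes": ["click_rage"],
    }
    rows = get_stages_and_events(filter_d, project_id=1)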
def pearson_corr(x: list, y: list):
    n = len(x)
    if n != len(y):
        raise ValueError(f'x and y must have the same length. Got {len(x)} and {len(y)} instead')

    if n < 2:
        warnings.warn(f'x and y must have length at least 2. Got {n} instead')
        return None, None, False

    # If an input is constant, the correlation coefficient is not defined.
    if all(t == x[0] for t in x) or all(t == y[0] for t in y):
        warnings.warn("An input array is constant; the correlation coefficient is not defined.")
        return None, None, False

    if n == 2:
        # a two-point correlation is always +/-1 with full confidence
        # (the third element keeps the (r, confidence, is_significant) contract)
        return math.copysign(1, x[1] - x[0]) * math.copysign(1, y[1] - y[0]), 1.0, True

    xmean = sum(x) / len(x)
    ymean = sum(y) / len(y)

    xm = [el - xmean for el in x]
    ym = [el - ymean for el in y]

    normxm = math.sqrt(sum(xm[i] * xm[i] for i in range(len(xm))))
    normym = math.sqrt(sum(ym[i] * ym[i] for i in range(len(ym))))

    threshold = 1e-8
    if normxm < threshold * abs(xmean) or normym < threshold * abs(ymean):
        # If all the values in x (likewise y) are very close to the mean,
        # the loss of precision that occurs in the subtraction xm = x - xmean
        # might result in large errors in r.
        warnings.warn("An input array is nearly constant; the correlation coefficient may be inaccurate.")

    r = sum(
        i[0] * i[1] for i in zip([xm[i] / normxm for i in range(len(xm))], [ym[i] / normym for i in range(len(ym))]))

    # Presumably, if abs(r) > 1, then it is only some small artifact of floating point arithmetic.
    # However, if r < 0, we don't care, as our problem is to find only positive correlations.
    r = max(min(r, 1.0), 0.0)

    # approximated confidence
    if n < 31:
        t_c = T_VALUES[n]
    elif n < 50:
        t_c = 2.02
    else:
        t_c = 2
    if r >= 0.999:
        confidence = 1
    else:
        confidence = r * math.sqrt(n - 2) / math.sqrt(1 - r ** 2)

    if confidence > SIGNIFICANCE_THRSH:
        return r, confidence, True
    else:
        return r, confidence, False
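pearson_corr drives the funnel significance math: with x the binary transitions vector and y a binary issue-occurrence vector, r is the dot product of the mean-centered, normalized vectors (clamped to [0, 1]), and confidence is the t-statistic r * sqrt(n - 2) / sqrt(1 - r**2). A hedged call sketch with hypothetical data:

    r, confidence, significant = pearson_corr([1, 0, 1, 1, 0], [0, 1, 0, 0, 1])
    # negative correlations are clamped to 0 by design, and `significant`
    # is True only when confidence exceeds SIGNIFICANCE_THRSH (0.4)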
@dev.timed
def get_transitions_and_issues_of_each_type(rows: List[RealDictRow], all_issues_with_context, first_stage, last_stage):
    """
    Returns two structures of binary 0/1 values:

    transitions ::: 1 if the session transitioned from the first stage to the last, else 0
    errors ::: a dictionary whose keys are all unique issues (currently context-wise);
               each value is a list with 1 if that issue happened between the first
               and the last stage, else 0

    For the smaller task of calculating the total drop due to issues,
    we need to disregard the issue type when creating the `errors`-like array.
    The `all_errors` array can be obtained by applying a logical OR across all errors by issue.
    The `transitions` array stays the same.
    """
    transitions = []
    n_sess_affected = 0
    errors = {}
    for issue in all_issues_with_context:
        split = issue.split('__^__')
        errors[issue] = {
            "errors": [],
            "issue_type": split[0],
            "context": split[1]}

    for row in rows:
        t = 0
        first_ts = row[f'stage{first_stage}_timestamp']
        last_ts = row[f'stage{last_stage}_timestamp']
        if first_ts is None:
            continue
        elif first_ts is not None and last_ts is not None:
            t = 1
        transitions.append(t)

        ic_present = False
        for issue_type_with_context in errors:
            ic = 0
            issue_type = errors[issue_type_with_context]["issue_type"]
            context = errors[issue_type_with_context]["context"]
            if row['issue_type'] is not None:
                if last_ts is None or (first_ts < row['issue_timestamp'] < last_ts):
                    context_in_row = row['issue_context'] if row['issue_context'] is not None else ''
                    if issue_type == row['issue_type'] and context == context_in_row:
                        ic = 1
                        ic_present = True
            errors[issue_type_with_context]["errors"].append(ic)

        if ic_present and t:
            n_sess_affected += 1

    # def tuple_or(t: tuple):
    #     x = 0
    #     for el in t:
    #         x |= el
    #     return x
    def tuple_or(t: tuple):
        for el in t:
            if el > 0:
                return 1
        return 0

    errors = {key: errors[key]["errors"] for key in errors}
    all_errors = [tuple_or(t) for t in zip(*errors.values())]

    return transitions, errors, all_errors, n_sess_affected
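A tiny worked example of the shapes returned above (hypothetical data): three sessions, where sessions 1 and 3 completed the sub-funnel and session 2 hit two issues on the way.

    transitions = [1, 0, 1]
    # one binary vector per "issue__^__context" key, aligned with transitions:
    errors = {"click_rage__^__#buy-btn": [0, 1, 0],
              "js_exception__^__": [0, 1, 1]}
    # all_errors ORs the vectors element-wise -> [0, 1, 1]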
@dev.timed
def get_affected_users_for_all_issues(rows, first_stage, last_stage):
    """
    :param rows:
    :param first_stage:
    :param last_stage:
    :return: the set of issue keys seen in the sub-funnel, plus per-issue counters and contexts
    """
    affected_users = defaultdict(lambda: set())
    affected_sessions = defaultdict(lambda: set())
    contexts = defaultdict(lambda: None)
    n_affected_users_dict = defaultdict(lambda: None)
    n_affected_sessions_dict = defaultdict(lambda: None)
    all_issues_with_context = set()
    n_issues_dict = defaultdict(lambda: 0)
    issues_by_session = defaultdict(lambda: 0)

    for row in rows:
        # check that the session has reached the first stage of the sub-funnel:
        if row[f'stage{first_stage}_timestamp'] is None:
            continue

        iss = row['issue_type']
        iss_ts = row['issue_timestamp']

        # check that the issue exists and belongs to the sub-funnel:
        if iss is not None and (row[f'stage{last_stage}_timestamp'] is None or
                                (row[f'stage{first_stage}_timestamp'] < iss_ts < row[f'stage{last_stage}_timestamp'])):
            context_string = row['issue_context'] if row['issue_context'] is not None else ''
            issue_with_context = iss + '__^__' + context_string
            contexts[issue_with_context] = {"context": context_string, "id": row["issue_id"]}
            all_issues_with_context.add(issue_with_context)
            n_issues_dict[issue_with_context] += 1
            if row['user_uuid'] is not None:
                affected_users[issue_with_context].add(row['user_uuid'])

            affected_sessions[issue_with_context].add(row['session_id'])
            issues_by_session[row['session_id']] += 1

    if len(affected_users) > 0:
        n_affected_users_dict.update({
            iss: len(affected_users[iss]) for iss in affected_users
        })
    if len(affected_sessions) > 0:
        n_affected_sessions_dict.update({
            iss: len(affected_sessions[iss]) for iss in affected_sessions
        })
    return all_issues_with_context, n_issues_dict, n_affected_users_dict, n_affected_sessions_dict, contexts


@dev.timed
def count_sessions(rows, n_stages):
    session_counts = {i: set() for i in range(1, n_stages + 1)}
    for ind, row in enumerate(rows):
        for i in range(1, n_stages + 1):
            if row[f"stage{i}_timestamp"] is not None:
                session_counts[i].add(row["session_id"])
    session_counts = {i: len(session_counts[i]) for i in session_counts}
    return session_counts


def count_users(rows, n_stages):
    users_in_stages = defaultdict(lambda: set())

    for ind, row in enumerate(rows):
        for i in range(1, n_stages + 1):
            if row[f"stage{i}_timestamp"] is not None:
                users_in_stages[i].add(row["user_uuid"])

    users_count = {i: len(users_in_stages[i]) for i in range(1, n_stages + 1)}

    return users_count


def get_stages(stages, rows):
    n_stages = len(stages)
    session_counts = count_sessions(rows, n_stages)
    users_counts = count_users(rows, n_stages)

    stages_list = []
    for i, stage in enumerate(stages):
        drop = None
        if i != 0:
            if session_counts[i] == 0:
                drop = 0
            elif session_counts[i] > 0:
                drop = int(100 * (session_counts[i] - session_counts[i + 1]) / session_counts[i])

        stages_list.append(
            {"value": stage["value"],
             "type": stage["type"],
             "operator": stage["operator"],
             "sessionsCount": session_counts[i + 1],
             "drop_pct": drop,
             "usersCount": users_counts[i + 1],
             "dropDueToIssues": 0
             }
        )
    return stages_list
@dev.timed
def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False):
    """
    :param stages:
    :param rows:
    :param first_stage: If it's a part of the initial funnel, provide the number of the first stage (starting from 1)
    :param last_stage: If it's a part of the initial funnel, provide the number of the last stage (starting from 1)
    :return:
    """
    n_stages = len(stages)

    if first_stage is None:
        first_stage = 1
    if last_stage is None:
        last_stage = n_stages
    if last_stage > n_stages:
        print("The number of the last stage provided is greater than the number of stages. Using n_stages instead")
        last_stage = n_stages

    n_critical_issues = 0
    issues_dict = dict({"significant": [],
                        "insignificant": []})
    session_counts = count_sessions(rows, n_stages)
    drop = session_counts[first_stage] - session_counts[last_stage]

    all_issues_with_context, n_issues_dict, affected_users_dict, affected_sessions, contexts = get_affected_users_for_all_issues(
        rows, first_stage, last_stage)
    transitions, errors, all_errors, n_sess_affected = get_transitions_and_issues_of_each_type(rows,
                                                                                               all_issues_with_context,
                                                                                               first_stage, last_stage)

    print("len(transitions) =", len(transitions))

    if any(all_errors):
        total_drop_corr, conf, is_sign = pearson_corr(transitions, all_errors)
        if total_drop_corr is not None and drop is not None:
            total_drop_due_to_issues = int(total_drop_corr * n_sess_affected)
        else:
            total_drop_due_to_issues = 0
    else:
        total_drop_due_to_issues = 0

    if drop_only:
        return total_drop_due_to_issues
    for issue in all_issues_with_context:
        if not any(errors[issue]):
            continue
        r, confidence, is_sign = pearson_corr(transitions, errors[issue])

        if r is not None and drop is not None and is_sign:
            lost_conversions = int(r * affected_sessions[issue])
        else:
            lost_conversions = None
        if r is None:
            r = 0
        split = issue.split('__^__')
        issues_dict['significant' if is_sign else 'insignificant'].append({
            "type": split[0],
            "title": get_issue_title(split[0]),
            "affected_sessions": affected_sessions[issue],
            "unaffected_sessions": session_counts[1] - affected_sessions[issue],
            "lost_conversions": lost_conversions,
            "affected_users": affected_users_dict[issue],
            "conversion_impact": round(r * 100),
            "context_string": contexts[issue]["context"],
            "issue_id": contexts[issue]["id"]
        })

        if is_sign:
            n_critical_issues += n_issues_dict[issue]

    return n_critical_issues, issues_dict, total_drop_due_to_issues
@dev.timed
def get_top_insights(filter_d, project_id):
    output = []
    stages = filter_d["events"]
    # TODO: handle 1 stage alone
    if len(stages) == 0:
        print("no stages found")
        return output, 0
    elif len(stages) == 1:
        # TODO: count sessions and users for a single stage
        output = [{
            "type": stages[0]["type"],
            "value": stages[0]["value"],
            "dropPercentage": None,
            "operator": stages[0]["operator"],
            "sessionsCount": 0,
            "dropPct": 0,
            "usersCount": 0,
            "dropDueToIssues": 0
        }]
        counts = sessions.search2_pg(data=filter_d, project_id=project_id, user_id=None, count_only=True)
        output[0]["sessionsCount"] = counts["countSessions"]
        output[0]["usersCount"] = counts["countUsers"]
        return output, 0
    # The result of the multi-stage query
    rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
    if len(rows) == 0:
        return get_stages(stages, []), 0
    # Obtain the first part of the output
    stages_list = get_stages(stages, rows)
    # Obtain the second part of the output
    total_drop_due_to_issues = get_issues(stages, rows, first_stage=filter_d.get("firstStage"),
                                          last_stage=filter_d.get("lastStage"), drop_only=True)
    return stages_list, total_drop_due_to_issues


@dev.timed
def get_issues_list(filter_d, project_id, first_stage=None, last_stage=None):
    output = dict({'critical_issues_count': 0})
    stages = filter_d["events"]
    # The result of the multi-stage query
    rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
    # print(json.dumps(rows[0],indent=4))
    # return
    if len(rows) == 0:
        return output
    # Obtain the second part of the output
    n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows, first_stage=first_stage,
                                                                          last_stage=last_stage)
    output['total_drop_due_to_issues'] = total_drop_due_to_issues
    # output['critical_issues_count'] = n_critical_issues
    output = {**output, **issues_dict}
    return output


def get_overview(filter_d, project_id, first_stage=None, last_stage=None):
    output = dict()
    stages = filter_d["events"]
    # TODO: handle 1 stage alone
    if len(stages) == 0:
        return {"stages": [],
                "criticalIssuesCount": 0}
    elif len(stages) == 1:
        # TODO: count sessions and users for a single stage
        output["stages"] = [{
            "type": stages[0]["type"],
            "value": stages[0]["value"],
            "sessionsCount": None,
            "dropPercentage": None,
            "usersCount": None
        }]
        return output
    # The result of the multi-stage query
    rows = get_stages_and_events(filter_d=filter_d, project_id=project_id)
    if len(rows) == 0:
        return output
    # Obtain the first part of the output
    stages_list = get_stages(stages, rows)

    # Obtain the second part of the output
    n_critical_issues, issues_dict, total_drop_due_to_issues = get_issues(stages, rows, first_stage=first_stage,
                                                                          last_stage=last_stage)

    output['stages'] = stages_list
    output['criticalIssuesCount'] = n_critical_issues
    return output


def get_issue_title(issue_type):
    return {'click_rage': "Click Rage",
            'dead_click': "Dead Click",
            'excessive_scrolling': "Excessive Scrolling",
            'bad_request': "Bad Request",
            'missing_resource': "Missing Image",
            'memory': "High Memory Usage",
            'cpu': "High CPU",
            'slow_resource': "Slow Resource",
            'slow_page_load': "Slow Page Performance",
            'crash': "Crash",
            'ml_cpu': "High CPU",
            'ml_memory': "High Memory Usage",
            'ml_dead_click': "Dead Click",
            'ml_click_rage': "Click Rage",
            'ml_mouse_thrashing': "Mouse Thrashing",
            'ml_excessive_scrolling': "Excessive Scrolling",
            'ml_slow_resources': "Slow Resource",
            'custom': "Custom Event",
            'js_exception': "Error",
            'custom_event_error': "Custom Error",
            'js_error': "Error"}.get(issue_type, issue_type)
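Putting the pieces together, get_overview returns roughly this shape (illustrative numbers only; keys taken from get_stages and get_issues above):

    # {
    #     "stages": [
    #         {"value": "Sign Up", "type": "CLICK", "operator": "is",
    #          "sessionsCount": 120, "drop_pct": None, "usersCount": 80, "dropDueToIssues": 0},
    #         {"value": "/checkout", "type": "LOCATION", "operator": "is",
    #          "sessionsCount": 42, "drop_pct": 65, "usersCount": 30, "dropDueToIssues": 0},
    #     ],
    #     "criticalIssuesCount": 3,
    # }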
171
api/chalicelib/core/signup.py
Normal file
@ -0,0 +1,171 @@
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.core import users, telemetry
from chalicelib.utils import captcha
import json
from chalicelib.utils.TimeUTC import TimeUTC


def get_signed_ups():
    with pg_client.PostgresClient() as cur:
        cur.execute("SELECT tenant_id, name FROM public.tenants;")
        rows = cur.fetchall()
    return helper.list_to_camel_case(rows)


def create_step1(data):
    print(f"===================== SIGNUP STEP 1 AT {TimeUTC.to_human_readable(TimeUTC.now())} UTC")
    errors = []

    email = data.get("email")
    print(f"=====================> {email}")
    password = data.get("password")

    print("Verifying email validity")
    email_exists = False
    if email is None or len(email) < 5 or not helper.is_valid_email(email):
        errors.append("Invalid email address.")
    else:
        print("Verifying email existence")
        if users.email_exists(email):
            # errors.append("Email address already in use.")
            email_exists = True
        if users.get_deleted_user_by_email(email) is not None:
            # errors.append("Email address previously deleted.")
            email_exists = True

    print("Verifying captcha")
    if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]):
        errors.append("Invalid captcha.")

    print("Verifying password validity")
    if password is None or len(password) < 6:
        errors.append("Password is too short, it must be at least 6 characters long.")

    print("Verifying full name validity")
    fullname = data.get("fullname")
    if fullname is None or len(fullname) < 1 or not helper.is_alphabet_space_dash(fullname):
        errors.append("Invalid full name.")

    print("Verifying organization name validity")
    company_name = data.get("organizationName")
    if company_name is None or len(company_name) < 1 or not helper.is_alphanumeric_space(company_name):
        errors.append("Invalid organization name.")

    print("Verifying project name validity")
    project_name = data.get("projectName")
    if project_name is None or len(project_name) < 1:
        project_name = "my first project"
    if len(get_signed_ups()) > 0 and data.get("tenantId") is None:
        errors.append("Tenant already exists, please select it from the dropdown.")
    if len(errors) > 0:
        print("==> error")
        print(errors)
        return {"errors": errors}
    print("No errors detected")
    params = {
        "email": email, "password": password,
        "fullname": fullname,
        "projectName": project_name,
        "data": json.dumps({"lastAnnouncementView": TimeUTC.now()}),
        "organizationName": company_name,
        "versionNumber": "0.0.0"
    }
    if data.get("tenantId") is not None:
        update_user = """
            u AS (
                UPDATE public.users
                SET name = %(fullname)s, deleted_at = NULL
                WHERE email = %(email)s
                RETURNING user_id, email, role, name
            )
            UPDATE public.basic_authentication
            SET password = crypt(%(password)s, gen_salt('bf', 12))
            WHERE user_id = (SELECT user_id FROM u)"""
        insert_user = """
            a AS (
                UPDATE public.users
                SET role = 'admin'
                WHERE role = 'owner'
            ),
            u AS (
                INSERT INTO public.users (email, role, name, data)
                VALUES (%(email)s, 'owner', %(fullname)s, %(data)s)
                RETURNING user_id, email, role, name
            )
            INSERT INTO public.basic_authentication (user_id, password, generated_password)
            VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12)), FALSE)"""
        query = f"""\
            WITH t AS (
                UPDATE public.tenants
                SET name = %(organizationName)s,
                    version_number = %(versionNumber)s
                RETURNING api_key
            ),
            {update_user if email_exists else insert_user}
            RETURNING (SELECT api_key FROM t) AS api_key, (SELECT project_id FROM projects LIMIT 1) AS project_id;"""
    else:
        query = """\
            WITH t AS (
                INSERT INTO public.tenants (name, version_number, edition)
                VALUES (%(organizationName)s, %(versionNumber)s, 'fos')
                RETURNING api_key
            ),
            u AS (
                INSERT INTO public.users (email, role, name, data)
                VALUES (%(email)s, 'owner', %(fullname)s, %(data)s)
                RETURNING user_id, email, role, name
            ),
            au AS (
                INSERT INTO public.basic_authentication (user_id, password, generated_password)
                VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12)), FALSE)
            )
            INSERT INTO public.projects (name, active)
            VALUES (%(projectName)s, TRUE)
            RETURNING project_id, (SELECT api_key FROM t) AS api_key;"""

    with pg_client.PostgresClient() as cur:
        cur.execute(cur.mogrify(query, params))
        row = cur.fetchone()  # renamed from `cur` to avoid shadowing the cursor
    project_id = row["project_id"]
    api_key = row["api_key"]
    telemetry.new_client()
    created_at = TimeUTC.now()
    r = users.authenticate(email, password)
    r["banner"] = False
    r["limits"] = {
        "teamMember": {"limit": 99, "remaining": 98, "count": 1},
        "projects": {"limit": 99, "remaining": 98, "count": 1},
        "metadata": [{
            "projectId": project_id,
            "name": project_name,
            "limit": 10,
            "remaining": 10,
            "count": 0
        }]
    }
    c = {
        "tenantId": 1,
        "name": company_name,
        "apiKey": api_key,
        "remainingTrial": 14,
        "trialEnded": False,
        "billingPeriodStartDate": created_at,
        "hasActivePlan": True,
        "projects": [
            {
                "projectId": project_id,
                "name": project_name,
                "recorded": False,
                "stackIntegrations": False,
                "status": "red"
            }
        ]
    }
    return {
        'jwt': r.pop('jwt'),
        'data': {
            "user": r,
            "client": c,
        }
    }
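For reference, the request payload create_step1 expects (illustrative values; only projectName and tenantId are optional):

    data = {
        "email": "jane@example.com",
        "password": "secret-password",        # min 6 characters
        "fullname": "Jane Doe",
        "organizationName": "Acme",
        "projectName": "my first project",    # optional, defaulted
        "g-recaptcha-response": "...",        # only read when captcha is enabled
        # "tenantId": 1,                      # only when re-claiming an existing tenant
    }
    result = create_step1(data)
    # -> {"errors": [...]} on validation failure,
    #    {"jwt": ..., "data": {"user": ..., "client": ...}} on success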
32
api/chalicelib/core/slack.py
Normal file
@ -0,0 +1,32 @@
from datetime import datetime
from chalicelib.utils.helper import environ

from chalicelib.core.collaboration_slack import Slack


def send(notification, destination):
    if notification is None:
        return
    return Slack.send_text(tenant_id=notification["tenantId"],
                           webhook_id=destination,
                           text=notification["description"]
                                + f"\n<{environ['SITE_URL']}{notification['buttonUrl']}|{notification['buttonText']}>",
                           title=notification["title"],
                           title_link=notification["buttonUrl"])


def send_batch(notifications_list):
    if notifications_list is None or len(notifications_list) == 0:
        return
    webhookId_map = {}
    for n in notifications_list:
        if n.get("destination") not in webhookId_map:
            webhookId_map[n.get("destination")] = {"tenantId": n["notification"]["tenantId"], "batch": []}
        webhookId_map[n.get("destination")]["batch"].append(
            {"text": n["notification"]["description"]
                     + f"\n<{environ['SITE_URL']}{n['notification']['buttonUrl']}|{n['notification']['buttonText']}>",
             "title": n["notification"]["title"],
             "title_link": n["notification"]["buttonUrl"],
             "ts": datetime.now().timestamp()})
    for batch in webhookId_map.keys():
        Slack.send_batch(tenant_id=webhookId_map[batch]["tenantId"], webhook_id=batch,
                         attachments=webhookId_map[batch]["batch"])
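A sketch of the notifications_list shape send_batch groups and forwards (illustrative values): one entry per notification, keyed by its destination webhook.

    notifications_list = [
        {"destination": 12,   # webhook_id
         "notification": {"tenantId": 1,
                          "title": "New alert",
                          "description": "CPU spiked on /checkout",
                          "buttonText": "Open alert",
                          "buttonUrl": "/alerts/42"}},
    ]
    send_batch(notifications_list)  # one Slack.send_batch call per distinct webhook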
22
api/chalicelib/core/socket_ios.py
Normal file
@ -0,0 +1,22 @@
import requests
from chalicelib.utils.helper import environ
from chalicelib.core import projects


def start_replay(project_id, session_id, device, os_version, mob_url):
    r = requests.post(environ["IOS_MIDDLEWARE"] + "/replay", json={
        "projectId": project_id,
        "projectKey": projects.get_project_key(project_id),
        "sessionId": session_id,
        "device": device,
        "osVersion": os_version,
        "mobUrl": mob_url
    })
    if r.status_code != 200:
        print("failed replay middleware")
        print("status code: %s" % r.status_code)
        print(r.text)
        return r.text
    result = r.json()
    result["url"] = environ["IOS_MIDDLEWARE"]
    return result
157
api/chalicelib/core/sourcemaps.py
Normal file
@ -0,0 +1,157 @@
from chalicelib.utils.helper import environ
from chalicelib.utils import helper

from chalicelib.utils import s3
import hashlib
from urllib.parse import urlparse

from chalicelib.core import sourcemaps_parser


def __get_key(project_id, url):
    u = urlparse(url)
    new_url = u.scheme + "://" + u.netloc + u.path
    return f"{project_id}/{hashlib.md5(new_url.encode()).hexdigest()}"


def presign_share_urls(project_id, urls):
    results = []
    for u in urls:
        results.append(s3.get_presigned_url_for_sharing(bucket=environ['sourcemaps_bucket'], expires_in=120,
                                                        key=__get_key(project_id, u),
                                                        check_exists=True))
    return results


def presign_upload_urls(project_id, urls):
    results = []
    for u in urls:
        results.append(s3.get_presigned_url_for_upload(bucket=environ['sourcemaps_bucket'],
                                                       expires_in=1800,
                                                       key=__get_key(project_id, u)))
    return results


def __format_frame_old(f):
    if f.get("context") is None:
        f["context"] = []
    else:
        f["context"] = [[f["line"], f["context"]]]
    url = f.pop("url")
    f["absPath"] = url
    f["filename"] = urlparse(url).path
    f["lineNo"] = f.pop("line")
    f["colNo"] = f.pop("column")
    f["function"] = f.pop("func")
    return f


def __frame_is_valid(f):
    return "columnNumber" in f and \
           "lineNumber" in f and \
           "fileName" in f


def __format_frame(f):
    f["context"] = []  # no context by default
    if "source" in f:
        f.pop("source")
    url = f.pop("fileName")
    f["absPath"] = url
    f["filename"] = urlparse(url).path
    f["lineNo"] = f.pop("lineNumber")
    f["colNo"] = f.pop("columnNumber")
    f["function"] = f.pop("functionName") if "functionName" in f else None
    return f


def format_payload(p, truncate_to_first=False):
    if type(p) is list:
        return [__format_frame(f) for f in (p[:1] if truncate_to_first else p) if __frame_is_valid(f)]
    if type(p) is dict:
        stack = p.get("stack", [])
        return [__format_frame_old(f) for f in (stack[:1] if truncate_to_first else stack)]
    return []


def get_traces_group(project_id, payload):
    frames = format_payload(payload)

    results = [{}] * len(frames)
    payloads = {}
    all_exists = True
    for i, u in enumerate(frames):
        key = __get_key(project_id, u["absPath"])  # use filename instead?
        if key not in payloads:
            file_exists = s3.exists(environ['sourcemaps_bucket'], key)
            all_exists = all_exists and file_exists
            if not file_exists:
                print(f"{u['absPath']} sourcemap (key '{key}') doesn't exist in S3")
                payloads[key] = None
            else:
                payloads[key] = []
        results[i] = dict(u)
        results[i]["frame"] = dict(u)
        if payloads[key] is not None:
            payloads[key].append({"resultIndex": i,
                                  "position": {"line": u["lineNo"], "column": u["colNo"]},
                                  "frame": dict(u)})
    for key in payloads.keys():
        if payloads[key] is None:
            continue
        key_results = sourcemaps_parser.get_original_trace(key=key, positions=[o["position"] for o in payloads[key]])
        for i, r in enumerate(key_results):
            res_index = payloads[key][i]["resultIndex"]
            # the function name found by the frontend lib is better than the sourcemaps' one in most cases
            if results[res_index].get("function") is not None:
                r["function"] = results[res_index]["function"]
            r["frame"] = payloads[key][i]["frame"]
            results[res_index] = r
    return fetch_missed_contexts(results), all_exists


def get_js_cache_path(fullURL):
    p = urlparse(fullURL)
    return p.scheme + '/' + p.netloc + p.path  # TODO (Also in go assets library): What if URL with query? (like versions)


MAX_COLUMN_OFFSET = 60


def fetch_missed_contexts(frames):
    source_cache = {}
    for i in range(len(frames)):
        if len(frames[i]["context"]) != 0:
            continue
        if frames[i]["frame"]["absPath"] in source_cache:
            file = source_cache[frames[i]["frame"]["absPath"]]
        else:
            file = s3.get_file(environ['js_cache_bucket'], get_js_cache_path(frames[i]["frame"]["absPath"]))
            if file is None:
                print(
                    f"File {get_js_cache_path(frames[i]['frame']['absPath'])} not found in {environ['js_cache_bucket']}")
            source_cache[frames[i]["frame"]["absPath"]] = file
        if file is None:
            continue
        lines = file.split("\n")

        if frames[i]["lineNo"] is None:
            # no original-source mapping found; fall back to the minified frame
            print("no original-source found for frame in sourcemap results")
            frames[i] = frames[i]["frame"]
            frames[i]["originalMapping"] = False

        l = frames[i]["lineNo"] - 1  # starts from 1
        c = frames[i]["colNo"] - 1  # starts from 1
        if len(lines) == 1:
            print("minified asset")
            l = frames[i]["frame"]["lineNo"] - 1  # starts from 1
            c = frames[i]["frame"]["colNo"] - 1  # starts from 1
        elif l >= len(lines):
            print(f"line number {l} greater than file length {len(lines)}")
            continue

        line = lines[l]
        offset = c - MAX_COLUMN_OFFSET
        if offset < 0:  # if the line is short
            offset = 0
        frames[i]["context"].append([frames[i]["lineNo"], line[offset: c + MAX_COLUMN_OFFSET + 1]])
    return frames
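Note how __get_key above fingerprints the URL after stripping the query string, so re-deployments that only bump a cache-busting parameter keep resolving to the same sourcemap key (illustrative values):

    # __get_key(1, "https://app.example.com/static/main.js?v=42")
    #   == "1/" + hashlib.md5(b"https://app.example.com/static/main.js").hexdigest()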
22
api/chalicelib/core/sourcemaps_parser.py
Normal file
@ -0,0 +1,22 @@
import requests

from chalicelib.utils.helper import environ


def get_original_trace(key, positions):
    payload = {
        "key": key,
        "positions": positions,
        "padding": 5,
        "bucket": environ['sourcemaps_bucket'],
        "bucket_config": {
            "aws_access_key_id": environ["sourcemaps_bucket_key"],
            "aws_secret_access_key": environ["sourcemaps_bucket_secret"],
            "aws_region": environ["sourcemaps_bucket_region"]
        }
    }
    r = requests.post(environ["sourcemaps"], json=payload)
    if r.status_code != 200:
        return {}

    return r.json()
43
api/chalicelib/core/telemetry.py
Normal file
@ -0,0 +1,43 @@
from chalicelib.utils import pg_client
import requests


def process_data(data, edition='fos'):
    return {
        'edition': edition,
        'tracking': data["opt_out"],
        'version': data["version_number"],
        'user_id': data["user_id"],
        'owner_email': None if data["opt_out"] else data["email"],
        'organization_name': None if data["opt_out"] else data["name"],
        'users_count': data["t_users"],
        'projects_count': data["t_projects"],
        'sessions_count': data["t_sessions"],
        'integrations_count': data["t_integrations"]
    }


def compute():
    with pg_client.PostgresClient() as cur:
        cur.execute(
            """UPDATE public.tenants
               SET t_integrations = COALESCE((SELECT COUNT(DISTINCT provider) FROM public.integrations) +
                                             (SELECT COUNT(*) FROM public.webhooks WHERE type = 'slack') +
                                             (SELECT COUNT(*) FROM public.jira_cloud), 0),
                   t_projects=COALESCE((SELECT COUNT(*) FROM public.projects WHERE deleted_at ISNULL), 0),
                   t_sessions=COALESCE((SELECT COUNT(*) FROM public.sessions), 0),
                   t_users=COALESCE((SELECT COUNT(*) FROM public.users WHERE deleted_at ISNULL), 0)
               RETURNING *, (SELECT email FROM public.users WHERE role='owner' LIMIT 1);"""
        )
        data = cur.fetchone()
    requests.post('https://parrot.asayer.io/os/telemetry', json=process_data(data))


def new_client():
    with pg_client.PostgresClient() as cur:
        cur.execute(
            """SELECT *,
                      (SELECT email FROM public.users WHERE role='owner' LIMIT 1) AS email
               FROM public.tenants;""")
        data = cur.fetchone()
    requests.post('https://parrot.asayer.io/os/signup', json=process_data(data))
83
api/chalicelib/core/tenants.py
Normal file
@ -0,0 +1,83 @@
from chalicelib.utils import pg_client
from chalicelib.utils import helper
from chalicelib.core import users


def get_by_tenant_id(tenant_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """SELECT tenant_id,
                          name,
                          api_key,
                          created_at,
                          edition,
                          version_number,
                          opt_out
                   FROM public.tenants
                   LIMIT 1;""",
                {"tenantId": tenant_id})
        )
        return helper.dict_to_camel_case(cur.fetchone())


def get_by_api_key(api_key):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """SELECT 1 AS tenant_id,
                          name,
                          created_at
                   FROM public.tenants
                   WHERE api_key = %(api_key)s
                   LIMIT 1;""",
                {"api_key": api_key})
        )
        return helper.dict_to_camel_case(cur.fetchone())


def generate_new_api_key(tenant_id):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(
                """UPDATE public.tenants
                   SET api_key=generate_api_key(20)
                   RETURNING api_key;""",
                {"tenant_id": tenant_id})
        )
        return helper.dict_to_camel_case(cur.fetchone())


def edit_client(tenant_id, changes):
    with pg_client.PostgresClient() as cur:
        cur.execute(
            cur.mogrify(f"""\
                UPDATE public.tenants
                SET {", ".join([f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys()])}
                RETURNING name, opt_out;""",
                        {"tenantId": tenant_id, **changes})
        )
        return helper.dict_to_camel_case(cur.fetchone())


def update(tenant_id, user_id, data):
    admin = users.get(user_id=user_id, tenant_id=tenant_id)

    if not admin["admin"] and not admin["superAdmin"]:
        return {"error": "unauthorized"}
    if "name" not in data and "optOut" not in data:
        return {"errors": ["please provide a 'name' or 'optOut' attribute for the update"]}
    changes = {}
    if "name" in data:
        changes["name"] = data["name"]
    if "optOut" in data:
        changes["optOut"] = data["optOut"]
    return edit_client(tenant_id=tenant_id, changes=changes)


def get_tenants():
    with pg_client.PostgresClient() as cur:
        cur.execute("SELECT name FROM public.tenants;")
        return helper.list_to_camel_case(cur.fetchall())
463
api/chalicelib/core/users.py
Normal file
|
|
@ -0,0 +1,463 @@
|
|||
import json
|
||||
|
||||
from chalicelib.core import authorizers
|
||||
|
||||
from chalicelib.utils import helper
|
||||
from chalicelib.utils import pg_client
|
||||
from chalicelib.utils import dev
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
from chalicelib.utils.helper import environ
|
||||
|
||||
from chalicelib.core import tenants
|
||||
|
||||
|
||||
def create_new_member(email, password, admin, name, owner=False):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""\
|
||||
WITH u AS (
|
||||
INSERT INTO public.users (email, role, name, data)
|
||||
VALUES (%(email)s, %(role)s, %(name)s, %(data)s)
|
||||
RETURNING user_id,email,role,name,appearance
|
||||
),
|
||||
au AS (INSERT
|
||||
INTO public.basic_authentication (user_id, password, generated_password)
|
||||
VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12)), TRUE))
|
||||
SELECT u.user_id AS id,
|
||||
u.email,
|
||||
u.role,
|
||||
u.name,
|
||||
TRUE AS change_password,
|
||||
(CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
|
||||
(CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
|
||||
(CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member,
|
||||
u.appearance
|
||||
FROM u;""",
|
||||
{"email": email, "password": password,
|
||||
"role": "owner" if owner else "admin" if admin else "member", "name": name,
|
||||
"data": json.dumps({"lastAnnouncementView": TimeUTC.now()})})
|
||||
cur.execute(
|
||||
query
|
||||
)
|
||||
return helper.dict_to_camel_case(cur.fetchone())
|
||||
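The crypt(%(password)s, gen_salt('bf', 12)) call above is pgcrypto's bcrypt hashing at cost factor 12. As a rough application-side illustration only (this code hashes inside the database; the third-party bcrypt package is not a dependency here):

import bcrypt

hashed = bcrypt.hashpw(b"temp-pass", bcrypt.gensalt(rounds=12))  # ~ gen_salt('bf', 12)
assert bcrypt.checkpw(b"temp-pass", hashed)  # ~ crypt(password, stored_hash) = stored_hash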
|
||||
|
||||
def restore_member(user_id, email, password, admin, name, owner=False):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(f"""\
|
||||
UPDATE public.users
|
||||
SET name= %(name)s,
|
||||
role = %(role)s,
|
||||
deleted_at= NULL,
|
||||
created_at = timezone('utc'::text, now()),
|
||||
api_key= generate_api_key(20)
|
||||
WHERE user_id=%(user_id)s
|
||||
RETURNING user_id AS id,
|
||||
email,
|
||||
role,
|
||||
name,
|
||||
TRUE AS change_password,
|
||||
(CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
|
||||
(CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
|
||||
(CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member,
|
||||
appearance;""",
|
||||
{"user_id": user_id, "email": email,
|
||||
"role": "owner" if owner else "admin" if admin else "member", "name": name})
|
||||
cur.execute(
|
||||
query
|
||||
)
|
||||
result = helper.dict_to_camel_case(cur.fetchone())
|
||||
query = cur.mogrify("""\
|
||||
UPDATE public.basic_authentication
|
||||
SET password= crypt(%(password)s, gen_salt('bf', 12)),
|
||||
generated_password= TRUE,
|
||||
token=NULL,
|
||||
token_requested_at=NULL
|
||||
WHERE user_id=%(user_id)s;""",
|
||||
{"user_id": user_id, "password": password})
|
||||
cur.execute(
|
||||
query
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def update(tenant_id, user_id, changes):
|
||||
AUTH_KEYS = ["password", "generatedPassword", "token"]
|
||||
if len(changes.keys()) == 0:
|
||||
return None
|
||||
|
||||
sub_query_users = []
|
||||
sub_query_bauth = []
|
||||
for key in changes.keys():
|
||||
if key in AUTH_KEYS:
|
||||
if key == "password":
|
||||
sub_query_bauth.append("password = crypt(%(password)s, gen_salt('bf', 12))")
|
||||
sub_query_bauth.append("changed_at = timezone('utc'::text, now())")
|
||||
elif key == "token":
|
||||
if changes[key] is not None:
|
||||
sub_query_bauth.append("token = %(token)s")
|
||||
sub_query_bauth.append("token_requested_at = timezone('utc'::text, now())")
|
||||
else:
|
||||
sub_query_bauth.append("token = NULL")
|
||||
sub_query_bauth.append("token_requested_at = NULL")
|
||||
else:
|
||||
sub_query_bauth.append(f"{helper.key_to_snake_case(key)} = %({key})s")
|
||||
else:
|
||||
if key == "appearance":
|
||||
sub_query_users.append(f"appearance = %(appearance)s::jsonb")
|
||||
changes["appearance"] = json.dumps(changes[key])
|
||||
else:
|
||||
sub_query_users.append(f"{helper.key_to_snake_case(key)} = %({key})s")
|
||||
|
||||
with pg_client.PostgresClient() as cur:
|
||||
if len(sub_query_users) > 0:
|
||||
cur.execute(
|
||||
cur.mogrify(f"""\
|
||||
UPDATE public.users
|
||||
SET {" ,".join(sub_query_users)}
|
||||
FROM public.basic_authentication
|
||||
WHERE users.user_id = %(user_id)s
|
||||
AND users.user_id = basic_authentication.user_id
|
||||
RETURNING users.user_id AS id,
|
||||
users.email,
|
||||
users.role,
|
||||
users.name,
|
||||
basic_authentication.generated_password AS change_password,
|
||||
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
|
||||
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
|
||||
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
|
||||
users.appearance;""",
|
||||
{"user_id": user_id, **changes})
|
||||
)
|
||||
if len(sub_query_bauth) > 0:
|
||||
cur.execute(
|
||||
cur.mogrify(f"""\
|
||||
UPDATE public.basic_authentication
|
||||
SET {" ,".join(sub_query_bauth)}
|
||||
FROM public.users AS users
|
||||
WHERE basic_authentication.user_id = %(user_id)s
|
||||
AND users.user_id = basic_authentication.user_id
|
||||
RETURNING users.user_id AS id,
|
||||
users.email,
|
||||
users.role,
|
||||
users.name,
|
||||
basic_authentication.generated_password AS change_password,
|
||||
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
|
||||
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
|
||||
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
|
||||
users.appearance;""",
|
||||
{"user_id": user_id, **changes})
|
||||
)
|
||||
|
||||
return helper.dict_to_camel_case(cur.fetchone())
|
||||
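To make the routing in update explicit: keys in AUTH_KEYS become assignments on public.basic_authentication, everything else lands on public.users, and each table is updated only when its list is non-empty. A dry run of that split, in plain Python:

AUTH_KEYS = ["password", "generatedPassword", "token"]
changes = {"name": "Jane", "password": "new-secret", "generatedPassword": False}

users_cols = [k for k in changes if k not in AUTH_KEYS]
bauth_cols = [k for k in changes if k in AUTH_KEYS]
# users_cols == ["name"]                          -> UPDATE public.users ...
# bauth_cols == ["password", "generatedPassword"] -> UPDATE public.basic_authentication ...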
|
||||
|
||||
def create_member(tenant_id, user_id, data):
|
||||
admin = get(tenant_id=tenant_id, user_id=user_id)
|
||||
if not admin["admin"] and not admin["superAdmin"]:
|
||||
return {"errors": ["unauthorized"]}
|
||||
if data.get("userId") is not None:
|
||||
return {"errors": ["please use POST/PUT /client/members/{memberId} for update"]}
|
||||
user = get_by_email_only(email=data["email"])
|
||||
if user:
|
||||
return {"errors": ["user already exists"]}
|
||||
name = data.get("name", None)
|
||||
if name is not None and not helper.is_alphabet_latin_space(name):
|
||||
return {"errors": ["invalid user name"]}
|
||||
if name is None:
|
||||
name = data["email"]
|
||||
temp_pass = helper.generate_salt()[:8]
|
||||
user = get_deleted_user_by_email(email=data["email"])
|
||||
if user is not None:
|
||||
new_member = restore_member(email=data["email"], password=temp_pass,
|
||||
admin=data.get("admin", False), name=name, user_id=user["userId"])
|
||||
else:
|
||||
new_member = create_new_member(email=data["email"], password=temp_pass,
|
||||
admin=data.get("admin", False), name=name)
|
||||
|
||||
helper.async_post(environ['email_basic'] % 'member_invitation',
|
||||
{
|
||||
"email": data["email"],
|
||||
"userName": data["email"],
|
||||
"tempPassword": temp_pass,
|
||||
"clientId": tenants.get_by_tenant_id(tenant_id)["name"],
|
||||
"senderName": admin["name"]
|
||||
})
|
||||
return {"data": new_member}
|
||||
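The temporary password above is the first 8 characters of a hex salt; inlining helper.generate_salt (defined later in this diff) makes that concrete:

import random
import string

salt = "".join(random.choices(string.hexdigits, k=36))  # body of helper.generate_salt()
temp_pass = salt[:8]  # e.g. "3fA81c0D": 8 hex characters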
|
||||
|
||||
def get(user_id, tenant_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
f"""SELECT
|
||||
users.user_id AS id,
|
||||
email,
|
||||
role,
|
||||
name,
|
||||
basic_authentication.generated_password,
|
||||
(CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
|
||||
(CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
|
||||
(CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member,
|
||||
appearance,
|
||||
api_key
|
||||
FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
|
||||
WHERE
|
||||
users.user_id = %(userId)s
|
||||
AND deleted_at IS NULL
|
||||
LIMIT 1;""",
|
||||
{"userId": user_id})
|
||||
)
|
||||
r = cur.fetchone()
|
||||
return helper.dict_to_camel_case(r, ignore_keys=["appearance"])
|
||||
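The ignore_keys argument above keeps the appearance JSON exactly as stored while the rest of the row is camel-cased. The expected mapping, assuming helper.dict_to_camel_case performs the usual snake-to-camel key conversion:

row = {"user_id": 7, "generated_password": False, "appearance": {"tab_order": [1, 2]}}
# dict_to_camel_case(row, ignore_keys=["appearance"]) should yield:
# {"userId": 7, "generatedPassword": False, "appearance": {"tab_order": [1, 2]}}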
|
||||
|
||||
def generate_new_api_key(user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
f"""UPDATE public.users
|
||||
SET api_key=generate_api_key(20)
|
||||
WHERE
|
||||
users.user_id = %(userId)s
|
||||
AND deleted_at IS NULL
|
||||
RETURNING api_key;""",
|
||||
{"userId": user_id})
|
||||
)
|
||||
r = cur.fetchone()
|
||||
return helper.dict_to_camel_case(r)
|
||||
|
||||
|
||||
def edit(user_id_to_update, tenant_id, changes, editor_id):
|
||||
ALLOW_EDIT = ["name", "email", "admin", "appearance"]
|
||||
user = get(user_id=user_id_to_update, tenant_id=tenant_id)
|
||||
if editor_id != user_id_to_update or ("admin" in changes and changes["admin"] != user["admin"]):
|
||||
admin = get(tenant_id=tenant_id, user_id=editor_id)
|
||||
if not admin["superAdmin"] and not admin["admin"]:
|
||||
return {"errors": ["unauthorized"]}
|
||||
|
||||
keys = list(changes.keys())
|
||||
for k in keys:
|
||||
if k not in ALLOW_EDIT:
|
||||
changes.pop(k)
|
||||
keys = list(changes.keys())
|
||||
|
||||
if len(keys) > 0:
|
||||
if "email" in keys and changes["email"] != user["email"]:
|
||||
if email_exists(changes["email"]):
|
||||
return {"errors": ["email already exists."]}
|
||||
if get_deleted_user_by_email(changes["email"]) is not None:
|
||||
return {"errors": ["email previously deleted."]}
|
||||
if "admin" in keys:
|
||||
changes["role"] = "admin" if changes.pop("admin") else "member"
|
||||
if len(changes.keys()) > 0:
|
||||
updated_user = update(tenant_id=tenant_id, user_id=user_id_to_update, changes=changes)
|
||||
|
||||
return {"data": updated_user}
|
||||
return {"data": user}
|
||||
|
||||
|
||||
def get_by_email_only(email):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
f"""SELECT
|
||||
users.user_id AS id,
|
||||
1 AS tenant_id,
|
||||
users.email,
|
||||
users.role,
|
||||
users.name,
|
||||
basic_authentication.generated_password,
|
||||
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
|
||||
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
|
||||
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member
|
||||
FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
|
||||
WHERE
|
||||
users.email = %(email)s
|
||||
AND users.deleted_at IS NULL;""",
|
||||
{"email": email})
|
||||
)
|
||||
r = cur.fetchall()
|
||||
return helper.list_to_camel_case(r)
|
||||
|
||||
|
||||
def get_by_email_reset(email, reset_token):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
f"""SELECT
|
||||
users.user_id AS id,
|
||||
1 AS tenant_id,
|
||||
users.email,
|
||||
users.role,
|
||||
users.name,
|
||||
basic_authentication.generated_password,
|
||||
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
|
||||
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
|
||||
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member
|
||||
FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
|
||||
WHERE
|
||||
users.email = %(email)s
|
||||
AND basic_authentication.token =%(token)s
|
||||
AND users.deleted_at IS NULL""",
|
||||
{"email": email, "token": reset_token})
|
||||
)
|
||||
r = cur.fetchone()
|
||||
return helper.dict_to_camel_case(r)
|
||||
|
||||
|
||||
def get_members(tenant_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
f"""SELECT
|
||||
users.user_id AS id,
|
||||
users.email,
|
||||
users.role,
|
||||
users.name,
|
||||
basic_authentication.generated_password,
|
||||
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
|
||||
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
|
||||
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member
|
||||
FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
|
||||
WHERE users.deleted_at IS NULL
|
||||
ORDER BY name, id"""
|
||||
)
|
||||
r = cur.fetchall()
|
||||
if len(r):
|
||||
return helper.list_to_camel_case(r)
|
||||
|
||||
return []
|
||||
|
||||
|
||||
def delete_member(user_id, tenant_id, id_to_delete):
|
||||
if user_id == id_to_delete:
|
||||
return {"errors": ["unauthorized, cannot delete self"]}
|
||||
|
||||
admin = get(user_id=user_id, tenant_id=tenant_id)
|
||||
if admin["member"]:
|
||||
return {"errors": ["unauthorized"]}
|
||||
|
||||
to_delete = get(user_id=id_to_delete, tenant_id=tenant_id)
|
||||
if to_delete is None:
|
||||
return {"errors": ["not found"]}
|
||||
|
||||
if to_delete["superAdmin"]:
|
||||
return {"errors": ["cannot delete super admin"]}
|
||||
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(f"""UPDATE public.users
|
||||
SET deleted_at = timezone('utc'::text, now())
|
||||
WHERE user_id=%(user_id)s;""",
|
||||
{"user_id": id_to_delete}))
|
||||
return {"data": get_members(tenant_id=tenant_id)}
|
||||
|
||||
|
||||
def change_password(tenant_id, user_id, email, old_password, new_password):
|
||||
item = get(tenant_id=tenant_id, user_id=user_id)
|
||||
if item is None:
|
||||
return {"errors": ["access denied"]}
|
||||
if old_password == new_password:
|
||||
return {"errors": ["old and new password are the same"]}
|
||||
auth = authenticate(email, old_password, for_change_password=True)
|
||||
if auth is None:
|
||||
return {"errors": ["wrong password"]}
|
||||
changes = {"password": new_password, "generatedPassword": False}
|
||||
return {"data": update(tenant_id=tenant_id, user_id=user_id, changes=changes),
|
||||
"jwt": authenticate(email, new_password)["jwt"]}
|
||||
|
||||
|
||||
def email_exists(email):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
f"""SELECT
|
||||
count(user_id)
|
||||
FROM public.users
|
||||
WHERE
|
||||
email = %(email)s
|
||||
AND deleted_at IS NULL
|
||||
LIMIT 1;""",
|
||||
{"email": email})
|
||||
)
|
||||
r = cur.fetchone()
|
||||
return r["count"] > 0
|
||||
|
||||
|
||||
def get_deleted_user_by_email(email):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
f"""SELECT
|
||||
*
|
||||
FROM public.users
|
||||
WHERE
|
||||
email = %(email)s
|
||||
AND deleted_at NOTNULL
|
||||
LIMIT 1;""",
|
||||
{"email": email})
|
||||
)
|
||||
r = cur.fetchone()
|
||||
return helper.dict_to_camel_case(r)
|
||||
|
||||
|
||||
def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify(
|
||||
f"SELECT user_id AS id,jwt_iat, changed_at FROM public.users INNER JOIN public.basic_authentication USING(user_id) WHERE user_id = %(userId)s AND deleted_at IS NULL LIMIT 1;",
|
||||
{"userId": user_id})
|
||||
)
|
||||
r = cur.fetchone()
|
||||
return r is not None \
|
||||
and r.get("jwt_iat") is not None \
|
||||
and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
|
||||
or (jwt_aud.startswith("plugin") \
|
||||
and (r["changed_at"] is None \
|
||||
or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
|
||||
)
|
||||
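auth_exists compares the token's iat claim (seconds) against the stored jwt_iat (a timestamp converted to milliseconds), tolerating at most one second of drift; plugin tokens instead only need to postdate the last password change. The core arithmetic, with illustrative values:

stored_iat_ms = 1_600_000_000_500  # TimeUTC.datetime_to_timestamp(r["jwt_iat"])
token_iat_s = 1_600_000_000        # iat claim carried by the JWT
assert abs(token_iat_s - stored_iat_ms // 1000) <= 1  # accepted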
|
||||
|
||||
@dev.timed
|
||||
def authenticate(email, password, for_change_password=False, for_plugin=False):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify(
|
||||
f"""SELECT
|
||||
users.user_id AS id,
|
||||
1 AS tenant_id,
|
||||
users.role,
|
||||
users.name,
|
||||
basic_authentication.generated_password AS change_password,
|
||||
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
|
||||
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
|
||||
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
|
||||
users.appearance
|
||||
FROM public.users INNER JOIN public.basic_authentication USING(user_id)
|
||||
WHERE users.email = %(email)s
|
||||
AND basic_authentication.password = crypt(%(password)s, basic_authentication.password)
|
||||
AND basic_authentication.user_id = (SELECT su.user_id FROM public.users AS su WHERE su.email=%(email)s AND su.deleted_at IS NULL LIMIT 1)
|
||||
LIMIT 1;""",
|
||||
{"email": email, "password": password})
|
||||
|
||||
cur.execute(query)
|
||||
r = cur.fetchone()
|
||||
|
||||
if r is not None:
|
||||
if for_change_password:
|
||||
return True
|
||||
r = helper.dict_to_camel_case(r, ignore_keys=["appearance"])
|
||||
query = cur.mogrify(
|
||||
f"""UPDATE public.users
|
||||
SET jwt_iat = timezone('utc'::text, now())
|
||||
WHERE user_id = %(user_id)s
|
||||
RETURNING jwt_iat;""",
|
||||
{"user_id": r["id"]})
|
||||
cur.execute(query)
|
||||
return {
|
||||
"jwt": authorizers.generate_jwt(r['id'], r['tenantId'],
|
||||
TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]),
|
||||
aud=f"plugin:{helper.get_stage_name()}" if for_plugin else f"front:{helper.get_stage_name()}"),
|
||||
"email": email,
|
||||
**r
|
||||
}
|
||||
return None
|
||||
178
api/chalicelib/core/webhook.py
Normal file
|
|
@ -0,0 +1,178 @@
|
|||
from chalicelib.utils import pg_client, helper
|
||||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
import requests
|
||||
|
||||
|
||||
def get_by_id(webhook_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify("""\
|
||||
SELECT
|
||||
w.*
|
||||
FROM public.webhooks AS w
|
||||
where w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
|
||||
{"webhook_id": webhook_id})
|
||||
)
|
||||
w = helper.dict_to_camel_case(cur.fetchone())
|
||||
if w:
|
||||
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
|
||||
return w
|
||||
|
||||
|
||||
def get(tenant_id, webhook_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify("""\
|
||||
SELECT
|
||||
w.*
|
||||
FROM public.webhooks AS w
|
||||
where w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
|
||||
{"webhook_id": webhook_id})
|
||||
)
|
||||
w = helper.dict_to_camel_case(cur.fetchone())
|
||||
if w:
|
||||
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
|
||||
return w
|
||||
|
||||
|
||||
def get_by_type(tenant_id, webhook_type):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify("""\
|
||||
SELECT
|
||||
w.webhook_id AS id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
|
||||
FROM public.webhooks AS w
|
||||
WHERE w.type =%(type)s AND deleted_at ISNULL;""",
|
||||
{"type": webhook_type})
|
||||
)
|
||||
webhooks = helper.list_to_camel_case(cur.fetchall())
|
||||
for w in webhooks:
|
||||
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
|
||||
return webhooks
|
||||
|
||||
|
||||
def get_by_tenant(tenant_id, replace_none=False):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute("""\
|
||||
SELECT
|
||||
w.*
|
||||
FROM public.webhooks AS w
|
||||
WHERE deleted_at ISNULL;"""
|
||||
)
|
||||
webhooks = helper.list_to_camel_case(cur.fetchall())
|
||||
if replace_none:
|
||||
for w in webhooks:
|
||||
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
|
||||
for k in w.keys():
|
||||
if w[k] is None:
|
||||
w[k] = ''
|
||||
else:
|
||||
for w in webhooks:
|
||||
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
|
||||
return webhooks
|
||||
|
||||
|
||||
def update(tenant_id, webhook_id, changes, replace_none=False):
|
||||
allow_update = ["name", "index", "authHeader", "endpoint"]
|
||||
with pg_client.PostgresClient() as cur:
|
||||
sub_query = [f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys() if k in allow_update]
|
||||
cur.execute(
|
||||
cur.mogrify(f"""\
|
||||
UPDATE public.webhooks
|
||||
SET {','.join(sub_query)}
|
||||
WHERE webhook_id =%(id)s AND deleted_at ISNULL
|
||||
RETURNING *;""",
|
||||
{"id": webhook_id, **changes})
|
||||
)
|
||||
w = helper.dict_to_camel_case(cur.fetchone())
|
||||
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
|
||||
if replace_none:
|
||||
for k in w.keys():
|
||||
if w[k] is None:
|
||||
w[k] = ''
|
||||
return w
|
||||
|
||||
|
||||
def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="", replace_none=False):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
query = cur.mogrify("""\
|
||||
INSERT INTO public.webhooks(endpoint,auth_header,type,name)
|
||||
VALUES (%(endpoint)s, %(auth_header)s, %(type)s,%(name)s)
|
||||
RETURNING *;""",
|
||||
{"endpoint": endpoint, "auth_header": auth_header,
|
||||
"type": webhook_type, "name": name})
|
||||
cur.execute(
|
||||
query
|
||||
)
|
||||
w = helper.dict_to_camel_case(cur.fetchone())
|
||||
w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"])
|
||||
if replace_none:
|
||||
for k in w.keys():
|
||||
if w[k] is None:
|
||||
w[k] = ''
|
||||
return w
|
||||
|
||||
|
||||
def add_edit(tenant_id, data, replace_none=None):
|
||||
if "webhookId" in data:
|
||||
return update(tenant_id=tenant_id, webhook_id=data["webhookId"],
|
||||
changes={"endpoint": data["endpoint"],
|
||||
"authHeader": None if "authHeader" not in data else data["authHeader"],
|
||||
"name": data["name"] if "name" in data else ""}, replace_none=replace_none)
|
||||
else:
|
||||
return add(tenant_id=tenant_id,
|
||||
endpoint=data["endpoint"],
|
||||
auth_header=None if "authHeader" not in data else data["authHeader"],
|
||||
name=data["name"] if "name" in data else "", replace_none=replace_none)
|
||||
|
||||
|
||||
def delete(tenant_id, webhook_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(
|
||||
cur.mogrify("""\
|
||||
UPDATE public.webhooks
|
||||
SET deleted_at = (now() at time zone 'utc')
|
||||
WHERE webhook_id =%(id)s AND deleted_at ISNULL
|
||||
RETURNING *;""",
|
||||
{"id": webhook_id})
|
||||
)
|
||||
return {"data": {"state": "success"}}
|
||||
|
||||
|
||||
def trigger_batch(data_list):
|
||||
webhooks_map = {}
|
||||
for w in data_list:
|
||||
if w["destination"] not in webhooks_map:
|
||||
webhooks_map[w["destination"]] = get_by_id(webhook_id=w["destination"])
|
||||
__trigger(hook=webhooks_map[w["destination"]], data=w["data"])
|
||||
|
||||
|
||||
def __trigger(hook, data):
|
||||
if hook["type"] == 'webhook':
|
||||
headers = {}
|
||||
if hook["authHeader"] is not None and len(hook["authHeader"]) > 0:
|
||||
headers = {"Authorization": hook["authHeader"]}
|
||||
|
||||
# body = {
|
||||
# "webhookId": hook["id"],
|
||||
# "createdAt": TimeUTC.now(),
|
||||
# "event": event,
|
||||
# "data": data
|
||||
# }
|
||||
|
||||
r = requests.post(url=hook["endpoint"], json=data, headers=headers)
|
||||
if r.status_code != 200:
|
||||
print("=======> webhook: something went wrong")
|
||||
print(r)
|
||||
print(r.status_code)
|
||||
print(r.text)
|
||||
return
|
||||
response = None
|
||||
try:
|
||||
response = r.json()
|
||||
except ValueError:  # response body was not JSON
|
||||
try:
|
||||
response = r.text
|
||||
except Exception:
|
||||
print("no response found")
|
||||
return response
|
||||
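trigger_batch deduplicates webhook lookups: each destination id is fetched once and reused for every payload addressed to it. The caching shape, sketched with a stubbed lookup:

def fake_get_by_id(webhook_id):  # stand-in for get_by_id, illustration only
    return {"type": "webhook", "endpoint": f"https://example.com/{webhook_id}", "authHeader": None}

data_list = [{"destination": 1, "data": {"a": 1}},
             {"destination": 1, "data": {"a": 2}},
             {"destination": 2, "data": {"b": 3}}]
webhooks_map = {}
for w in data_list:
    if w["destination"] not in webhooks_map:
        webhooks_map[w["destination"]] = fake_get_by_id(w["destination"])
# three events, but only two lookups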
233
api/chalicelib/core/weekly_report.py
Normal file
|
|
@ -0,0 +1,233 @@
|
|||
from chalicelib.utils import pg_client, helper
|
||||
from chalicelib.utils.helper import environ
|
||||
from chalicelib.utils.helper import get_issue_title
|
||||
|
||||
LOWEST_BAR_VALUE = 3
|
||||
|
||||
|
||||
def get_config(user_id):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(cur.mogrify("""\
|
||||
SELECT users.weekly_report
|
||||
FROM public.users
|
||||
WHERE users.deleted_at ISNULL AND users.user_id=%(user_id)s
|
||||
LIMIT 1;""", {"user_id": user_id}))
|
||||
result = cur.fetchone()
|
||||
return helper.dict_to_camel_case(result)
|
||||
|
||||
|
||||
def edit_config(user_id, weekly_report):
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute(cur.mogrify("""\
|
||||
UPDATE public.users
|
||||
SET weekly_report= %(weekly_report)s
|
||||
WHERE users.deleted_at ISNULL
|
||||
AND users.user_id=%(user_id)s
|
||||
RETURNING weekly_report;""", {"user_id": user_id, "weekly_report": weekly_report}))
|
||||
result = cur.fetchone()
|
||||
return helper.dict_to_camel_case(result)
|
||||
|
||||
|
||||
def cron():
|
||||
with pg_client.PostgresClient() as cur:
|
||||
cur.execute("""\
|
||||
SELECT project_id,
|
||||
name AS project_name,
|
||||
users.emails AS emails,
|
||||
TO_CHAR(DATE_TRUNC('day', now()) - INTERVAL '1 week', 'Mon. DDth, YYYY') AS period_start,
|
||||
TO_CHAR(DATE_TRUNC('day', now()), 'Mon. DDth, YYYY') AS period_end,
|
||||
COALESCE(week_0_issues.count, 0) AS this_week_issues_count,
|
||||
COALESCE(week_1_issues.count, 0) AS past_week_issues_count,
|
||||
COALESCE(month_1_issues.count, 0) AS past_month_issues_count
|
||||
FROM public.projects
|
||||
INNER JOIN LATERAL (
|
||||
SELECT sessions.project_id
|
||||
FROM public.sessions
|
||||
WHERE sessions.project_id = projects.project_id
|
||||
AND start_ts >= (EXTRACT(EPOCH FROM now() - INTERVAL '3 days') * 1000)::BIGINT
|
||||
LIMIT 1) AS recently_active USING (project_id)
|
||||
INNER JOIN LATERAL (
|
||||
SELECT COALESCE(ARRAY_AGG(email), '{}') AS emails
|
||||
FROM public.users
|
||||
WHERE users.tenant_id = projects.tenant_id
|
||||
AND users.deleted_at ISNULL
|
||||
AND users.weekly_report
|
||||
) AS users ON (TRUE)
|
||||
LEFT JOIN LATERAL (
|
||||
SELECT COUNT(issues.*) AS count
|
||||
FROM events_common.issues
|
||||
INNER JOIN public.sessions USING (session_id)
|
||||
WHERE sessions.project_id = projects.project_id
|
||||
AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
|
||||
) AS week_0_issues ON (TRUE)
|
||||
LEFT JOIN LATERAL (
|
||||
SELECT COUNT(issues.*) AS count
|
||||
FROM events_common.issues
|
||||
INNER JOIN public.sessions USING (session_id)
|
||||
WHERE sessions.project_id = projects.project_id
|
||||
AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
|
||||
AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '2 week') * 1000)::BIGINT
|
||||
) AS week_1_issues ON (TRUE)
|
||||
LEFT JOIN LATERAL (
|
||||
SELECT COUNT(issues.*) AS count
|
||||
FROM events_common.issues
|
||||
INNER JOIN public.sessions USING (session_id)
|
||||
WHERE sessions.project_id = projects.project_id
|
||||
AND issues.timestamp <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
|
||||
AND issues.timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 week') * 1000)::BIGINT
|
||||
) AS month_1_issues ON (TRUE)
|
||||
WHERE projects.deleted_at ISNULL;""")
|
||||
projects_data = cur.fetchall()
|
||||
for p in projects_data:
|
||||
print(f"checking {p['project_name']} : {p['project_id']}")
|
||||
if len(p["emails"]) == 0 \
|
||||
or p["this_week_issues_count"] + p["past_week_issues_count"] + p["past_month_issues_count"] == 0:
|
||||
print('ignore')
|
||||
continue
|
||||
print("valid")
|
||||
p["past_week_issues_evolution"] = helper.__decimal_limit(
|
||||
helper.__progress(p["this_week_issues_count"], p["past_week_issues_count"]), 1)
|
||||
p["past_month_issues_evolution"] = helper.__decimal_limit(
|
||||
helper.__progress(p["this_week_issues_count"], p["past_month_issues_count"]), 1)
|
||||
cur.execute(cur.mogrify("""
|
||||
SELECT LEFT(TO_CHAR(timestamp_i, 'Dy'),1) AS day_short,
|
||||
TO_CHAR(timestamp_i, 'Mon. DD, YYYY') AS day_long,
|
||||
(
|
||||
SELECT COUNT(*)
|
||||
FROM events_common.issues INNER JOIN public.issues USING (issue_id)
|
||||
WHERE project_id = %(project_id)s
|
||||
AND timestamp >= (EXTRACT(EPOCH FROM timestamp_i) * 1000)::BIGINT
|
||||
AND timestamp <= (EXTRACT(EPOCH FROM timestamp_i + INTERVAL '1 day') * 1000)::BIGINT
|
||||
) AS issues_count
|
||||
FROM generate_series(
|
||||
DATE_TRUNC('day', now()) - INTERVAL '7 days',
|
||||
DATE_TRUNC('day', now()) - INTERVAL '1 day',
|
||||
'1 day'::INTERVAL
|
||||
) AS timestamp_i
|
||||
ORDER BY timestamp_i;""", {"project_id": p["project_id"]}))
|
||||
days_partition = cur.fetchall()
|
||||
max_days_partition = max(x['issues_count'] for x in days_partition)
|
||||
for d in days_partition:
|
||||
if max_days_partition <= 0:
|
||||
d["value"] = LOWEST_BAR_VALUE
|
||||
else:
|
||||
d["value"] = d["issues_count"] * 100 / max_days_partition
|
||||
d["value"] = d["value"] if d["value"] > LOWEST_BAR_VALUE else LOWEST_BAR_VALUE
|
||||
cur.execute(cur.mogrify("""\
|
||||
SELECT type, COUNT(*) AS count
|
||||
FROM events_common.issues INNER JOIN public.issues USING (issue_id)
|
||||
WHERE project_id = %(project_id)s
|
||||
AND timestamp >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '7 days') * 1000)::BIGINT
|
||||
GROUP BY type
|
||||
ORDER BY count DESC, type
|
||||
LIMIT 4;""", {"project_id": p["project_id"]}))
|
||||
issues_by_type = cur.fetchall()
|
||||
max_issues_by_type = sum(i["count"] for i in issues_by_type)
|
||||
for i in issues_by_type:
|
||||
i["type"] = get_issue_title(i["type"])
|
||||
if max_issues_by_type <= 0:
|
||||
i["value"] = LOWEST_BAR_VALUE
|
||||
else:
|
||||
i["value"] = i["count"] * 100 / max_issues_by_type
|
||||
cur.execute(cur.mogrify("""\
|
||||
SELECT TO_CHAR(timestamp_i, 'Dy') AS day_short,
|
||||
TO_CHAR(timestamp_i, 'Mon. DD, YYYY') AS day_long,
|
||||
COALESCE((SELECT JSONB_AGG(sub)
|
||||
FROM (
|
||||
SELECT type, COUNT(*) AS count
|
||||
FROM events_common.issues
|
||||
INNER JOIN public.issues USING (issue_id)
|
||||
WHERE project_id = %(project_id)s
|
||||
AND timestamp >= (EXTRACT(EPOCH FROM timestamp_i) * 1000)::BIGINT
|
||||
AND timestamp <= (EXTRACT(EPOCH FROM timestamp_i + INTERVAL '1 day') * 1000)::BIGINT
|
||||
GROUP BY type
|
||||
ORDER BY count
|
||||
) AS sub), '[]'::JSONB) AS partition
|
||||
FROM generate_series(
|
||||
DATE_TRUNC('day', now()) - INTERVAL '7 days',
|
||||
DATE_TRUNC('day', now()) - INTERVAL '1 day',
|
||||
'1 day'::INTERVAL
|
||||
) AS timestamp_i
|
||||
GROUP BY timestamp_i
|
||||
ORDER BY timestamp_i;""", {"project_id": p["project_id"]}))
|
||||
issues_breakdown_by_day = cur.fetchall()
|
||||
for i in issues_breakdown_by_day:
|
||||
i["sum"] = sum(x["count"] for x in i["partition"])
|
||||
for j in i["partition"]:
|
||||
j["type"] = get_issue_title(j["type"])
|
||||
max_days_partition = max(i["sum"] for i in issues_breakdown_by_day)
|
||||
for i in issues_breakdown_by_day:
|
||||
for j in i["partition"]:
|
||||
if max_days_partition <= 0:
|
||||
j["value"] = LOWEST_BAR_VALUE
|
||||
else:
|
||||
j["value"] = j["count"] * 100 / max_days_partition
|
||||
j["value"] = j["value"] if j["value"] > LOWEST_BAR_VALUE else LOWEST_BAR_VALUE
|
||||
cur.execute(cur.mogrify("""
|
||||
SELECT type,
|
||||
COUNT(*) AS issue_count,
|
||||
COUNT(DISTINCT session_id) AS sessions_count,
|
||||
(SELECT COUNT(DISTINCT sessions.session_id)
|
||||
FROM public.sessions
|
||||
INNER JOIN events_common.issues AS sci USING (session_id)
|
||||
INNER JOIN public.issues AS si USING (issue_id)
|
||||
WHERE si.project_id = %(project_id)s
|
||||
AND sessions.project_id = %(project_id)s
|
||||
AND sessions.start_ts <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
|
||||
AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '2 weeks') * 1000)::BIGINT
|
||||
AND si.type = mi.type
|
||||
AND sessions.duration IS NOT NULL
|
||||
) AS last_week_sessions_count,
|
||||
(SELECT COUNT(DISTINCT sci.session_id)
|
||||
FROM public.sessions
|
||||
INNER JOIN events_common.issues AS sci USING (session_id)
|
||||
INNER JOIN public.issues AS si USING (issue_id)
|
||||
WHERE si.project_id = %(project_id)s
|
||||
AND sessions.project_id = %(project_id)s
|
||||
AND sessions.start_ts <= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
|
||||
AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '5 weeks') * 1000)::BIGINT
|
||||
AND si.type = mi.type
|
||||
AND sessions.duration IS NOT NULL
|
||||
) AS last_month_sessions_count
|
||||
FROM events_common.issues
|
||||
INNER JOIN public.issues AS mi USING (issue_id)
|
||||
INNER JOIN public.sessions USING (session_id)
|
||||
WHERE mi.project_id = %(project_id)s AND sessions.project_id = %(project_id)s AND sessions.duration IS NOT NULL
|
||||
AND sessions.start_ts >= (EXTRACT(EPOCH FROM DATE_TRUNC('day', now()) - INTERVAL '1 week') * 1000)::BIGINT
|
||||
GROUP BY type
|
||||
ORDER BY issue_count DESC;""", {"project_id": p["project_id"]}))
|
||||
issues_breakdown_list = cur.fetchall()
|
||||
if len(issues_breakdown_list) > 4:
|
||||
others = {"type": "Others",
|
||||
"sessions_count": sum(i["sessions_count"] for i in issues_breakdown_list[4:]),
|
||||
"issue_count": sum(i["issue_count"] for i in issues_breakdown_list[4:]),
|
||||
"last_week_sessions_count": sum(
|
||||
i["last_week_sessions_count"] for i in issues_breakdown_list[4:]),
|
||||
"last_month_sessions_count": sum(
|
||||
i["last_month_sessions_count"] for i in issues_breakdown_list[4:])}
|
||||
issues_breakdown_list = issues_breakdown_list[:4]
|
||||
issues_breakdown_list.append(others)
|
||||
for i in issues_breakdown_list:
|
||||
i["type"] = get_issue_title(i["type"])
|
||||
i["last_week_sessions_evolution"] = helper.__decimal_limit(
|
||||
helper.__progress(i["sessions_count"], i["last_week_sessions_count"]), 1)
|
||||
i["last_month_sessions_evolution"] = helper.__decimal_limit(
|
||||
helper.__progress(i["sessions_count"], i["last_month_sessions_count"]), 1)
|
||||
i["sessions_count"] = f'{i["sessions_count"]:,}'
|
||||
keep_types = [i["type"] for i in issues_breakdown_list]
|
||||
for i in issues_breakdown_by_day:
|
||||
keep = []
|
||||
for j in i["partition"]:
|
||||
if j["type"] in keep_types:
|
||||
keep.append(j)
|
||||
i["partition"] = keep
|
||||
helper.async_post(environ['email_funnel'] % "weekly_report2",
|
||||
{"email": p.pop("emails"),
|
||||
"data": {
|
||||
**p,
|
||||
"days_partition": days_partition,
|
||||
"issues_by_type": issues_by_type,
|
||||
"issues_breakdown_by_day": issues_breakdown_by_day,
|
||||
"issues_breakdown_list": issues_breakdown_list
|
||||
}
|
||||
})
|
||||
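The bar heights in this report are percentages of the busiest day, floored at LOWEST_BAR_VALUE so a quiet day still renders a visible sliver. The same rule applied to sample counts:

LOWEST_BAR_VALUE = 3
counts = [0, 2, 40]
peak = max(counts)
values = [max(c * 100 / peak, LOWEST_BAR_VALUE) if peak > 0 else LOWEST_BAR_VALUE
          for c in counts]
# values == [3, 5.0, 100.0]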
115
api/chalicelib/utils/TimeUTC.py
Normal file
|
|
@ -0,0 +1,115 @@
|
|||
from datetime import datetime, timedelta
|
||||
from calendar import monthrange
|
||||
import pytz
|
||||
|
||||
|
||||
class TimeUTC:
|
||||
MS_MINUTE = 60 * 1000
|
||||
MS_HOUR = MS_MINUTE * 60
|
||||
MS_DAY = MS_HOUR * 24
|
||||
MS_MONTH = MS_DAY * 30
|
||||
MS_MONTH_TRUE = monthrange(datetime.now(pytz.utc).astimezone(pytz.utc).year,
|
||||
datetime.now(pytz.utc).astimezone(pytz.utc).month)[1] * MS_DAY
|
||||
RANGE_VALUE = None
|
||||
|
||||
@staticmethod
|
||||
def midnight(delta_days=0):
|
||||
return int((datetime.now(pytz.utc) + timedelta(delta_days)) \
|
||||
.replace(hour=0, minute=0, second=0, microsecond=0) \
|
||||
.astimezone(pytz.utc).timestamp() * 1000)
|
||||
|
||||
@staticmethod
|
||||
def __now(delta_days=0, delta_minutes=0, delta_seconds=0):
|
||||
return (datetime.now(pytz.utc) + timedelta(days=delta_days, minutes=delta_minutes, seconds=delta_seconds)) \
|
||||
.astimezone(pytz.utc)
|
||||
|
||||
@staticmethod
|
||||
def now(delta_days=0, delta_minutes=0, delta_seconds=0):
|
||||
return int(TimeUTC.__now(delta_days=delta_days, delta_minutes=delta_minutes,
|
||||
delta_seconds=delta_seconds).timestamp() * 1000)
|
||||
|
||||
@staticmethod
|
||||
def month_start(delta_month=0):
|
||||
month = TimeUTC.__now().month + delta_month
|
||||
return int(datetime.now(pytz.utc) \
|
||||
.replace(year=TimeUTC.__now().year + ((-12 + month) // 12 if month % 12 <= 0 else month // 12),
|
||||
month=12 + month % 12 if month % 12 <= 0 else month % 12 if month > 12 else month,
|
||||
day=1,
|
||||
hour=0, minute=0,
|
||||
second=0,
|
||||
microsecond=0) \
|
||||
.astimezone(pytz.utc).timestamp() * 1000)
|
||||
|
||||
@staticmethod
|
||||
def year_start(delta_year=0):
|
||||
return int(datetime.now(pytz.utc) \
|
||||
.replace(year=TimeUTC.__now().year + delta_year, month=1, day=1, hour=0, minute=0, second=0,
|
||||
microsecond=0) \
|
||||
.astimezone(pytz.utc).timestamp() * 1000)
|
||||
|
||||
@staticmethod
|
||||
def custom(year=None, month=None, day=None, hour=None, minute=None):
|
||||
args = locals()
|
||||
return int(datetime.now(pytz.utc) \
|
||||
.replace(**{key: args[key] for key in args if args[key] is not None}, second=0, microsecond=0) \
|
||||
.astimezone(pytz.utc).timestamp() * 1000)
|
||||
|
||||
@staticmethod
|
||||
def future(delta_day, delta_hour, delta_minute, minutes_period=None, start=None):
|
||||
this_time = TimeUTC.__now()
|
||||
if delta_day == -1:
|
||||
if this_time.hour < delta_hour or this_time.hour == delta_hour and this_time.minute < delta_minute:
|
||||
return TimeUTC.custom(hour=delta_hour, minute=delta_minute)
|
||||
|
||||
return TimeUTC.custom(day=TimeUTC.__now(1).day, hour=delta_hour, minute=delta_minute)
|
||||
elif delta_day > -1:
|
||||
if this_time.weekday() < delta_day or this_time.weekday() == delta_day and (
|
||||
this_time.hour < delta_hour or this_time.hour == delta_hour and this_time.minute < delta_minute):
|
||||
return TimeUTC.custom(day=TimeUTC.__now(delta_day - this_time.weekday()).day, hour=delta_hour,
|
||||
minute=delta_minute)
|
||||
|
||||
return TimeUTC.custom(day=TimeUTC.__now(7 + delta_day - this_time.weekday()).day, hour=delta_hour,
|
||||
minute=delta_minute)
|
||||
if start is not None:
|
||||
return start + minutes_period * 60 * 1000
|
||||
|
||||
return TimeUTC.now(delta_minutes=minutes_period)
|
||||
|
||||
@staticmethod
|
||||
def from_ms_timestamp(ts):
|
||||
return datetime.fromtimestamp(ts // 1000, pytz.utc)
|
||||
|
||||
@staticmethod
|
||||
def to_human_readable(ts, fmt='%Y-%m-%d %H:%M:%S UTC'):
|
||||
return datetime.utcfromtimestamp(ts // 1000).strftime(fmt)
|
||||
|
||||
@staticmethod
|
||||
def human_to_timestamp(ts, pattern):
|
||||
return int(datetime.strptime(ts, pattern).timestamp() * 1000)
|
||||
|
||||
@staticmethod
|
||||
def datetime_to_timestamp(date):
|
||||
if date is None:
|
||||
return None
|
||||
return int(datetime.timestamp(date) * 1000)
|
||||
|
||||
@staticmethod
|
||||
def get_start_end_from_range(range_value):
|
||||
range_value = range_value.upper()
|
||||
if TimeUTC.RANGE_VALUE is None:
|
||||
this_instant = TimeUTC.now()
|
||||
TimeUTC.RANGE_VALUE = {
|
||||
"TODAY": {"start": TimeUTC.midnight(), "end": this_instant},
|
||||
"YESTERDAY": {"start": TimeUTC.midnight(delta_days=-1), "end": TimeUTC.midnight()},
|
||||
"LAST_7_DAYS": {"start": TimeUTC.midnight(delta_days=-7), "end": this_instant},
|
||||
"LAST_30_DAYS": {"start": TimeUTC.midnight(delta_days=-30), "end": this_instant},
|
||||
"THIS_MONTH": {"start": TimeUTC.month_start(), "end": this_instant},
|
||||
"LAST_MONTH": {"start": TimeUTC.month_start(delta_month=-1), "end": TimeUTC.month_start()},
|
||||
"THIS_YEAR": {"start": TimeUTC.year_start(), "end": this_instant},
|
||||
"CUSTOM_RANGE": {"start": TimeUTC.midnight(delta_days=-7), "end": this_instant} # Default is 7 days
|
||||
}
|
||||
return TimeUTC.RANGE_VALUE[range_value]["start"], TimeUTC.RANGE_VALUE[range_value]["end"]
|
||||
|
||||
@staticmethod
|
||||
def get_utc_offset():
|
||||
return int((datetime.now(pytz.utc).now() - datetime.now(pytz.utc).replace(tzinfo=None)).total_seconds() * 1000)
|
||||
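The month/year arithmetic in month_start handles negative deltas and wrap-around through chained conditionals; an equivalent, easier-to-verify normalization with divmod (a sketch, not the code used above):

def shift_month(year, month, delta_month):
    # months are 1-based, so shift on a 0-based value and map back
    y, m = divmod(month - 1 + delta_month, 12)
    return year + y, m + 1

assert shift_month(2021, 1, -1) == (2020, 12)
assert shift_month(2021, 12, 1) == (2022, 1)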
0
api/chalicelib/utils/__init__.py
Normal file
10
api/chalicelib/utils/args_transformer.py
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
def int_arg(x):
|
||||
return int(x) if x is not None else int(0)
|
||||
|
||||
|
||||
def float_arg(x):
|
||||
return float(x) if x is not None else float(0)
|
||||
|
||||
|
||||
def string(x):
|
||||
return x
|
||||
24
api/chalicelib/utils/captcha.py
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
from chalicelib.utils.helper import environ as env
|
||||
import requests
|
||||
from chalicelib.utils import helper
|
||||
|
||||
|
||||
def __get_captcha_config():
|
||||
return env["captcha_server"], env["captcha_key"]
|
||||
|
||||
|
||||
def is_valid(response):
|
||||
if not helper.allow_captcha():
|
||||
print("!! Captcha is disabled")
|
||||
return True
|
||||
url, secret = __get_captcha_config()
|
||||
r = requests.post(url=url, data={"secret": secret, "response": response})
|
||||
if r.status_code != 200:
|
||||
print("something went wrong")
|
||||
print(r)
|
||||
print(r.status_code)
|
||||
print(r.text)
|
||||
return
|
||||
r = r.json()
|
||||
print(r)
|
||||
return r["success"]
|
||||
28
api/chalicelib/utils/dev.py
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
from functools import wraps
|
||||
from time import time
|
||||
import inspect
|
||||
from chalicelib.utils import helper
|
||||
|
||||
|
||||
def timed(f):
|
||||
@wraps(f)
|
||||
def wrapper(*args, **kwds):
|
||||
if not helper.TRACK_TIME:
|
||||
return f(*args, **kwds)
|
||||
start = time()
|
||||
result = f(*args, **kwds)
|
||||
elapsed = time() - start
|
||||
if inspect.stack()[1][3] == "_view_func":
|
||||
print("DEBUG: %s: took %d s to finish" % (f.__name__, elapsed))
|
||||
else:
|
||||
call_stack = [i[3] for i in inspect.stack()[1:] if i[3] != "wrapper"]
|
||||
call_stack = [c for c in call_stack if
|
||||
c not in ['__init__', '__call__', 'finish_request', 'process_request_thread',
|
||||
'handle_request', '_generic_handle', 'handle', '_bootstrap_inner', 'run',
|
||||
'_bootstrap', '_main_rest_api_handler', '_user_handler',
|
||||
'_get_view_function_response', 'wrapped_event', 'handle_one_request',
|
||||
'_global_error_handler', 'asayer_middleware']]
|
||||
print("DEBUG: %s > %s took %d s to finish" % (" > ".join(call_stack), f.__name__, elapsed))
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
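Applying the decorator is the standard pattern; timings are emitted only while helper.TRACK_TIME is truthy, so a local sketch looks like:

from chalicelib.utils import dev, helper

helper.TRACK_TIME = True  # module flag checked inside the wrapper

@dev.timed
def expensive_query():
    ...

expensive_query()  # prints a DEBUG timing line while TRACK_TIME is set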
90
api/chalicelib/utils/email_handler.py
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
import base64
|
||||
import re
|
||||
from email.header import Header
|
||||
from email.mime.image import MIMEImage
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
|
||||
from chalicelib.utils import helper, smtp
|
||||
from chalicelib.utils.helper import environ
|
||||
|
||||
|
||||
def __get_subject(subject):
|
||||
return subject if helper.is_production() else f"{helper.get_stage_name()}: {subject}"
|
||||
|
||||
|
||||
def __get_html_from_file(source, formatting_variables):
|
||||
if formatting_variables is None:
|
||||
formatting_variables = {}
|
||||
formatting_variables["frontend_url"] = environ["SITE_URL"]
|
||||
with open(source, "r") as body:
|
||||
BODY_HTML = body.read()
|
||||
if formatting_variables is not None and len(formatting_variables.keys()) > 0:
|
||||
BODY_HTML = re.sub(r"%(?![(])", "%%", BODY_HTML)
|
||||
BODY_HTML = BODY_HTML % {**formatting_variables}
|
||||
return BODY_HTML
|
||||
|
||||
|
||||
def __replace_images(HTML):
|
||||
pattern_holder = re.compile(r'<img[\w\W\n]+?(src="[a-zA-Z0-9.+\/\\-]+")')
|
||||
pattern_src = re.compile(r'src="(.*?)"')
|
||||
mime_img = []
|
||||
swap = []
|
||||
for m in re.finditer(pattern_holder, HTML):
|
||||
sub = m.groups()[0]
|
||||
sub = str(re.findall(pattern_src, sub)[0])
|
||||
if sub not in swap:
|
||||
swap.append(sub)
|
||||
HTML = HTML.replace(sub, f"cid:img-{len(mime_img)}")
|
||||
sub = "chalicelib/utils/html/" + sub
|
||||
with open(sub, "rb") as image_file:
|
||||
img = base64.b64encode(image_file.read()).decode('utf-8')
|
||||
mime_img.append(MIMEImage(base64.standard_b64decode(img)))
|
||||
mime_img[-1].add_header('Content-ID', f'<img-{len(mime_img) - 1}>')
|
||||
return HTML, mime_img
|
||||
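__replace_images swaps each local src path for a cid: reference and attaches the file as a MIMEImage whose Content-ID matches. Schematically, for a single (hypothetical) image path:

html = '<img width="20" src="img/weekly/logo.png">'
# after __replace_images(html):
#   html        -> '<img width="20" src="cid:img-0">'
#   mime_img[0] carries the header  Content-ID: <img-0>
#   and its payload is the bytes of chalicelib/utils/html/img/weekly/logo.png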
|
||||
|
||||
def send_html(BODY_HTML, SUBJECT, recipient, bcc=None):
|
||||
BODY_HTML, mime_img = __replace_images(BODY_HTML)
|
||||
if not isinstance(recipient, list):
|
||||
recipient = [recipient]
|
||||
msg = MIMEMultipart()
|
||||
msg['Subject'] = Header(__get_subject(SUBJECT), 'utf-8')
|
||||
msg['From'] = environ["EMAIL_FROM"]
|
||||
msg['To'] = ""
|
||||
body = MIMEText(BODY_HTML.encode('utf-8'), 'html', "utf-8")
|
||||
msg.attach(body)
|
||||
for m in mime_img:
|
||||
msg.attach(m)
|
||||
|
||||
with smtp.SMTPClient() as s:
|
||||
for r in recipient:
|
||||
msg.replace_header("To", r)
|
||||
r = [r]
|
||||
if bcc is not None and len(bcc) > 0:
|
||||
r += [bcc]
|
||||
try:
|
||||
print(f"Email sending to: {r}")
|
||||
s.sendmail(msg['FROM'], r, msg.as_string().encode('ascii'))
|
||||
except Exception as e:
|
||||
print("!!! Email error!")
|
||||
print(e)
|
||||
|
||||
|
||||
def send_text(recipients, text, subject):
|
||||
with smtp.SMTPClient() as s:
|
||||
msg = MIMEMultipart()
|
||||
msg['Subject'] = Header(__get_subject(subject), 'utf-8')
|
||||
msg['From'] = environ["EMAIL_FROM"]
|
||||
msg['To'] = ", ".join(recipients)
|
||||
body = MIMEText(text)
|
||||
msg.attach(body)
|
||||
try:
|
||||
s.sendmail(msg['FROM'], recipients, msg.as_string().encode('ascii'))
|
||||
except Exception as e:
|
||||
print("!! Text-email failed: " + subject),
|
||||
print(e)
|
||||
|
||||
|
||||
def __escape_text_html(text):
|
||||
return text.replace("@", "<span>@</span>").replace(".", "<span>.</span>").replace("=", "<span>=</span>")
|
||||
119
api/chalicelib/utils/email_helper.py
Normal file
|
|
@ -0,0 +1,119 @@
|
|||
from chalicelib.utils.TimeUTC import TimeUTC
|
||||
from chalicelib.utils.email_handler import __get_html_from_file, send_html, __escape_text_html
|
||||
|
||||
|
||||
def send_team_invitation(recipient, user_name, temp_password, client_id, sender_name):
|
||||
BODY_HTML = __get_html_from_file("chalicelib/utils/html/invitation.html",
|
||||
formatting_variables={"userName": __escape_text_html(user_name),
|
||||
"password": temp_password, "clientId": client_id,
|
||||
"sender": sender_name})
|
||||
SUBJECT = "Welcome to Asayer"
|
||||
send_html(BODY_HTML, SUBJECT, recipient)
|
||||
|
||||
|
||||
def send_reset_code(recipient, reset_code):
|
||||
BODY_HTML = __get_html_from_file("chalicelib/utils/html/reset_password.html",
|
||||
formatting_variables={"code": reset_code})
|
||||
SUBJECT = "Password recovery"
|
||||
send_html(BODY_HTML, SUBJECT, recipient)
|
||||
|
||||
|
||||
def send_assign_session(recipient, message, link):
|
||||
BODY_HTML = __get_html_from_file("chalicelib/utils/html/assignment.html",
|
||||
formatting_variables={"message": message,
|
||||
"now": TimeUTC.to_human_readable(TimeUTC.now()),
|
||||
"link": link})
|
||||
SUBJECT = "assigned session"
|
||||
send_html(BODY_HTML, SUBJECT, recipient)
|
||||
|
||||
|
||||
def alert_email(recipients, subject, data):
|
||||
BODY_HTML = __get_html_from_file("chalicelib/utils/html/alert_notification.html", formatting_variables=data)
|
||||
send_html(BODY_HTML=BODY_HTML, SUBJECT=subject, recipient=recipients)
|
||||
|
||||
|
||||
def __get_color(idx):
|
||||
return "#3EAAAF" if idx == 0 else "#77C3C7" if idx == 1 else "#9ED4D7" if idx == 2 else "#99d59a"
|
||||
|
||||
|
||||
def weekly_report2(recipients, data):
|
||||
data["o_tr_u"] = ""
|
||||
data["o_tr_d"] = ""
|
||||
for d in data["days_partition"]:
|
||||
data[
|
||||
"o_tr_u"] += f"""<td valign="bottom" style="padding:0 5px 0 0;width:14%;font-weight:300;margin:0;text-align:left">
|
||||
<table style="width:100%;font-weight:300;margin-bottom:0;border-collapse:collapse">
|
||||
<tbody>
|
||||
<tr style="font-weight:300">
|
||||
<td height="{d["value"]}px" title="{d["issues_count"]}" style="font-size:0;padding:0;font-weight:300;margin:0;line-height:0;background-color:#C5E5E7;text-align:left"> </td>
|
||||
</tr>
|
||||
</tbody></table>
|
||||
</td>"""
|
||||
data[
|
||||
"o_tr_d"] += f"""<td title="{d["day_long"]}, midnight" style="font-size:10px;color:#333333;padding:3px 5px 0 0;width:14%;font-weight:300;margin:0;text-align:center">{d["day_short"]}</td>"""
|
||||
|
||||
data[
|
||||
"past_week_issues_status"] = f'<img src="img/weekly/arrow-{"increase" if data["past_week_issues_evolution"] > 0 else "decrease"}.png" width="15px" height="10px" style="font-weight:300;vertical-align:middle">'
|
||||
data["week_decision"] = "More" if data["past_week_issues_evolution"] > 0 else "Fewer"
|
||||
data["past_week_issues_evolution"] = abs(data["past_week_issues_evolution"])
|
||||
data[
|
||||
"past_month_issues_status"] = f'<img src="img/weekly/arrow-{"increase" if data["past_month_issues_evolution"] > 0 else "decrease"}.png" width="15px" height="10px" style="font-weight:300;vertical-align:middle">'
|
||||
data["month_decision"] = "More" if data["past_month_issues_evolution"] > 0 else "Fewer"
|
||||
data["past_month_issues_evolution"] = abs(data["past_month_issues_evolution"])
|
||||
data["progress_legend"] = []
|
||||
data["progress_tr"] = ""
|
||||
for idx, i in enumerate(data["issues_by_type"]):
|
||||
color = __get_color(idx)
|
||||
data["progress_legend"].append(
|
||||
f"""<td style="padding:0;font-weight:300;margin:0;text-align:left;">
|
||||
<span style="white-space:nowrap;"><span style="border-radius:50%;font-weight:300;vertical-align:bottom;color:#fff;width:16px;height:16px;margin:0 8px;display:inline-block;background-color:{color}"></span>{i["count"]}</span><span style="font-weight:300;margin-left:5px;margin-right:0px;white-space:nowrap;">{i["type"]}</span>
|
||||
</td>""")
|
||||
data[
|
||||
"progress_tr"] += f'<td width="{i["value"]}%" title="{i["count"]} {i["type"]}" style="padding:0;font-weight:300;margin:0;background-color:{color};text-align:left"> </td>'
|
||||
|
||||
data["progress_legend"] = '<tr style="font-weight:300;font-size:13px;">' + "".join(
|
||||
data["progress_legend"]) + "</tr>"
|
||||
data["breakdown_list"] = ""
|
||||
color_breakdown = {}
|
||||
data["breakdown_list_other"] = ""
|
||||
for idx, i in enumerate(data["issues_breakdown_list"]):
|
||||
if idx < len(data["issues_breakdown_list"]) - 1 or i["type"].lower() != "others":
|
||||
color = __get_color(idx)
|
||||
color_breakdown[i["type"]] = color
|
||||
data["breakdown_list"] += f"""<tr style="font-weight:300">
|
||||
<td style="font-size:14px;padding:5px 0;font-weight:300;margin:0;text-align:left;white-space:nowrap;"><span style="vertical-align: middle;border-radius:50%;width:1em;font-weight:300;display:inline-block;background-color:{color};height:1em"></span> {i["type"]}</td>
|
||||
<td style="font-size:14px;padding:5px 0;font-weight:300;margin:0;text-align:left"><a href="%(frontend_url)s" style="color:#394EFF;font-weight:300;text-decoration:none" target="_blank" data-saferedirecturl="#">{i["sessions_count"]}</a></td>
|
||||
<td style="font-size:14px;padding:5px 0;font-weight:300;margin:0;text-align:left"><img src="img/weekly/arrow-{"increase" if i["last_week_sessions_evolution"] > 0 else "decrease"}.png" width="10px" height="7px" style="font-weight:300;vertical-align:middle;margin-right: 3px;"> {abs(i["last_week_sessions_evolution"])}%</td>
|
||||
<td style="font-size:14px;padding:5px 0;font-weight:300;margin:0;text-align:left"><img src="img/weekly/arrow-{"increase" if i["last_month_sessions_evolution"] > 0 else "decrease"}.png" width="10px" height="7px" style="font-weight:300;vertical-align:middle;margin-right: 3px;"> {abs(i["last_month_sessions_evolution"])}%</td>
|
||||
</tr>"""
|
||||
else:
|
||||
data["breakdown_list_other"] = f"""<tfoot style="font-weight:300">
|
||||
<tr style="font-weight:300">
|
||||
<td style="font-size:14px;padding:5px 0;font-weight:300;margin:0;text-align:left;white-space:nowrap;"><span style="vertical-align: middle;border-radius:50%;width:1em;font-weight:300;display:inline-block;background-color:#999999;height:1em"></span> {i["type"]}</td>
|
||||
<td style="font-size:14px;padding:5px 0;font-weight:300;margin:0;text-align:left"><a href="%(frontend_url)s" style="color:#394EFF;font-weight:300;text-decoration:none" target="_blank" data-saferedirecturl="#">{i["sessions_count"]}</a></td>
|
||||
<td style="font-size:14px;padding:5px 0;font-weight:300;margin:0;text-align:left"><img src="img/weekly/arrow-{"increase" if i["last_week_sessions_evolution"] > 0 else "decrease"}.png" width="10px" height="7px" style="font-weight:300;vertical-align:middle;margin-right: 3px;"> {abs(i["last_week_sessions_evolution"])}%</td>
|
||||
<td style="font-size:14px;padding:5px 0;font-weight:300;margin:0;text-align:left"><img src="img/weekly/arrow-{"increase" if i["last_month_sessions_evolution"] > 0 else "decrease"}.png" width="10px" height="7px" style="font-weight:300;vertical-align:middle;margin-right: 3px;"> {abs(i["last_month_sessions_evolution"])}%</td>
|
||||
</tr>
|
||||
</tfoot>"""
|
||||
data["b_tr_u"] = ""
|
||||
data["b_tr_d"] = ""
|
||||
for i in data["issues_breakdown_by_day"]:
|
||||
data[
|
||||
"b_tr_d"] += f"""<td title="{i["day_long"]}" style="font-size:14px;color:#333333;padding:10px 0 0;width:14%;border-right:10px solid #fff;font-weight:300;margin:0;text-align:center">
|
||||
{i["day_short"]}
|
||||
</td>"""
|
||||
if len(i["partition"]) > 0:
|
||||
sup_partition = ""
|
||||
for j in i["partition"]:
|
||||
sup_partition += f'<tr style="font-weight:300"><td height="{j["value"]}" title="{j["count"]} {j["type"]}" style="font-size:0;padding:0;border-right:none;font-weight:300;margin:0;line-height:0;background-color:{color_breakdown[j["type"]]};text-align:left"></td></tr>'
|
||||
else:
|
||||
sup_partition = '<tr style="font-weight:300"><td height="3" style="font-size:0;padding:0;border-right:none;font-weight:300;margin:0;line-height:0;background-color:#999999;text-align:left"></td></tr>'
|
||||
data[
|
||||
"b_tr_u"] += f"""<td valign="bottom" style="font-size:0;font-weight:300;padding:0;width:14%;border-right:10px solid #fff;height:110px;margin:0;text-align:left">
|
||||
<table style="width:100%;font-weight:300;margin-bottom:0;border-collapse:collapse">
|
||||
<tbody>{sup_partition}</tbody>
|
||||
</table>
|
||||
</td>"""
|
||||
BODY_HTML = __get_html_from_file("chalicelib/utils/html/Project-Weekly-Report.html", formatting_variables=data)
|
||||
SUBJECT = "Asayer Project Weekly Report"
|
||||
send_html(BODY_HTML=BODY_HTML, SUBJECT=SUBJECT, recipient=recipients)
|
||||
15
api/chalicelib/utils/event_filter_definition.py
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
class Event:
|
||||
def __init__(self, ui_type, table, column):
|
||||
self.ui_type = ui_type
|
||||
self.table = table
|
||||
self.column = column
|
||||
|
||||
|
||||
class SupportedFilter:
|
||||
def __init__(self, get, query, value_limit, starts_with, starts_limit, ignore_if_starts_with):
|
||||
self.get = get
|
||||
self.query = query
|
||||
self.valueLimit = value_limit
|
||||
self.startsWith = starts_with
|
||||
self.startsLimit = starts_limit
|
||||
self.ignoreIfStartsWith = ignore_if_starts_with
|
||||
133
api/chalicelib/utils/github_client_v3.py
Normal file
@ -0,0 +1,133 @@
import requests
from datetime import datetime


class github_formatters:

    @staticmethod
    def get_timestamp(ts):
        ts = ts[:-1]
        pattern = '%Y-%m-%dT%H:%M:%S'
        creation = datetime.strptime(ts, pattern)
        return int(creation.timestamp() * 1000)

    @staticmethod
    def label(label):
        return {
            'id': label["id"],
            'name': label["name"],
            'description': label["description"],
            'color': label["color"]
        }

    @staticmethod
    def comment(comment):
        return {
            'id': str(comment["id"]),
            'message': comment["body"],
            'author': str(github_formatters.user(comment["user"])["id"]),
            'createdAt': github_formatters.get_timestamp(comment["created_at"])
        }

    @staticmethod
    def issue(issue):
        labels = [github_formatters.label(l) for l in issue["labels"]]
        result = {
            'id': str(issue["number"]),
            'creator': str(github_formatters.user(issue["user"])["id"]),
            'assignees': [str(github_formatters.user(a)["id"]) for a in issue["assignees"]],
            'title': issue["title"],
            'description': issue["body"],
            'status': issue["state"],
            'createdAt': github_formatters.get_timestamp(issue["created_at"]),
            'closed': issue["closed_at"] is not None,
            'commentsCount': issue["comments"],
            'issueType': [str(l["id"]) for l in labels if l["name"].lower() != "asayer"],
            'labels': [l["name"] for l in labels]
        }
        return result

    @staticmethod
    def user(user):
        if not user:
            return None
        result = {
            'id': user["id"],
            'name': user["login"],
            'avatarUrls': {'24x24': user["avatar_url"]},
            'email': ""
        }
        return result

    @staticmethod
    def team_to_dict(team):
        if not team:
            return None

        result = {'id': team.id, 'name': team.name, 'members_count': team.members_count}
        return result

    @staticmethod
    def repo(repo):
        if not repo:
            return None
        return {
            "id": str(repo["id"]),
            "name": repo["name"],
            "description": repo["description"],
            "creator": str(repo["owner"]["id"])
        }

    @staticmethod
    def organization(org):
        if not org:
            return None
        return {
            "id": org["id"],
            "name": org["login"],
            "description": org["description"],
            "avatarUrls": {'24x42': org["avatar_url"]}
        }


def get_response_links(response):
    links = {}
    if "Link" in response.headers:
        link_headers = response.headers["Link"].split(", ")
        for link_header in link_headers:
            (url, rel) = link_header.split("; ")
            url = url[1:-1]
            rel = rel[5:-1]
            links[rel] = url
    return links


class githubV3Request:
    __base = "https://api.github.com"

    def __init__(self, token):
        self.__token = token

    def __get_request_header(self):
        return {"Accept": "application/vnd.github.v3+json", 'Authorization': f'token {self.__token}'}

    def get(self, url, params={}):
        results = []
        params = {"per_page": 100, **params}
        pages = {"next": f"{self.__base}{url}", "last": ""}
        while len(pages.keys()) > 0 and pages["next"] != pages["last"]:
            response = requests.get(pages["next"], headers=self.__get_request_header(), params=params)
            pages = get_response_links(response)
            result = response.json()
            if response.status_code != 200:
                print("!-------- error")
                print(result)
                raise Exception(result["message"])
            if isinstance(result, dict):
                return result
            results += result
        return results

    def post(self, url, body):
        response = requests.post(f"{self.__base}{url}", headers=self.__get_request_header(), json=body)
        return response.json()
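A short usage sketch for the client above (the token, owner, and repo are placeholder assumptions; the pagination loop follows the Link response header until next equals last and concatenates list pages):

    client = githubV3Request(token="<personal-access-token>")
    raw_issues = client.get("/repos/<owner>/<repo>/issues", params={"state": "open"})
    issues = [github_formatters.issue(i) for i in raw_issues]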
380
api/chalicelib/utils/helper.py
Normal file
@ -0,0 +1,380 @@
import math
import random
import re
import string

import requests

local_prefix = 'local-'
from os import environ, path

import json


def get_version_number():
    return environ["version"]


def get_stage_name():
    stage = environ["stage"]
    return stage[len(local_prefix):] if stage.startswith(local_prefix) else stage


def is_production():
    return get_stage_name() == "production"


def is_staging():
    return get_stage_name() == "staging"


def is_onprem():
    return not is_production() and not is_staging()


def is_local():
    return environ["stage"].startswith(local_prefix)


def generate_salt():
    return "".join(random.choices(string.hexdigits, k=36))


def remove_empty_none_values(dictionary):
    aux = {}
    for key in dictionary.keys():
        if dictionary[key] is not None:
            if isinstance(dictionary[key], dict):
                aux[key] = remove_empty_none_values(dictionary[key])
            elif not isinstance(dictionary[key], str) or len(dictionary[key]) > 0:
                aux[key] = dictionary[key]
    return aux


def unique_ordered_list(array):
    uniq = []
    [uniq.append(x) for x in array if x not in uniq]
    return uniq


def unique_unordered_list(array):
    return list(set(array))


def list_to_camel_case(items, flatten=False):
    for i in range(len(items)):
        if flatten:
            items[i] = flatten_nested_dicts(items[i])
        items[i] = dict_to_camel_case(items[i])

    return items


def dict_to_camel_case(variable, delimiter='_', ignore_keys=[]):
    if variable is None:
        return None
    if isinstance(variable, str):
        return variable
    elif isinstance(variable, dict):
        aux = {}
        for key in variable.keys():
            if key in ignore_keys:
                aux[key] = variable[key]
            elif isinstance(variable[key], dict):
                aux[key_to_camel_case(key, delimiter)] = dict_to_camel_case(variable[key])
            elif isinstance(variable[key], list):
                aux[key_to_camel_case(key, delimiter)] = list_to_camel_case(variable[key])
            else:
                aux[key_to_camel_case(key, delimiter)] = variable[key]
        return aux
    else:
        return variable


def dict_to_CAPITAL_keys(variable):
    if variable is None:
        return None
    if isinstance(variable, str):
        return variable.upper()
    elif isinstance(variable, dict):
        aux = {}
        for key in variable.keys():
            if isinstance(variable[key], dict):
                aux[key.upper()] = dict_to_CAPITAL_keys(variable[key])
            else:
                aux[key.upper()] = variable[key]
        return aux
    else:
        return variable


def variable_to_snake_case(variable, delimiter='_', split_number=False):
    if isinstance(variable, str):
        return key_to_snake_case(variable, delimiter, split_number)
    elif isinstance(variable, dict):
        aux = {}
        for key in variable.keys():
            if isinstance(variable[key], dict):
                aux[key_to_snake_case(key, delimiter, split_number)] = variable_to_snake_case(variable[key], delimiter,
                                                                                              split_number)
            else:
                aux[key_to_snake_case(key, delimiter, split_number)] = variable[key]
        return aux
    else:
        return variable


def key_to_camel_case(snake_str, delimiter='_'):
    if snake_str.startswith(delimiter):
        snake_str = snake_str[1:]
    components = snake_str.split(delimiter)
    return components[0] + ''.join(x.title() for x in components[1:])


def key_to_snake_case(name, delimiter='_', split_number=False):
    s1 = re.sub('(.)([A-Z][a-z]+)', fr'\1{delimiter}\2', name)
    return re.sub('([a-z])([A-Z0-9])' if split_number else '([a-z0-9])([A-Z])', fr'\1{delimiter}\2', s1).lower()


TRACK_TIME = True


def __sbool_to_bool(value):
    if value is None or not isinstance(value, str):
        return False
    return value.lower() in ["true", "yes", "1"]


def allow_cron():
    return "allowCron" not in environ or __sbool_to_bool(environ["allowCron"])


def allow_captcha():
    return environ.get("captcha_server") is not None and environ.get("captcha_key") is not None \
           and len(environ["captcha_server"]) > 0 and len(environ["captcha_key"]) > 0


def allow_sentry():
    return "sentry" not in environ or __sbool_to_bool(environ["sentry"])


def async_post(endpoint, data):
    data["auth"] = environ["async_Token"]
    try:
        requests.post(endpoint, timeout=1, json=data)
    except requests.exceptions.ReadTimeout:
        pass


def string_to_sql_like(value):
    value = re.sub(' +', ' ', value)
    value = value.replace("*", "%")
    if value.startswith("^"):
        value = value[1:]
    elif not value.startswith("%"):
        value = '%' + value

    if value.endswith("$"):
        value = value[:-1]
    elif not value.endswith("%"):
        value = value + '%'
    # value = value.replace(" ", "%")
    return value


def string_to_sql_like_with_op(value, op):
    if isinstance(value, list) and len(value) > 0:
        _value = value[0]
    else:
        _value = value
    if _value is None:
        return _value
    if op.lower() != 'ilike':
        return _value.replace("%", "%%")
    _value = _value.replace("*", "%")
    if _value.startswith("^"):
        _value = _value[1:]
    elif not _value.startswith("%"):
        _value = '%' + _value

    if _value.endswith("$"):
        _value = _value[:-1]
    elif not _value.endswith("%"):
        _value = _value + '%'
    return _value.replace("%", "%%")


def is_valid_email(email):
    return re.match(r"[^@]+@[^@]+\.[^@]+", email) is not None


def is_valid_http_url(url):
    regex = re.compile(
        r'^(?:http|ftp)s?://'  # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
        r'localhost|'  # localhost...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)

    return re.match(regex, url) is not None


def is_valid_url(url):
    regex = re.compile(
        # r'^(?:http|ftp)s?://'  # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
        r'localhost|'  # localhost...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)

    return re.match(regex, url) is not None


def is_alphabet_space(word):
    r = re.compile("^[a-zA-Z ]*$")
    return r.match(word) is not None


def is_alphabet_latin_space(word):
    r = re.compile("^[a-zA-Z\u00C0-\u00D6\u00D8-\u00f6\u00f8-\u00ff\s ]*$")
    return r.match(word) is not None


def is_alphabet_space_dash(word):
    r = re.compile("^[a-zA-Z -]*$")
    return r.match(word) is not None


def is_alphanumeric_space(word):
    r = re.compile("^[a-zA-Z0-9._\- ]*$")
    return r.match(word) is not None


def merge_lists_by_key(l1, l2, key):
    merged = {}
    for item in l1 + l2:
        if item[key] in merged:
            merged[item[key]].update(item)
        else:
            merged[item[key]] = item
    return [val for (_, val) in merged.items()]


def flatten_nested_dicts(obj):
    if obj is None:
        return None
    result = {}
    for key in obj.keys():
        if isinstance(obj[key], dict):
            result = {**result, **flatten_nested_dicts(obj[key])}
        else:
            result[key] = obj[key]
    return result


def delete_keys_from_dict(d, to_delete):
    if isinstance(to_delete, str):
        to_delete = [to_delete]
    if isinstance(d, dict):
        for single_to_delete in set(to_delete):
            if single_to_delete in d:
                del d[single_to_delete]
        for k, v in d.items():
            delete_keys_from_dict(v, to_delete)
    elif isinstance(d, list):
        for i in d:
            delete_keys_from_dict(i, to_delete)
    return d


def explode_widget(data, key=None):
    result = []
    for k in data.keys():
        if k.endswith("Progress") or k == "chart":
            continue
        result.append({"key": key_to_snake_case(k) if key is None else key, "data": {"value": data[k]}})
        if k + "Progress" in data:
            result[-1]["data"]["progress"] = data[k + "Progress"]
        if "chart" in data:
            result[-1]["data"]["chart"] = []
            for c in data["chart"]:
                result[-1]["data"]["chart"].append({"timestamp": c["timestamp"], "value": c[k]})
    return result


TEMP_PATH = "./" if is_local() else "/tmp/"


def get_issue_title(issue_type):
    return {'click_rage': "Click Rage",
            'dead_click': "Dead Click",
            'excessive_scrolling': "Excessive Scrolling",
            'bad_request': "Bad Request",
            'missing_resource': "Missing Image",
            'memory': "High Memory Usage",
            'cpu': "High CPU",
            'slow_resource': "Slow Resource",
            'slow_page_load': "Slow Page",
            'crash': "Crash",
            'ml_cpu': "High CPU",
            'ml_memory': "High Memory Usage",
            'ml_dead_click': "Dead Click",
            'ml_click_rage': "Click Rage",
            'ml_mouse_thrashing': "Mouse Thrashing",
            'ml_excessive_scrolling': "Excessive Scrolling",
            'ml_slow_resources': "Slow Resource",
            'custom': "Custom Event",
            'js_exception': "Error",
            'custom_event_error': "Custom Error",
            'js_error': "Error"}.get(issue_type, issue_type)


def __progress(old_val, new_val):
    return ((old_val - new_val) / new_val) * 100 if new_val > 0 else 0 if old_val == 0 else 100


def __decimal_limit(value, limit):
    factor = pow(10, limit)
    value = math.floor(value * factor)
    if value % factor == 0:
        return value // factor
    return value / factor


def is_free_open_source_edition():
    return __sbool_to_bool(environ.get("isFOS"))


def is_enterprise_edition():
    return __sbool_to_bool(environ.get("isEE"))


stag_config_file = f"chalicelib/.configs/{environ['stage']}.json"
if not path.isfile(stag_config_file):
    print("!! stage config file not found, using .chalice/config.json only")
else:
    print("!! stage config file found, merging with priority to .chalice/config.json")
    with open(stag_config_file) as json_file:
        config = json.load(json_file)
        environ = {**config, **environ}

if (is_free_open_source_edition() or is_enterprise_edition()) and environ.get("config_file"):
    if not path.isfile(environ.get("config_file")):
        print("!! config file not found, using default environment")
    else:
        with open(environ.get("config_file")) as json_file:
            config = json.load(json_file)
            environ = {**environ, **config}


def get_internal_project_id(project_id64):
    if project_id64 < 0x10000000000000 or project_id64 >= 0x20000000000000:
        return None

    project_id64 = (project_id64 - 0x10000000000000) * 4212451012670231 & 0xfffffffffffff
    if project_id64 > 0xffffffff:
        return None
    project_id = int(project_id64)
    return project_id
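A few illustrative calls against the helpers above (inputs and outputs are hypothetical examples, evaluated by hand):

    dict_to_camel_case({"user_id": 1, "viewed_at": None})  # {'userId': 1, 'viewedAt': None}
    key_to_snake_case("createdAt")                         # 'created_at'
    string_to_sql_like("^login *page$")                    # 'login %page' (^/$ anchor, * becomes %)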
170
api/chalicelib/utils/html/Project-Weekly-Report.html
Normal file
@ -0,0 +1,170 @@
<html>
<body style="padding: 1em; background-color: #f6f6f6;">
<div style="font-size:16px;color:#2f2936;padding:0;font-family:'Roboto','Helvetica Neue',helvetica,sans-serif;background-color:#f6f6f6">
    <div style="padding:0;font-size:0;display:none;max-height:0;font-weight:300;line-height:0"></div>
    <table style="border-radius:4px;font-size:16px;color:#2f2936;border-collapse:separate;border-spacing:0;max-width:700px;font-family:'Roboto','Helvetica Neue',helvetica,sans-serif;border:1px solid #c7d0d4;padding:0;width:100%;font-weight:300;margin:15px auto;background-color:#fff">
        <tbody><tr style="font-weight:300">
            <td style="padding:0;font-weight:300;margin:0;text-align:left">
                <div style="padding:23px 0;font-size:14px;font-weight:300;border-bottom:1px solid #dee7eb">
                    <div style="padding:10px 20px;max-width:600px;font-weight:300;margin:0 auto;text-align:left">

                        <table style="width:100%;font-weight:300;margin-bottom:0;border-collapse:collapse">
                            <tbody><tr style="font-weight:300">
                                <td width="125px" style="font-size:14px;padding:0;font-weight:300;margin:0;text-align:left">
                                    <a href="%(frontend_url)s" style="color:#394EFF;font-weight:300;text-decoration:none" target="_blank" data-saferedirecturl=""><img src="img/weekly/asayer-logo.png" alt="asayer" style="width: 160px;"></a>
                                </td>
                                <td style="font-size:14px;padding:0;font-weight:300;margin:0;text-align:right">
                                    <table style="width:100%;font-weight:300;margin-bottom:0;border-collapse:collapse">
                                        <tbody><tr style="font-weight:300"><td><strong style="font-weight:bold;white-space: nowrap;">%(project_name)s | Weekly Report</strong><br style="font-weight:300"></td></tr>
                                        <tr><td><span style="white-space: nowrap;">%(period_start)s - %(period_end)s</span></td></tr>
                                        </tbody>
                                    </table>
                                </td>
                            </tr>
                            </tbody></table>

                    </div>
                </div>
            </td>
        </tr>
        <tr style="font-weight:300">
            <td style="padding:0;font-weight:300;margin:0;text-align:left">

                <div style="padding:10px 20px;max-width:600px;font-weight:300;margin:0 auto;text-align:left">

                    <div id="m_-641467990864663910m_245367554500624151events-seen" style="font-weight:300">

                        <table style="width:100%;font-weight:300;margin-bottom:10px;border-collapse:collapse">
                            <tbody><tr style="font-weight:300">
                                <td style="padding:0;font-weight:300;margin:0;text-align:left">
                                    <h4 style="font-size:18px;font-weight:700;margin:20px 0;white-space: nowrap;"><span>287</span> Issues Occurred This Week</h4>
                                </td>
                            </tr>
                            </tbody></table>

                        <table style="width:100%;font-weight:300;margin-bottom:20px;border-collapse:collapse">
                            <tbody><tr style="font-weight:300">
                                <td width="33%" style="vertical-align:bottom;padding:0 15px 0 0;font-weight:300;margin:0;text-align:right">
                                    <table style="width:100%;font-weight:300;border-collapse:collapse;margin-bottom:0;table-layout:fixed">
                                        <tbody><tr style="font-weight:300">
                                            %(o_tr_u)s
                                        </tr>
                                        <tr style="font-weight:300">
                                            %(o_tr_d)s
                                        </tr>
                                        </tbody></table>
                                </td>
                                <td width="66%" style="padding:0;font-weight:300;margin:0;text-align:left;">
                                    <table width="100%" style="width:100%;font-weight:300;border-collapse:collapse;margin:0">
                                        <tbody><tr style="font-weight:300">

                                            <td width="50%" style="padding:0 0;font-weight:300;margin:0;text-align:right">
                                                <div style="font-size:30px;font-weight:300;margin-top:5px;margin-bottom:5px;white-space:nowrap;">%(past_week_issues_status)s %(past_week_issues_evolution)s<span style="font-size: 18px">%</span></div>
                                            </td>
                                            <td width="50%" style="padding:0;font-weight:300;margin:0;text-align:right">
                                                <div style="font-size:30px;font-weight:300;margin-top:5px;margin-bottom:5px;white-space:nowrap;">%(past_month_issues_status)s %(past_month_issues_evolution)s<span style="font-size: 18px">%</span></div>
                                            </td>
                                        </tr><tr style="font-weight:300">
                                            <td width="50%" style="padding:0 0;font-weight:300;margin:0;text-align:right">
                                                <small style="color:#333333;font-size:12px;font-weight:300"><span style="white-space:nowrap;"> %(week_decision)s than</span> <span style="white-space:nowrap;">past 7 days</span></small>
                                            </td>
                                            <td width="50%" style="padding:0;font-weight:300;margin:0;text-align:right">
                                                <small style="color:#333333;font-size:12px;font-weight:300"><span style="white-space:nowrap;">%(month_decision)s than</span> <span style="white-space:nowrap;">past 30 days</span></small>
                                            </td>
                                        </tr>
                                        </tbody></table>
                                </td>
                            </tr>
                            </tbody></table>

                    </div>

                    <hr style="background-color: #ddd; border:none; height: 1px;">

                    <div id="m_-641467990864663910m_245367554500624151events-by-issue-type" style="font-weight:300">

                        <table style="width:100%;font-weight:300;margin-bottom:20px;border-collapse:collapse">
                            <tbody><tr style="font-weight:300">
                                <td style="padding:0;font-weight:300;margin:0;text-align:left">
                                    <h4 style="font-size:18px;font-weight:700;margin:20px 0;white-space: nowrap;">Issues by Type</h4>
                                </td>
                            </tr>
                            <tr style="font-weight:300">
                                <td style="font-size:14px;padding:0;font-weight:300;margin:0;text-align:right">
                                    <table style="width:100%;font-weight:300;margin-bottom:20px;border-collapse:collapse">
                                        <tbody>
                                        %(progress_legend)s
                                        </tbody>
                                    </table>
                                </td>
                            </tr>
                            <tr style="font-weight:300">
                                <td style="padding:0;font-weight:300;margin:0;text-align:left">
                                    <table style="width:100%;font-weight:300;margin-bottom:20px;border-collapse:collapse">
                                        <tbody><tr style="font-weight:300">
                                            %(progress_tr)s
                                        </tr>
                                        </tbody></table>
                                </td>
                            </tr>
                            </tbody></table>

                    </div>

                    <hr style="background-color: #ddd; border:none; height: 1px;">

                    <div style="font-weight:300;margin-bottom:20px">

                        <h4 style="font-size:18px;font-weight:700;margin:20px 0;white-space: nowrap;">Issues Breakdown</h4>

                        <table style="width:100%;font-weight:300;table-layout:fixed;margin-bottom:20px;border-collapse:collapse">
                            <tbody><tr style="font-weight:300">
                                %(b_tr_u)s
                            </tr>
                            <tr style="font-weight:300">
                                %(b_tr_d)s
                            </tr>
                            </tbody></table>

                        <table style="width:100%;font-weight:300;margin-bottom:20px;border-collapse:collapse">
                            <thead style="font-weight:300">
                            <tr style="font-weight:300">
                                <th style="font-size:12px;color:#999999;padding:5px 0;width:25%;font-weight:500;text-transform:uppercase;text-align:left">Type</th>
                                <th style="font-size:12px;color:#999999;padding:5px 0;width:15%;font-weight:500;text-transform:uppercase;text-align:left">Sessions</th>
                                <th style="font-size:12px;color:#999999;padding:5px 0;width:20%;font-weight:500;text-transform:uppercase;text-align:left">Past 7 Days</th>
                                <th style="font-size:12px;color:#999999;padding:5px 0;width:20%;font-weight:500;text-transform:uppercase;text-align:left">Past 30 Days</th>
                            </tr>
                            </thead>
                            <tbody style="font-weight:300">
                            %(breakdown_list)s
                            </tbody>
                            %(breakdown_list_other)s
                        </table>

                    </div>

                </div>

                <div style="padding:10px 20px;max-width:600px;font-weight:300;margin:0 auto;text-align:left">
                    <div style="padding:35px 0;border-top:1px solid #e7ebee;font-weight:300; font-size: 13px;">

                        <a href="%(frontend_url)s/%(project_id)s/metrics" style="color:#394EFF;float:right;font-weight:300;text-decoration:none" target="_blank" data-saferedirecturl="#">Asayer Metrics</a>

                        <a href="%(frontend_url)s/client/notifications" style="color:#394EFF;font-weight:300;text-decoration:none" target="_blank" data-saferedirecturl="#">Manage Notifications</a>

                    </div>
                </div>
            </td>
        </tr>
        </tbody></table>

</div>
</body>
</html>
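The %(...)s placeholders in this template are filled with old-style Python string formatting; a hedged sketch of how the week-over-week figure could be derived (the names and counts are assumptions, and the arithmetic mirrors the __progress helper in helper.py):

    issues_this_week, issues_prev_week = 287, 215
    evolution = round(((issues_prev_week - issues_this_week) / issues_this_week) * 100)  # -25
    week_decision = "more" if issues_this_week > issues_prev_week else "less"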
60
api/chalicelib/utils/html/alert_notification.html
Normal file
@ -0,0 +1,60 @@
<!DOCTYPE html>
<html>
<body style="margin: 0; padding: 0; font-family: -apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,'Helvetica Neue',sans-serif; color: #6c757d">
<table width="100%" border="0" style="background-color: #f6f6f6">
    <tr>
        <td>
            <div style="border-radius:4px; overflow: hidden; background-color: #ffffff; max-width: 600px; margin:20px auto;">
                <table style="margin:20px auto; border:1px solid transparent; border-collapse:collapse; background-color: #ffffff; max-width:600px"
                       width="100%">
                    <!--Main Image-->
                    <tr>
                        <td style="padding:10px 30px;">
                            <center>
                                <img src="img/asayer-logo.png" alt="Asayer" width="100%" style="max-width: 120px;">
                            </center>
                        </td>
                    </tr>

                    <!--Main Title and Overview -->

                    <tr>
                        <td style="padding:0 15px;">
                            <h1 style="font-family: -apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,'Helvetica Neue',sans-serif; color: #286f6a">
                                New alert!</h1>
                            <p style="font-family: -apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,'Helvetica Neue',sans-serif; color: #6c757d">
                                %(message)s</p>
                            <p style="font-family: -apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,'Helvetica Neue',sans-serif; color: #6c757d">
                                <a href="%(frontend_url)s/%(project_id)s/metrics">See metrics</a> for more details.</p>

                        </td>
                    </tr>

                    <!--Footer-->
                    <tr>
                        <td style="padding: 0 15px">
                            <div style="border-top:1px dotted rgba(0,0,0,0.2); display: block; margin-top: 20px"></div>
                            <center>
                                <p style="font-size: 12px; font-family: -apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,'Helvetica Neue',sans-serif; color: #6c757d">
                                    Sent with ♡ from Asayer © 2021 - All rights reserved.<br><br>
                                    <a href="https://asayer.io" target="_blank"
                                       style="text-decoration: none; color: #6c757d">https://asayer.io/</a>
                                </p>

                            </center>
                        </td>
                    </tr>

                </table>

            </div>
        </td>
    </tr>
</table>

</body>
</html>
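A minimal rendering sketch for this template (the file path, subject, and values are illustrative assumptions; note that literal % signs in the markup, such as width="100%", must be doubled before %-formatting):

    import re

    with open("chalicelib/utils/html/alert_notification.html") as f:
        template = f.read()
    # escape literal % signs while leaving %(name)s placeholders intact
    template = re.sub(r"%(?!\()", "%%", template)
    body = template % {"message": "CPU alert triggered",
                       "frontend_url": "https://app.example.com",
                       "project_id": 42}
    # hypothetical send, reusing the send_html mailer seen earlier in this diff:
    # send_html(BODY_HTML=body, SUBJECT="New alert", recipient=["user@example.com"])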
556
api/chalicelib/utils/html/assignment.html
Normal file
@ -0,0 +1,556 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional //EN"
        "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">

<html xmlns="http://www.w3.org/1999/xhtml" xmlns:o="urn:schemas-microsoft-com:office:office"
      xmlns:v="urn:schemas-microsoft-com:vml">
<head>
    <!--[if gte mso 9]>
    <xml>
        <o:OfficeDocumentSettings>
            <o:AllowPNG/>
            <o:PixelsPerInch>96</o:PixelsPerInch>
        </o:OfficeDocumentSettings>
    </xml><![endif]-->
    <meta content="text/html; charset=utf-8" http-equiv="Content-Type"/>
    <meta content="width=device-width" name="viewport"/>
    <!--[if !mso]><!-->
    <meta content="IE=edge" http-equiv="X-UA-Compatible"/>
    <!--<![endif]-->
    <title></title>
    <!--[if !mso]><!-->
    <!--<![endif]-->
    <style type="text/css">
        body {
            margin: 0;
            padding: 0;
        }

        table,
        td,
        tr {
            vertical-align: top;
            border-collapse: collapse;
        }

        * {
            line-height: inherit;
        }

        a[x-apple-data-detectors=true] {
            color: inherit !important;
            text-decoration: none !important;
        }

        .ie-browser table {
            table-layout: fixed;
        }

        [owa] .img-container div,
        [owa] .img-container button {
            display: block !important;
        }

        [owa] .fullwidth button {
            width: 100% !important;
        }

        [owa] .block-grid .col {
            display: table-cell;
            float: none !important;
            vertical-align: top;
        }

        .ie-browser .block-grid,
        .ie-browser .num12,
        [owa] .num12,
        [owa] .block-grid {
            width: 500px !important;
        }

        .ie-browser .mixed-two-up .num4,
        [owa] .mixed-two-up .num4 {
            width: 164px !important;
        }

        .ie-browser .mixed-two-up .num8,
        [owa] .mixed-two-up .num8 {
            width: 328px !important;
        }

        .ie-browser .block-grid.two-up .col,
        [owa] .block-grid.two-up .col {
            width: 246px !important;
        }

        .ie-browser .block-grid.three-up .col,
        [owa] .block-grid.three-up .col {
            width: 246px !important;
        }

        .ie-browser .block-grid.four-up .col [owa] .block-grid.four-up .col {
            width: 123px !important;
        }

        .ie-browser .block-grid.five-up .col [owa] .block-grid.five-up .col {
            width: 100px !important;
        }

        .ie-browser .block-grid.six-up .col,
        [owa] .block-grid.six-up .col {
            width: 83px !important;
        }

        .ie-browser .block-grid.seven-up .col,
        [owa] .block-grid.seven-up .col {
            width: 71px !important;
        }

        .ie-browser .block-grid.eight-up .col,
        [owa] .block-grid.eight-up .col {
            width: 62px !important;
        }

        .ie-browser .block-grid.nine-up .col,
        [owa] .block-grid.nine-up .col {
            width: 55px !important;
        }

        .ie-browser .block-grid.ten-up .col,
        [owa] .block-grid.ten-up .col {
            width: 60px !important;
        }

        .ie-browser .block-grid.eleven-up .col,
        [owa] .block-grid.eleven-up .col {
            width: 54px !important;
        }

        .ie-browser .block-grid.twelve-up .col,
        [owa] .block-grid.twelve-up .col {
            width: 50px !important;
        }

    </style>
    <style id="media-query" type="text/css">
        @media only screen and (min-width: 520px) {
            .block-grid {
                width: 500px !important;
            }

            .block-grid .col {
                vertical-align: top;
            }

            .block-grid .col.num12 {
                width: 500px !important;
            }

            .block-grid.mixed-two-up .col.num3 {
                width: 123px !important;
            }

            .block-grid.mixed-two-up .col.num4 {
                width: 164px !important;
            }

            .block-grid.mixed-two-up .col.num8 {
                width: 328px !important;
            }

            .block-grid.mixed-two-up .col.num9 {
                width: 369px !important;
            }

            .block-grid.two-up .col {
                width: 250px !important;
            }

            .block-grid.three-up .col {
                width: 166px !important;
            }

            .block-grid.four-up .col {
                width: 125px !important;
            }

            .block-grid.five-up .col {
                width: 100px !important;
            }

            .block-grid.six-up .col {
                width: 83px !important;
            }

            .block-grid.seven-up .col {
                width: 71px !important;
            }

            .block-grid.eight-up .col {
                width: 62px !important;
            }

            .block-grid.nine-up .col {
                width: 55px !important;
            }

            .block-grid.ten-up .col {
                width: 50px !important;
            }

            .block-grid.eleven-up .col {
                width: 45px !important;
            }

            .block-grid.twelve-up .col {
                width: 41px !important;
            }
        }

        @media (max-width: 520px) {

            .block-grid,
            .col {
                min-width: 320px !important;
                max-width: 100% !important;
                display: block !important;
            }

            .block-grid {
                width: 100% !important;
            }

            .col {
                width: 100% !important;
            }

            .col>div {
                margin: 0 auto;
            }

            img.fullwidth,
            img.fullwidthOnMobile {
                max-width: 100% !important;
            }

            .no-stack .col {
                min-width: 0 !important;
                display: table-cell !important;
            }

            .no-stack.two-up .col {
                width: 50% !important;
            }

            .no-stack .col.num4 {
                width: 33% !important;
            }

            .no-stack .col.num8 {
                width: 66% !important;
            }

            .no-stack .col.num4 {
                width: 33% !important;
            }

            .no-stack .col.num3 {
                width: 25% !important;
            }

            .no-stack .col.num6 {
                width: 50% !important;
            }

            .no-stack .col.num9 {
                width: 75% !important;
            }

            .video-block {
                max-width: none !important;
            }

            .mobile_hide {
                min-height: 0px;
                max-height: 0px;
                max-width: 0px;
                display: none;
                overflow: hidden;
                font-size: 0px;
            }

            .desktop_hide {
                display: block !important;
                max-height: none !important;
            }
        }

    </style>
</head>
<body class="clean-body" style="margin: 0; padding: 0; -webkit-text-size-adjust: 100%; background-color: #FFFFFF;">
<style id="media-query-bodytag" type="text/css">
    @media (max-width: 520px) {
        .block-grid {
            min-width: 320px!important;
            max-width: 100%!important;
            width: 100%!important;
            display: block!important;
        }
        .col {
            min-width: 320px!important;
            max-width: 100%!important;
            width: 100%!important;
            display: block!important;
        }
        .col > div {
            margin: 0 auto;
        }
        img.fullwidth {
            max-width: 100%!important;
            height: auto!important;
        }
        img.fullwidthOnMobile {
            max-width: 100%!important;
            height: auto!important;
        }
        .no-stack .col {
            min-width: 0!important;
            display: table-cell!important;
        }
        .no-stack.two-up .col {
            width: 50%!important;
        }
        .no-stack.mixed-two-up .col.num4 {
            width: 33%!important;
        }
        .no-stack.mixed-two-up .col.num8 {
            width: 66%!important;
        }
        .no-stack.three-up .col.num4 {
            width: 33%!important
        }
        .no-stack.four-up .col.num3 {
            width: 25%!important
        }
    }

</style>
<!--[if IE]>
<div class="ie-browser"><![endif]-->
<table bgcolor="#FFFFFF" cellpadding="0" cellspacing="0" class="nl-container" role="presentation"
       style="table-layout: fixed; vertical-align: top; min-width: 320px; Margin: 0 auto; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; background-color: #FFFFFF; width: 540px;"
       valign="top" width="540px">
    <tbody>
    <tr style="vertical-align: top;" valign="top">
        <td style="word-break: break-word; vertical-align: top; border-collapse: collapse;" valign="top">
            <!--[if (mso)|(IE)]>
            <table width="100%" cellpadding="0" cellspacing="0" border="0">
                <tr>
                    <td align="center" style="background-color:#FFFFFF"><![endif]-->
            <div style="background-color:transparent;">
                <div class="block-grid"
                     style="Margin: 0 auto; min-width: 320px; max-width: 500px; overflow-wrap: break-word; word-wrap: break-word; word-break: break-word; background-color: transparent;;">
                    <div style="border-collapse: collapse;display: table;width: 100%;background-color:transparent;">
                        <!--[if (mso)|(IE)]>
                        <table width="100%" cellpadding="0" cellspacing="0" border="0"
                               style="background-color:transparent;">
                            <tr>
                                <td align="center">
                                    <table cellpadding="0" cellspacing="0" border="0" style="width:500px">
                                        <tr class="layout-full-width" style="background-color:transparent"><![endif]-->
                        <!--[if (mso)|(IE)]>
                        <td align="center" width="500"
                            style="background-color:transparent;width:500px; border-top: 0px solid transparent; border-left: 0px solid transparent; border-bottom: 0px solid transparent; border-right: 0px solid transparent;"
                            valign="top">
                            <table width="100%" cellpadding="0" cellspacing="0" border="0">
                                <tr>
                                    <td style="padding-right: 0px; padding-left: 0px; padding-top:5px; padding-bottom:5px;">
                        <![endif]-->
                        <div class="col num12"
                             style="min-width: 320px; max-width: 500px; display: table-cell; vertical-align: top;;">
                            <div style="width:100% !important;">
                                <!--[if (!mso)&(!IE)]><!-->
                                <div style="border-top:0px solid transparent; border-left:0px solid transparent; border-bottom:0px solid transparent; border-right:0px solid transparent; padding-top:5px; padding-bottom:5px; padding-right: 0px; padding-left: 0px;">
                                    <!--<![endif]-->
                                    <div align="center" class="img-container center fixedwidth"
                                         style="padding-right: 0px;padding-left: 0px;">
                                        <!--[if mso]>
                                        <table width="100%" cellpadding="0" cellspacing="0" border="0">
                                            <tr style="line-height:0px">
                                                <td style="padding-right: 0px;padding-left: 0px;" align="center">
                                        <![endif]-->
                                        <img style="width:124px; height:35px;" width="124px" height="35px"
                                             src="img/asayer-logo.png"/>

                                        <!--[if mso]></td></tr></table><![endif]-->
                                    </div>
                                    <div style="font-size:16px;text-align:center;font-family:Arial, 'Helvetica Neue', Helvetica, sans-serif">
                                        <div class="our-class">
                                        </div>
                                    </div>
                                    <!--[if mso]>
                                    <table width="100%" cellpadding="0" cellspacing="0" border="0">
                                        <tr>
                                            <td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
                                    <![endif]-->
                                    <div style="color:#555555;font-family:'Helvetica Neue', Helvetica, Arial, sans-serif;line-height:120%;padding-top:10px;padding-right:10px;padding-bottom:10px;padding-left:10px;">
                                        <div style="font-size: 12px; line-height: 14px; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; color: #555555;">
                                            <!-- <p style="font-size: 18px; line-height: 21px; text-align: center; margin: 0;">
                                                <span style="font-size: 18px;">Welcome to Asayer!</span>
                                            </p>-->
                                            <h1 style="text-align: center; margin-top: 30px; line-height: 30px">Assigned session</h1>

                                        </div>
                                    </div>
                                    <!--[if mso]></td></tr></table><![endif]-->
                                    <!--[if mso]>
                                    <table width="100%" cellpadding="0" cellspacing="0" border="0">
                                        <tr>
                                            <td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
                                    <![endif]-->

                                    <!--[if mso]></td></tr></table><![endif]-->
                                    <table border="0" cellpadding="0" cellspacing="0" class="divider"
                                           role="presentation"
                                           style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;"
                                           valign="top" width="100%">
                                        <tbody>
                                        <tr style="vertical-align: top;" valign="top">
                                            <td class="divider_inner"
                                                style="word-break: break-word; vertical-align: top; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; padding-top: 10px; padding-right: 10px; padding-bottom: 10px; padding-left: 10px; border-collapse: collapse;"
                                                valign="top">
                                                <table align="center" border="0" cellpadding="0" cellspacing="0"
                                                       class="divider_content" height="0" role="presentation"
                                                       style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; width: 100%; border-top: 1px solid #EEE; height: 0px;"
                                                       valign="top" width="100%">
                                                    <tbody>
                                                    <tr style="vertical-align: top;" valign="top">
                                                        <td height="0"
                                                            style="word-break: break-word; vertical-align: top; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; border-collapse: collapse;"
                                                            valign="top"><span></span></td>
                                                    </tr>
                                                    </tbody>
                                                </table>
                                            </td>
                                        </tr>
                                        </tbody>
                                    </table>
                                    <!--[if mso]>
                                    <table width="100%" cellpadding="0" cellspacing="0" border="0">
                                        <tr>
                                            <td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
                                    <![endif]-->
                                    <div style="color:#555555;font-family:-apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;line-height:120%;padding-top:10px;padding-right:10px;padding-bottom:10px;padding-left:10px;">
                                        <div style="font-size: 12px; line-height: 14px; color: #555555; font-family: -apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;">
                                            <p style="font-size: 14px; line-height: 16px; text-align: center; margin: 0; margin-bottom: 30px">
                                                %(message)s.</p>
                                            <p style="font-size: 14px; line-height: 21px; text-align: center; margin: 0; margin-bottom: 10px">
                                                <span style="font-size: 18px;"><a href="%(link)s"
                                                                                  rel="noopener"
                                                                                  style="text-decoration: none; color: #ffffff; background-color: #3eaaaf; border-radius: 3px; padding: 10px 30px;"
                                                                                  target="_blank" title="See Session">See Session</a></span><span
                                                        style="font-size: 18px; line-height: 21px;"></span></p>

                                        </div>
                                    </div>
                                    <!--[if mso]></td></tr></table><![endif]-->
                                    <table border="0" cellpadding="0" cellspacing="0" class="divider"
                                           role="presentation"
                                           style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;"
                                           valign="top" width="100%">
                                        <tbody>
                                        <tr style="vertical-align: top;" valign="top">
                                            <td class="divider_inner"
                                                style="word-break: break-word; vertical-align: top; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; padding-top: 10px; padding-right: 10px; padding-bottom: 10px; padding-left: 10px; border-collapse: collapse;"
                                                valign="top">
                                                <table align="center" border="0" cellpadding="0" cellspacing="0"
                                                       class="divider_content" height="0" role="presentation"
                                                       style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; width: 100%; border-top: 1px solid #EEE; height: 0px;"
                                                       valign="top" width="100%">
                                                    <tbody>
                                                    <tr style="vertical-align: top;" valign="top">
                                                        <td height="0"
                                                            style="word-break: break-word; vertical-align: top; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; border-collapse: collapse;"
                                                            valign="top"><span></span></td>
                                                    </tr>
                                                    </tbody>
                                                </table>
                                            </td>
                                        </tr>
                                        </tbody>
                                    </table>
                                    <!--[if mso]>
                                    <table width="100%" cellpadding="0" cellspacing="0" border="0">
                                        <tr>
                                            <td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
                                    <![endif]-->

                                    <!--[if mso]></td></tr></table><![endif]-->
                                    <!--[if mso]>
                                    <table width="100%" cellpadding="0" cellspacing="0" border="0">
                                        <tr>
                                            <td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
                                    <![endif]-->

                                    <!--[if mso]></td></tr></table><![endif]-->
                                    <!--[if mso]>
                                    <table width="100%" cellpadding="0" cellspacing="0" border="0">
                                        <tr>
                                            <td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
                                    <![endif]-->

                                    <!--[if mso]></td></tr></table><![endif]-->

                                    <!--[if mso]>
                                    <table width="100%" cellpadding="0" cellspacing="0" border="0">
                                        <tr>
                                            <td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
                                    <![endif]-->
                                    <div style="color:#555555;font-family:-apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;line-height:120%;padding-top:10px;padding-right:10px;padding-bottom:10px;padding-left:10px;">
                                        <div style="font-size: 12px; line-height: 14px; color: #555555; font-family: -apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;">
                                            <p style="font-size: 14px; line-height: 16px; text-align: center; margin: 0;">
                                                <a href="mailto:support@asayer.io?subject=[User Invite] - Reporting issue"
                                                   style="text-decoration: underline; color: #009193;"
                                                   title="support@asayer.io">Report an issue</a> | <a
                                                    href="https://asayer.io/" rel="noopener"
                                                    style="text-decoration: underline; color: #009193;" target="_blank">Take
                                                a tour</a></p>
                                        </div>
                                    </div>
                                    <!--[if mso]></td></tr></table><![endif]-->
                                    <!--[if mso]>
                                    <table width="100%" cellpadding="0" cellspacing="0" border="0">
                                        <tr>
                                            <td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
                                    <![endif]-->
                                    <div style="color:#555555;font-family:-apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;line-height:120%;padding-top:10px;padding-right:10px;padding-bottom:10px;padding-left:10px;">
                                        <div style="font-size: 12px; line-height: 14px; color: #555555; font-family: -apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;">
                                            <p style="font-size: 14px; line-height: 13px; text-align: center; margin: 0;">
                                                <span style="font-size: 11px;">Replies to this message are undeliverable and will not reach us. Please do not reply.</span>
                                            </p>
                                        </div>
                                    </div>
                                    <!--[if mso]></td></tr></table><![endif]-->
                                    <!--[if (!mso)&(!IE)]><!-->
                                </div>
                                <!--<![endif]-->
                            </div>
                        </div>
                        <!--[if (mso)|(IE)]></td></tr></table><![endif]-->
                        <!--[if (mso)|(IE)]></td></tr></table></td></tr></table><![endif]-->
                    </div>
                </div>
            </div>
            <!--[if (mso)|(IE)]></td></tr></table><![endif]-->
        </td>
    </tr>
    </tbody>
</table>
<!--[if (IE)]></div><![endif]-->
</body>
</html>
BIN
api/chalicelib/utils/html/img/asayer-logo.png
Normal file
After Width: | Height: | Size: 2.6 KiB
BIN
api/chalicelib/utils/html/img/header.png
Normal file
After Width: | Height: | Size: 495 KiB
BIN
api/chalicelib/utils/html/img/icn-facebook.png
Normal file
After Width: | Height: | Size: 1.9 KiB
BIN
api/chalicelib/utils/html/img/icn-linkedin.png
Normal file
After Width: | Height: | Size: 2.1 KiB
BIN
api/chalicelib/utils/html/img/icn-medium.png
Normal file
After Width: | Height: | Size: 3.1 KiB
BIN
api/chalicelib/utils/html/img/icn-twitter.png
Normal file
After Width: | Height: | Size: 2.9 KiB
BIN
api/chalicelib/utils/html/img/illustrations/img-no-signal.png
Normal file
After Width: | Height: | Size: 9.1 KiB
BIN
api/chalicelib/utils/html/img/illustrations/img-welcome.png
Normal file
After Width: | Height: | Size: 10 KiB
BIN
api/chalicelib/utils/html/img/laptop.png
Normal file
After Width: | Height: | Size: 331 KiB
BIN
api/chalicelib/utils/html/img/weekly/arrow-decrease.png
Normal file
After Width: | Height: | Size: 1.3 KiB
BIN
api/chalicelib/utils/html/img/weekly/arrow-increase.png
Normal file
After Width: | Height: | Size: 1 KiB
BIN
api/chalicelib/utils/html/img/weekly/asayer-logo.png
Normal file
After Width: | Height: | Size: 17 KiB
595
api/chalicelib/utils/html/invitation.html
Normal file
|
|
@ -0,0 +1,595 @@
|
|||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional //EN"
|
||||
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
|
||||
|
||||
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:o="urn:schemas-microsoft-com:office:office"
|
||||
xmlns:v="urn:schemas-microsoft-com:vml">
|
||||
<head>
|
||||
<!--[if gte mso 9]>
|
||||
<xml>
|
||||
<o:OfficeDocumentSettings>
|
||||
<o:AllowPNG/>
|
||||
<o:PixelsPerInch>96</o:PixelsPerInch>
|
||||
</o:OfficeDocumentSettings>
|
||||
</xml><![endif]-->
|
||||
<meta content="text/html; charset=utf-8" http-equiv="Content-Type"/>
|
||||
<meta content="width=device-width" name="viewport"/>
|
||||
<!--[if !mso]><!-->
|
||||
<meta content="IE=edge" http-equiv="X-UA-Compatible"/>
|
||||
<!--<![endif]-->
|
||||
<title></title>
|
||||
<!--[if !mso]><!-->
|
||||
<!--<![endif]-->
|
||||
<style type="text/css">
|
||||
body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
table,
|
||||
td,
|
||||
tr {
|
||||
vertical-align: top;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
* {
|
||||
line-height: inherit;
|
||||
}
|
||||
|
||||
a[x-apple-data-detectors=true] {
|
||||
color: inherit !important;
|
||||
text-decoration: none !important;
|
||||
}
|
||||
|
||||
.ie-browser table {
|
||||
table-layout: fixed;
|
||||
}
|
||||
|
||||
[owa] .img-container div,
|
||||
[owa] .img-container button {
|
||||
display: block !important;
|
||||
}
|
||||
|
||||
[owa] .fullwidth button {
|
||||
width: 100% !important;
|
||||
}
|
||||
|
||||
[owa] .block-grid .col {
|
||||
display: table-cell;
|
||||
float: none !important;
|
||||
vertical-align: top;
|
||||
}
|
||||
|
||||
.ie-browser .block-grid,
|
||||
.ie-browser .num12,
|
||||
[owa] .num12,
|
||||
[owa] .block-grid {
|
||||
width: 500px !important;
|
||||
}
|
||||
|
||||
.ie-browser .mixed-two-up .num4,
|
||||
[owa] .mixed-two-up .num4 {
|
||||
width: 164px !important;
|
||||
}
|
||||
|
||||
.ie-browser .mixed-two-up .num8,
|
||||
[owa] .mixed-two-up .num8 {
|
||||
width: 328px !important;
|
||||
}
|
||||
|
||||
.ie-browser .block-grid.two-up .col,
|
||||
[owa] .block-grid.two-up .col {
|
||||
width: 246px !important;
|
||||
}
|
||||
|
||||
.ie-browser .block-grid.three-up .col,
|
||||
[owa] .block-grid.three-up .col {
|
||||
width: 246px !important;
|
||||
}
|
||||
|
||||
.ie-browser .block-grid.four-up .col [owa] .block-grid.four-up .col {
|
||||
width: 123px !important;
|
||||
}
|
||||
|
||||
.ie-browser .block-grid.five-up .col [owa] .block-grid.five-up .col {
|
||||
width: 100px !important;
|
||||
}
|
||||
|
||||
.ie-browser .block-grid.six-up .col,
|
||||
[owa] .block-grid.six-up .col {
|
||||
width: 83px !important;
|
||||
}
|
||||
|
||||
.ie-browser .block-grid.seven-up .col,
|
||||
[owa] .block-grid.seven-up .col {
|
||||
width: 71px !important;
|
||||
}
|
||||
|
||||
.ie-browser .block-grid.eight-up .col,
|
||||
[owa] .block-grid.eight-up .col {
|
||||
width: 62px !important;
|
||||
}
|
||||
|
||||
.ie-browser .block-grid.nine-up .col,
|
||||
[owa] .block-grid.nine-up .col {
|
||||
width: 55px !important;
|
||||
}
|
||||
|
||||
.ie-browser .block-grid.ten-up .col,
|
||||
[owa] .block-grid.ten-up .col {
|
||||
width: 60px !important;
|
||||
}
|
||||
|
||||
.ie-browser .block-grid.eleven-up .col,
|
||||
[owa] .block-grid.eleven-up .col {
|
||||
width: 54px !important;
|
||||
}
|
||||
|
||||
.ie-browser .block-grid.twelve-up .col,
|
||||
[owa] .block-grid.twelve-up .col {
|
||||
width: 50px !important;
|
||||
}
|
||||
|
||||
</style>
|
||||
<style id="media-query" type="text/css">
|
||||
@media only screen and (min-width: 520px) {
|
||||
.block-grid {
|
||||
width: 500px !important;
|
||||
}
|
||||
|
||||
.block-grid .col {
|
||||
vertical-align: top;
|
||||
}
|
||||
|
||||
.block-grid .col.num12 {
|
||||
width: 500px !important;
|
||||
}
|
||||
|
||||
.block-grid.mixed-two-up .col.num3 {
|
||||
width: 123px !important;
|
||||
}
|
||||
|
||||
.block-grid.mixed-two-up .col.num4 {
|
||||
width: 164px !important;
|
||||
}
|
||||
|
||||
.block-grid.mixed-two-up .col.num8 {
|
||||
width: 328px !important;
|
||||
}
|
||||
|
||||
.block-grid.mixed-two-up .col.num9 {
|
||||
width: 369px !important;
|
||||
}
|
||||
|
||||
.block-grid.two-up .col {
|
||||
width: 250px !important;
|
||||
}
|
||||
|
||||
.block-grid.three-up .col {
|
||||
width: 166px !important;
|
||||
}
|
||||
|
||||
.block-grid.four-up .col {
|
||||
width: 125px !important;
|
||||
}
|
||||
|
||||
.block-grid.five-up .col {
|
||||
width: 100px !important;
|
||||
}
|
||||
|
||||
.block-grid.six-up .col {
|
||||
width: 83px !important;
|
||||
}
|
||||
|
||||
.block-grid.seven-up .col {
|
||||
width: 71px !important;
|
||||
}
|
||||
|
||||
.block-grid.eight-up .col {
|
||||
width: 62px !important;
|
||||
}
|
||||
|
||||
.block-grid.nine-up .col {
|
||||
width: 55px !important;
|
||||
}
|
||||
|
||||
.block-grid.ten-up .col {
|
||||
width: 50px !important;
|
||||
}
|
||||
|
||||
.block-grid.eleven-up .col {
|
||||
width: 45px !important;
|
||||
}
|
||||
|
||||
.block-grid.twelve-up .col {
|
||||
width: 41px !important;
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 520px) {
|
||||
|
||||
.block-grid,
|
||||
.col {
|
||||
min-width: 320px !important;
|
||||
max-width: 100% !important;
|
||||
display: block !important;
|
||||
}
|
||||
|
||||
.block-grid {
|
||||
width: 100% !important;
|
||||
}
|
||||
|
||||
.col {
|
||||
width: 100% !important;
|
||||
}
|
||||
|
||||
.col>div {
|
||||
margin: 0 auto;
|
||||
}
|
||||
|
||||
img.fullwidth,
|
||||
img.fullwidthOnMobile {
|
||||
max-width: 100% !important;
|
||||
}
|
||||
|
||||
.no-stack .col {
|
||||
min-width: 0 !important;
|
||||
display: table-cell !important;
|
||||
}
|
||||
|
||||
.no-stack.two-up .col {
|
||||
width: 50% !important;
|
||||
}
|
||||
|
||||
.no-stack .col.num4 {
|
||||
width: 33% !important;
|
||||
}
|
||||
|
||||
.no-stack .col.num8 {
|
||||
width: 66% !important;
|
||||
}
|
||||
|
||||
.no-stack .col.num4 {
|
||||
width: 33% !important;
|
||||
}
|
||||
|
||||
.no-stack .col.num3 {
|
||||
width: 25% !important;
|
||||
}
|
||||
|
||||
.no-stack .col.num6 {
|
||||
width: 50% !important;
|
||||
}
|
||||
|
||||
.no-stack .col.num9 {
|
||||
width: 75% !important;
|
||||
}
|
||||
|
||||
.video-block {
|
||||
max-width: none !important;
|
||||
}
|
||||
|
||||
.mobile_hide {
|
||||
min-height: 0px;
|
||||
max-height: 0px;
|
||||
max-width: 0px;
|
||||
display: none;
|
||||
overflow: hidden;
|
||||
font-size: 0px;
|
||||
}
|
||||
|
||||
.desktop_hide {
|
||||
display: block !important;
|
||||
max-height: none !important;
|
||||
}
|
||||
}
|
||||
|
||||
</style>
|
||||
</head>
|
||||
<body class="clean-body" style="margin: 0; padding: 0; -webkit-text-size-adjust: 100%; background-color: #FFFFFF;">
|
||||
<style id="media-query-bodytag" type="text/css">
|
||||
@media (max-width: 520px) {
|
||||
.block-grid {
|
||||
min-width: 320px!important;
|
||||
max-width: 100%!important;
|
||||
width: 100%!important;
|
||||
display: block!important;
|
||||
}
|
||||
.col {
|
||||
min-width: 320px!important;
|
||||
max-width: 100%!important;
|
||||
width: 100%!important;
|
||||
display: block!important;
|
||||
}
|
||||
.col > div {
|
||||
margin: 0 auto;
|
||||
}
|
||||
img.fullwidth {
|
||||
max-width: 100%!important;
|
||||
height: auto!important;
|
||||
}
|
||||
img.fullwidthOnMobile {
|
||||
max-width: 100%!important;
|
||||
height: auto!important;
|
||||
}
|
||||
.no-stack .col {
|
||||
min-width: 0!important;
|
||||
display: table-cell!important;
|
||||
}
|
||||
.no-stack.two-up .col {
|
||||
width: 50%!important;
|
||||
}
|
||||
.no-stack.mixed-two-up .col.num4 {
|
||||
width: 33%!important;
|
||||
}
|
||||
.no-stack.mixed-two-up .col.num8 {
|
||||
width: 66%!important;
|
||||
}
|
||||
.no-stack.three-up .col.num4 {
|
||||
width: 33%!important
|
||||
}
|
||||
.no-stack.four-up .col.num3 {
|
||||
width: 25%!important
|
||||
}
|
||||
}
|
||||
|
||||
</style>
|
||||
<!--[if IE]>
<div class="ie-browser"><![endif]-->
<table bgcolor="#FFFFFF" cellpadding="0" cellspacing="0" class="nl-container" role="presentation" style="table-layout: fixed; vertical-align: top; min-width: 320px; Margin: 0 auto; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; background-color: #FFFFFF; width: 540px;" valign="top" width="540">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td style="word-break: break-word; vertical-align: top; border-collapse: collapse;" valign="top">
<!--[if (mso)|(IE)]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td align="center" style="background-color:#FFFFFF"><![endif]-->
<div style="background-color:transparent;">
<div class="block-grid" style="Margin: 0 auto; min-width: 320px; max-width: 500px; overflow-wrap: break-word; word-wrap: break-word; word-break: break-word; background-color: transparent;">
<div style="border-collapse: collapse;display: table;width: 100%;background-color:transparent;">
<!--[if (mso)|(IE)]>
<table width="100%" cellpadding="0" cellspacing="0" border="0" style="background-color:transparent;">
<tr>
<td align="center">
<table cellpadding="0" cellspacing="0" border="0" style="width:500px">
<tr class="layout-full-width" style="background-color:transparent"><![endif]-->
<!--[if (mso)|(IE)]>
<td align="center" width="500" style="background-color:transparent;width:500px; border-top: 0px solid transparent; border-left: 0px solid transparent; border-bottom: 0px solid transparent; border-right: 0px solid transparent;" valign="top">
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 0px; padding-left: 0px; padding-top:5px; padding-bottom:5px;">
<![endif]-->
<div class="col num12" style="min-width: 320px; max-width: 500px; display: table-cell; vertical-align: top;">
<div style="width:100% !important;">
<!--[if (!mso)&(!IE)]><!-->
<div style="border-top:0px solid transparent; border-left:0px solid transparent; border-bottom:0px solid transparent; border-right:0px solid transparent; padding-top:5px; padding-bottom:5px; padding-right: 0px; padding-left: 0px;">
<!--<![endif]-->
<div align="center" class="img-container center fixedwidth" style="padding-right: 0px;padding-left: 0px;">
<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr style="line-height:0px">
<td style="padding-right: 0px;padding-left: 0px;" align="center">
<![endif]-->
<img style="width:124px; height:35px;" width="124" height="35" src="img/asayer-logo.png"/>

<!--[if mso]></td></tr></table><![endif]-->
</div>
<div style="font-size:16px;text-align:center;font-family:Arial, 'Helvetica Neue', Helvetica, sans-serif">
<div class="our-class">
</div>
</div>
<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
<![endif]-->
<div style="color:#555555;font-family:'Helvetica Neue', Helvetica, Arial, sans-serif;line-height:120%;padding-top:10px;padding-right:10px;padding-bottom:10px;padding-left:10px;">
<div style="font-size: 12px; line-height: 14px; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; color: #555555;">
<p style="font-size: 14px; line-height: 21px; text-align: center; margin: 0;">
<span style="font-size: 18px;"><strong>Welcome to Asayer!</strong></span>
</p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
<![endif]-->
<div style="color:#555555;font-family:-apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;line-height:120%;padding-top:10px;padding-right:10px;padding-bottom:10px;padding-left:10px;">
<div style="font-size: 12px; line-height: 14px; color: #555555; font-family: -apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;">
<p style="font-size: 12px; line-height: 16px; text-align: center; margin: 0;">
<span style="font-size: 14px;">You have been invited by %(sender)s to join the %(clientId)s team on Asayer.</span>
</p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<table border="0" cellpadding="0" cellspacing="0" class="divider" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td class="divider_inner" style="word-break: break-word; vertical-align: top; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; padding-top: 10px; padding-right: 10px; padding-bottom: 10px; padding-left: 10px; border-collapse: collapse;" valign="top">
<table align="center" border="0" cellpadding="0" cellspacing="0" class="divider_content" height="0" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; width: 100%; border-top: 1px solid #EEE; height: 0px;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td height="0" style="word-break: break-word; vertical-align: top; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; border-collapse: collapse;" valign="top"><span></span></td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
<![endif]-->
<div style="color:#555555;font-family:-apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;line-height:120%;padding-top:10px;padding-right:10px;padding-bottom:10px;padding-left:10px;">
<div style="font-size: 12px; line-height: 14px; color: #555555; font-family: -apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;">
<p style="font-size: 14px; line-height: 16px; text-align: center; margin: 0;">Please use this link to log in:</p>
<p style="font-size: 14px; line-height: 21px; text-align: center; margin: 0;">
<span style="font-size: 18px;"><a href="%(frontend_url)s" rel="noopener" style="text-decoration: underline; color: #009193;" target="_blank" title="Asayer Login">%(frontend_url)s</a></span><span style="font-size: 18px; line-height: 21px;"></span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<table border="0" cellpadding="0" cellspacing="0" class="divider" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td class="divider_inner" style="word-break: break-word; vertical-align: top; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; padding-top: 10px; padding-right: 10px; padding-bottom: 10px; padding-left: 10px; border-collapse: collapse;" valign="top">
<table align="center" border="0" cellpadding="0" cellspacing="0" class="divider_content" height="0" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; width: 100%; border-top: 1px solid #EEE; height: 0px;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td height="0" style="word-break: break-word; vertical-align: top; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; border-collapse: collapse;" valign="top"><span></span></td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
<![endif]-->
<div style="color:#555555;font-family:-apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;line-height:120%;padding-top:10px;padding-right:10px;padding-bottom:10px;padding-left:10px;">
<div style="font-size: 12px; line-height: 14px; color: #555555; font-family: -apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;">
<p style="font-size: 14px; line-height: 16px; text-align: center; margin: 0;">Your login credentials</p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
<![endif]-->
<div style="color:#555555;font-family:-apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;line-height:120%;padding-top:10px;padding-right:10px;padding-bottom:10px;padding-left:10px;">
<div style="font-size: 12px; line-height: 14px; color: #555555; font-family: -apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;">
<p style="font-size: 14px; line-height: 16px; text-align: center; margin: 0;"><strong>Username / Email</strong></p>
<p style="font-size: 14px; line-height: 16px; text-align: center; margin: 0;"><span style="text-decoration: none; color: #009193;">%(userName)s</span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
<![endif]-->
<div style="color:#555555;font-family:-apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;line-height:120%;padding-top:10px;padding-right:10px;padding-bottom:10px;padding-left:10px;">
<div style="font-size: 12px; line-height: 14px; color: #555555; font-family: -apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;">
<p style="font-size: 14px; line-height: 16px; text-align: center; margin: 0;"><strong>Password</strong></p>
<p style="font-size: 14px; line-height: 16px; text-align: center; margin: 0;">%(password)s</p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<table border="0" cellpadding="0" cellspacing="0" class="divider" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td class="divider_inner" style="word-break: break-word; vertical-align: top; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; padding-top: 10px; padding-right: 10px; padding-bottom: 10px; padding-left: 10px; border-collapse: collapse;" valign="top">
<table align="center" border="0" cellpadding="0" cellspacing="0" class="divider_content" height="0" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; width: 100%; border-top: 1px solid #eee; height: 0px;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td height="0" style="word-break: break-word; vertical-align: top; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; border-collapse: collapse;" valign="top"><span></span></td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
<![endif]-->
<div style="color:#555555;font-family:-apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;line-height:120%;padding-top:10px;padding-right:10px;padding-bottom:10px;padding-left:10px;">
<div style="font-size: 12px; line-height: 14px; color: #555555; font-family: -apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;">
<p style="font-size: 14px; line-height: 16px; text-align: center; margin: 0;">
<a href="mailto:support@asayer.io?subject=[User Invite] - Reporting issue" style="text-decoration: underline; color: #009193;" title="support@asayer.io">Report an issue</a> | <a href="https://asayer.io/" rel="noopener" style="text-decoration: underline; color: #009193;" target="_blank">Take a tour</a></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
<![endif]-->
<div style="color:#555555;font-family:-apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;line-height:120%;padding-top:10px;padding-right:10px;padding-bottom:10px;padding-left:10px;">
<div style="font-size: 12px; line-height: 14px; color: #555555; font-family: -apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;">
<p style="font-size: 14px; line-height: 13px; text-align: center; margin: 0;">
<span style="font-size: 11px;">Replies to this message are undeliverable and will not reach us. Please do not reply.</span>
</p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if (!mso)&(!IE)]><!-->
</div>
<!--<![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
<!--[if (mso)|(IE)]></td></tr></table></td></tr></table><![endif]-->
</div>
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
</td>
</tr>
</tbody>
</table>
<!--[if (IE)]></div><![endif]-->
</body>
</html>
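The %(sender)s, %(clientId)s, %(userName)s, %(password)s and %(frontend_url)s tokens are Python %-style mapping placeholders; how the backend fills them is not shown in this diff. A minimal rendering sketch under that assumption — a targeted regex substitution rather than plain %-formatting, since the embedded CSS is full of literal percent signs (e.g. "100%!important") that naive %-formatting would choke on; the file name and field values below are illustrative:

import re

def render_template(template: str, fields: dict) -> str:
    # Replace only %(name)s tokens; leave every other '%' in the HTML/CSS untouched.
    return re.sub(r"%\((\w+)\)s", lambda m: str(fields[m.group(1)]), template)

html = render_template(open("invitation.html").read(),  # hypothetical file name
                       {"sender": "Jane", "clientId": "Acme", "userName": "john@example.com",
                        "password": "temp-pass", "frontend_url": "https://app.example.com"})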
545
api/chalicelib/utils/html/reset_password.html
Normal file
@@ -0,0 +1,545 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional //EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">

<html xmlns="http://www.w3.org/1999/xhtml" xmlns:o="urn:schemas-microsoft-com:office:office" xmlns:v="urn:schemas-microsoft-com:vml">
<head>
<!--[if gte mso 9]>
<xml>
<o:OfficeDocumentSettings>
<o:AllowPNG/>
<o:PixelsPerInch>96</o:PixelsPerInch>
</o:OfficeDocumentSettings>
</xml><![endif]-->
<meta content="text/html; charset=utf-8" http-equiv="Content-Type"/>
<meta content="width=device-width" name="viewport"/>
<!--[if !mso]><!-->
<meta content="IE=edge" http-equiv="X-UA-Compatible"/>
<!--<![endif]-->
<title></title>
<!--[if !mso]><!-->
<!--<![endif]-->
<style type="text/css">
    body { margin: 0; padding: 0; }
    table, td, tr { vertical-align: top; border-collapse: collapse; }
    * { line-height: inherit; }
    a[x-apple-data-detectors=true] { color: inherit !important; text-decoration: none !important; }
    .ie-browser table { table-layout: fixed; }
    [owa] .img-container div, [owa] .img-container button { display: block !important; }
    [owa] .fullwidth button { width: 100% !important; }
    [owa] .block-grid .col { display: table-cell; float: none !important; vertical-align: top; }
    .ie-browser .block-grid, .ie-browser .num12, [owa] .num12, [owa] .block-grid { width: 500px !important; }
    .ie-browser .mixed-two-up .num4, [owa] .mixed-two-up .num4 { width: 164px !important; }
    .ie-browser .mixed-two-up .num8, [owa] .mixed-two-up .num8 { width: 328px !important; }
    .ie-browser .block-grid.two-up .col, [owa] .block-grid.two-up .col { width: 246px !important; }
    .ie-browser .block-grid.three-up .col, [owa] .block-grid.three-up .col { width: 246px !important; }
    .ie-browser .block-grid.four-up .col, [owa] .block-grid.four-up .col { width: 123px !important; }
    .ie-browser .block-grid.five-up .col, [owa] .block-grid.five-up .col { width: 100px !important; }
    .ie-browser .block-grid.six-up .col, [owa] .block-grid.six-up .col { width: 83px !important; }
    .ie-browser .block-grid.seven-up .col, [owa] .block-grid.seven-up .col { width: 71px !important; }
    .ie-browser .block-grid.eight-up .col, [owa] .block-grid.eight-up .col { width: 62px !important; }
    .ie-browser .block-grid.nine-up .col, [owa] .block-grid.nine-up .col { width: 55px !important; }
    .ie-browser .block-grid.ten-up .col, [owa] .block-grid.ten-up .col { width: 60px !important; }
    .ie-browser .block-grid.eleven-up .col, [owa] .block-grid.eleven-up .col { width: 54px !important; }
    .ie-browser .block-grid.twelve-up .col, [owa] .block-grid.twelve-up .col { width: 50px !important; }
</style>
<style id="media-query" type="text/css">
    @media only screen and (min-width: 520px) {
        .block-grid { width: 500px !important; }
        .block-grid .col { vertical-align: top; }
        .block-grid .col.num12 { width: 500px !important; }
        .block-grid.mixed-two-up .col.num3 { width: 123px !important; }
        .block-grid.mixed-two-up .col.num4 { width: 164px !important; }
        .block-grid.mixed-two-up .col.num8 { width: 328px !important; }
        .block-grid.mixed-two-up .col.num9 { width: 369px !important; }
        .block-grid.two-up .col { width: 250px !important; }
        .block-grid.three-up .col { width: 166px !important; }
        .block-grid.four-up .col { width: 125px !important; }
        .block-grid.five-up .col { width: 100px !important; }
        .block-grid.six-up .col { width: 83px !important; }
        .block-grid.seven-up .col { width: 71px !important; }
        .block-grid.eight-up .col { width: 62px !important; }
        .block-grid.nine-up .col { width: 55px !important; }
        .block-grid.ten-up .col { width: 50px !important; }
        .block-grid.eleven-up .col { width: 45px !important; }
        .block-grid.twelve-up .col { width: 41px !important; }
    }

    @media (max-width: 520px) {
        .block-grid, .col { min-width: 320px !important; max-width: 100% !important; display: block !important; }
        .block-grid { width: 100% !important; }
        .col { width: 100% !important; }
        .col>div { margin: 0 auto; }
        img.fullwidth, img.fullwidthOnMobile { max-width: 100% !important; }
        .no-stack .col { min-width: 0 !important; display: table-cell !important; }
        .no-stack.two-up .col { width: 50% !important; }
        .no-stack .col.num4 { width: 33% !important; }
        .no-stack .col.num8 { width: 66% !important; }
        .no-stack .col.num4 { width: 33% !important; }
        .no-stack .col.num3 { width: 25% !important; }
        .no-stack .col.num6 { width: 50% !important; }
        .no-stack .col.num9 { width: 75% !important; }
        .video-block { max-width: none !important; }
        .mobile_hide { min-height: 0px; max-height: 0px; max-width: 0px; display: none; overflow: hidden; font-size: 0px; }
        .desktop_hide { display: block !important; max-height: none !important; }
    }
</style>
</head>
<body class="clean-body" style="margin: 0; padding: 0; -webkit-text-size-adjust: 100%; background-color: #FFFFFF;">
<style id="media-query-bodytag" type="text/css">
    @media (max-width: 520px) {
        .block-grid { min-width: 320px!important; max-width: 100%!important; width: 100%!important; display: block!important; }
        .col { min-width: 320px!important; max-width: 100%!important; width: 100%!important; display: block!important; }
        .col > div { margin: 0 auto; }
        img.fullwidth { max-width: 100%!important; height: auto!important; }
        img.fullwidthOnMobile { max-width: 100%!important; height: auto!important; }
        .no-stack .col { min-width: 0!important; display: table-cell!important; }
        .no-stack.two-up .col { width: 50%!important; }
        .no-stack.mixed-two-up .col.num4 { width: 33%!important; }
        .no-stack.mixed-two-up .col.num8 { width: 66%!important; }
        .no-stack.three-up .col.num4 { width: 33%!important; }
        .no-stack.four-up .col.num3 { width: 25%!important; }
    }
</style>
<!--[if IE]>
<div class="ie-browser"><![endif]-->
<table bgcolor="#FFFFFF" cellpadding="0" cellspacing="0" class="nl-container" role="presentation" style="table-layout: fixed; vertical-align: top; min-width: 320px; Margin: 0 auto; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; background-color: #FFFFFF; width: 540px;" valign="top" width="540">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td style="word-break: break-word; vertical-align: top; border-collapse: collapse;" valign="top">
<!--[if (mso)|(IE)]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td align="center" style="background-color:#FFFFFF"><![endif]-->
<div style="background-color:transparent;">
<div class="block-grid" style="Margin: 0 auto; min-width: 320px; max-width: 500px; overflow-wrap: break-word; word-wrap: break-word; word-break: break-word; background-color: transparent;">
<div style="border-collapse: collapse;display: table;width: 100%;background-color:transparent;">
<!--[if (mso)|(IE)]>
<table width="100%" cellpadding="0" cellspacing="0" border="0" style="background-color:transparent;">
<tr>
<td align="center">
<table cellpadding="0" cellspacing="0" border="0" style="width:500px">
<tr class="layout-full-width" style="background-color:transparent"><![endif]-->
<!--[if (mso)|(IE)]>
<td align="center" width="500" style="background-color:transparent;width:500px; border-top: 0px solid transparent; border-left: 0px solid transparent; border-bottom: 0px solid transparent; border-right: 0px solid transparent;" valign="top">
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 0px; padding-left: 0px; padding-top:5px; padding-bottom:5px;">
<![endif]-->
<div class="col num12" style="min-width: 320px; max-width: 500px; display: table-cell; vertical-align: top;">
<div style="width:100% !important;">
<!--[if (!mso)&(!IE)]><!-->
<div style="border-top:0px solid transparent; border-left:0px solid transparent; border-bottom:0px solid transparent; border-right:0px solid transparent; padding-top:5px; padding-bottom:5px; padding-right: 0px; padding-left: 0px;">
<!--<![endif]-->
<div align="center" class="img-container center fixedwidth" style="padding-right: 0px;padding-left: 0px;">
<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr style="line-height:0px">
<td style="padding-right: 0px;padding-left: 0px;" align="center">
<![endif]-->
<img style="width:124px; height:35px;" width="124" height="35" src="img/asayer-logo.png"/>

<!--[if mso]></td></tr></table><![endif]-->
</div>
<div style="font-size:16px;text-align:center;font-family:Arial, 'Helvetica Neue', Helvetica, sans-serif">
<div class="our-class">
</div>
</div>
<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
<![endif]-->
<div style="color:#555555;font-family:'Helvetica Neue', Helvetica, Arial, sans-serif;line-height:120%;padding-top:10px;padding-right:10px;padding-bottom:10px;padding-left:10px;">
<div style="font-size: 12px; line-height: 14px; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; color: #555555;">
<p style="font-size: 14px; line-height: 21px; text-align: center; margin: 0;">
<span style="font-size: 18px;"><strong>Password Recovery</strong></span>
</p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
<![endif]-->
<div style="color:#555555;font-family:-apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;line-height:120%;padding-top:10px;padding-right:10px;padding-bottom:10px;padding-left:10px;">
<div style="font-size: 12px; line-height: 14px; color: #555555; font-family: -apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;">
<p style="font-size: 12px; line-height: 16px; text-align: center; margin: 0;">
</p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<table border="0" cellpadding="0" cellspacing="0" class="divider" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td class="divider_inner" style="word-break: break-word; vertical-align: top; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; padding-top: 10px; padding-right: 10px; padding-bottom: 10px; padding-left: 10px; border-collapse: collapse;" valign="top">
<table align="center" border="0" cellpadding="0" cellspacing="0" class="divider_content" height="0" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; width: 100%; border-top: 1px solid #EEE; height: 0px;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td height="0" style="word-break: break-word; vertical-align: top; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; border-collapse: collapse;" valign="top"><span></span></td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
<![endif]-->
<div style="color:#555555;font-family:-apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;line-height:120%;padding-top:10px;padding-right:10px;padding-bottom:10px;padding-left:10px;">
<div style="font-size: 12px; line-height: 14px; color: #555555; font-family: -apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;">
<p style="font-size: 14px; line-height: 16px; text-align: center; margin: 0;">Use the code below to reset your password (valid for 24 hours only):</p>
<p style="font-size: 14px; line-height: 21px; text-align: center; margin: 0;">
<br/>
<span style="font-size: 18px;"><b>%(code)s</b></span><span style="font-size: 18px; line-height: 21px;"></span></p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<table border="0" cellpadding="0" cellspacing="0" class="divider" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td class="divider_inner" style="word-break: break-word; vertical-align: top; min-width: 100%; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; padding-top: 10px; padding-right: 10px; padding-bottom: 10px; padding-left: 10px; border-collapse: collapse;" valign="top">
<table align="center" border="0" cellpadding="0" cellspacing="0" class="divider_content" height="0" role="presentation" style="table-layout: fixed; vertical-align: top; border-spacing: 0; border-collapse: collapse; mso-table-lspace: 0pt; mso-table-rspace: 0pt; width: 100%; border-top: 1px solid #EEE; height: 0px;" valign="top" width="100%">
<tbody>
<tr style="vertical-align: top;" valign="top">
<td height="0" style="word-break: break-word; vertical-align: top; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; border-collapse: collapse;" valign="top"><span></span></td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
<![endif]-->

<!--[if mso]></td></tr></table><![endif]-->
<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
<![endif]-->

<!--[if mso]></td></tr></table><![endif]-->
<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
<![endif]-->

<!--[if mso]></td></tr></table><![endif]-->

<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
<![endif]-->

<!--[if mso]>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td style="padding-right: 10px; padding-left: 10px; padding-top: 10px; padding-bottom: 10px; font-family: Arial, sans-serif">
<![endif]-->
<div style="color:#555555;font-family:-apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;line-height:120%;padding-top:10px;padding-right:10px;padding-bottom:10px;padding-left:10px;">
<div style="font-size: 12px; line-height: 14px; color: #555555; font-family: -apple-system,BlinkMacSystemFont,'Helvetica Neue','Segoe UI',Roboto,Oxygen-Sans,Ubuntu,Cantarell,sans-serif;">
<p style="font-size: 14px; line-height: 13px; text-align: center; margin: 0;">
<span style="font-size: 11px;">If you no longer wish to make this change or did not initiate this request, please ignore this e-mail.</span>
</p>
</div>
</div>
<!--[if mso]></td></tr></table><![endif]-->
<!--[if (!mso)&(!IE)]><!-->
</div>
<!--<![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
<!--[if (mso)|(IE)]></td></tr></table></td></tr></table><![endif]-->
</div>
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
</td>
</tr>
</tbody>
</table>
<!--[if (IE)]></div><![endif]-->
</body>
</html>
327
api/chalicelib/utils/jira_client.py
Normal file
@@ -0,0 +1,327 @@
from jira import JIRA
from jira.exceptions import JIRAError
import time
from datetime import datetime
import requests
from requests.auth import HTTPBasicAuth

fields = "id, summary, description, creator, reporter, created, assignee, status, updated, comment, issuetype, labels"


class JiraManager:
    # retries = 5
    # Class-level retry budget shared by all instances; with 0 the 4xx retry branches below never fire.
    retries = 0

    def __init__(self, url, username, password, project_id=None):
        self._config = {"JIRA_PROJECT_ID": project_id, "JIRA_URL": url, "JIRA_USERNAME": username,
                        "JIRA_PASSWORD": password}
        self._jira = JIRA({'server': url}, basic_auth=(username, password), logging=True)

    def set_jira_project_id(self, project_id):
        self._config["JIRA_PROJECT_ID"] = project_id

    def get_projects(self):
        try:
            projects = self._jira.projects()
        except JIRAError as e:
            self.retries -= 1
            if (e.status_code // 100) == 4 and self.retries > 0:
                time.sleep(1)
                return self.get_projects()
            print(f"=>Error {e.text}")
            raise e
        projects_dict_list = []
        for project in projects:
            projects_dict_list.append(self.__parser_project_info(project))

        return projects_dict_list

    def get_project(self):
        try:
            project = self._jira.project(self._config['JIRA_PROJECT_ID'])
        except JIRAError as e:
            self.retries -= 1
            if (e.status_code // 100) == 4 and self.retries > 0:
                time.sleep(1)
                return self.get_project()
            print(f"=>Error {e.text}")
            raise e
        return self.__parser_project_info(project)

    def get_issues(self, sql: str, offset: int = 0):
        # `sql` is a JQL fragment that gets ANDed onto the project filter.
        jql = "project = " + self._config['JIRA_PROJECT_ID'] \
              + ((" AND " + sql) if sql is not None and len(sql) > 0 else "") \
              + " ORDER BY createdDate DESC"

        try:
            issues = self._jira.search_issues(jql, maxResults=1000, startAt=offset, fields=fields)
        except JIRAError as e:
            self.retries -= 1
            if (e.status_code // 100) == 4 and self.retries > 0:
                time.sleep(1)
                return self.get_issues(sql, offset)
            print(f"=>Error {e.text}")
            raise e

        issue_dict_list = []
        for issue in issues:
            # print(issue.raw)
            issue_dict_list.append(self.__parser_issue_info(issue, include_comments=False))

        return {"total": issues.total, "issues": issue_dict_list}

    def get_issue(self, issue_id: str):
        try:
            # issue = self._jira.issue(issue_id)
            issue = self._jira.issue(issue_id, fields=fields)
        except JIRAError as e:
            self.retries -= 1
            if (e.status_code // 100) == 4 and self.retries > 0:
                time.sleep(1)
                return self.get_issue(issue_id)
            print(f"=>Error {e.text}")
            raise e
        return self.__parser_issue_info(issue)

    def get_issue_v3(self, issue_id: str):
        try:
            url = f"{self._config['JIRA_URL']}/rest/api/3/issue/{issue_id}?fields={fields}"
            auth = HTTPBasicAuth(self._config['JIRA_USERNAME'], self._config['JIRA_PASSWORD'])
            issue = requests.get(
                url,
                headers={
                    "Accept": "application/json"
                },
                auth=auth
            )
        except Exception as e:
            self.retries -= 1
            if self.retries > 0:
                time.sleep(1)
                return self.get_issue_v3(issue_id)
            print(f"=>Error {e}")
            raise e
        return self.__parser_issue_info(issue.json())

    def create_issue(self, issue_dict):
        issue_dict["project"] = {"id": self._config['JIRA_PROJECT_ID']}
        try:
            issue = self._jira.create_issue(fields=issue_dict)
            return self.__parser_issue_info(issue)
        except JIRAError as e:
            self.retries -= 1
            if (e.status_code // 100) == 4 and self.retries > 0:
                time.sleep(1)
                return self.create_issue(issue_dict)
            print(f"=>Error {e.text}")
            raise e

    def close_issue(self, issue):
        try:
            # jira.transition_issue(issue, '5', assignee={'name': 'pm_user'}, resolution={'id': '3'})
            self._jira.transition_issue(issue, 'Close')
        except JIRAError as e:
            self.retries -= 1
            if (e.status_code // 100) == 4 and self.retries > 0:
                time.sleep(1)
                return self.close_issue(issue)
            print(f"=>Error {e.text}")
            raise e

    def assign_issue(self, issue_id, account_id) -> bool:
        try:
            return self._jira.assign_issue(issue_id, account_id)
        except JIRAError as e:
            self.retries -= 1
            if (e.status_code // 100) == 4 and self.retries > 0:
                time.sleep(1)
                return self.assign_issue(issue_id, account_id)
            print(f"=>Error {e.text}")
            raise e

    def add_comment(self, issue_id: str, comment: str):
        try:
            comment = self._jira.add_comment(issue_id, comment)
        except JIRAError as e:
            self.retries -= 1
            if (e.status_code // 100) == 4 and self.retries > 0:
                time.sleep(1)
                return self.add_comment(issue_id, comment)
            print(f"=>Error {e.text}")
            raise e
        return self.__parser_comment_info(comment)

    def add_comment_v3(self, issue_id: str, comment: str):
        try:
            url = f"{self._config['JIRA_URL']}/rest/api/3/issue/{issue_id}/comment"
            auth = HTTPBasicAuth(self._config['JIRA_USERNAME'], self._config['JIRA_PASSWORD'])
            comment_response = requests.post(
                url,
                headers={
                    "Accept": "application/json"
                },
                auth=auth,
                json={
                    "body": {
                        "type": "doc",
                        "version": 1,
                        "content": [
                            {
                                "type": "paragraph",
                                "content": [
                                    {
                                        "text": comment,
                                        "type": "text"
                                    }
                                ]
                            }
                        ]
                    }
                }
            )
        except Exception as e:
            self.retries -= 1
            if self.retries > 0:
                time.sleep(1)
                return self.add_comment_v3(issue_id, comment)
            print(f"=>Error {e}")
            raise e
        return self.__parser_comment_info(comment_response.json())

    def get_comments(self, issueKey):
        try:
            comments = self._jira.comments(issueKey)
            results = []
            for c in comments:
                results.append(self.__parser_comment_info(c.raw))
            return results
        except JIRAError as e:
            self.retries -= 1
            if (e.status_code // 100) == 4 and self.retries > 0:
                time.sleep(1)
                return self.get_comments(issueKey)
            print(f"=>Error {e.text}")
            raise e

    def get_meta(self):
        meta = {}
        meta['issueTypes'] = self.get_issue_types()
        meta['users'] = self.get_assignable_users()
        return meta

    def get_assignable_users(self):
        try:
            users = self._jira.search_assignable_users_for_issues('', project=self._config['JIRA_PROJECT_ID'])
        except JIRAError as e:
            self.retries -= 1
            if (e.status_code // 100) == 4 and self.retries > 0:
                time.sleep(1)
                return self.get_assignable_users()
            print(f"=>Error {e.text}")
            raise e
        users_dict = []
        for user in users:
            users_dict.append({
                'name': user.displayName,
                'email': user.emailAddress,
                'id': user.accountId,
                'avatarUrls': user.raw["avatarUrls"]
            })

        return users_dict

    def get_issue_types(self):
        try:
            types = self._jira.issue_types()
        except JIRAError as e:
            self.retries -= 1
            if (e.status_code // 100) == 4 and self.retries > 0:
                time.sleep(1)
                return self.get_issue_types()
            print(f"=>Error {e.text}")
            raise e
        types_dict = []
        # Skip sub-tasks and epics; `issue_type` avoids shadowing the `type` builtin.
        for issue_type in types:
            if not issue_type.subtask and not issue_type.name.lower() == "epic":
                types_dict.append({
                    'id': issue_type.id,
                    'name': issue_type.name,
                    'iconUrl': issue_type.iconUrl,
                    'description': issue_type.description
                })
        return types_dict

    def __parser_comment_info(self, comment):
        if not isinstance(comment, dict):
            comment = comment.raw

        pattern = '%Y-%m-%dT%H:%M:%S.%f%z'
        creation = datetime.strptime(comment['created'], pattern)
        # update = datetime.strptime(comment['updated'], pattern)

        return {
            'id': comment['id'],
            'author': comment['author']['accountId'],
            'message': comment['body'],
            # 'created': comment['created'],
            'createdAt': int((creation - creation.utcoffset()).timestamp() * 1000),
            # 'updated': comment['updated'],
            # 'updatedAt': int((update - update.utcoffset()).timestamp() * 1000)
        }

    @staticmethod
    def __get_closed_status(status):
        # A status counts as closed if it matches any of the usual "done" names.
        return status.lower() in ("done", "close", "closed", "finish", "finished")

    def __parser_issue_info(self, issue, include_comments=True):
        results_dict = {}
        if not isinstance(issue, dict):
            raw_info = issue.raw
        else:
            raw_info = issue

        fields = raw_info['fields']
        results_dict["id"] = raw_info["id"]
        results_dict["key"] = raw_info["key"]
        # results_dict["ticketNumber"] = raw_info["key"]
        results_dict["title"] = fields["summary"]
        results_dict["description"] = fields["description"]
        results_dict["issueType"] = [fields["issuetype"]["id"]]

        # results_dict["assignee"] = None
        # results_dict["reporter"] = None

        if isinstance(fields["assignee"], dict):
            results_dict["assignees"] = [fields["assignee"]["accountId"]]
        # if isinstance(fields["reporter"], dict):
        #     results_dict["reporter"] = fields["reporter"]["accountId"]
        if isinstance(fields["creator"], dict):
            results_dict["creator"] = fields["creator"]["accountId"]

        if "comment" in fields:
            if include_comments:
                comments_dict = []
                for comment in fields["comment"]["comments"]:
                    comments_dict.append(self.__parser_comment_info(comment))

                results_dict['comments'] = comments_dict
            results_dict['commentsCount'] = fields["comment"]["total"]

        results_dict["status"] = fields["status"]['name']
        results_dict["createdAt"] = fields["created"]
        # results_dict["updated"] = fields["updated"]
        results_dict["labels"] = fields["labels"]
        results_dict["closed"] = self.__get_closed_status(fields["status"]['name'])

        return results_dict

    @staticmethod
    def __parser_project_info(project):
        results_dict = {}
        raw_info = project.raw
        results_dict["id"] = raw_info["id"]
        results_dict["name"] = raw_info["name"]
        results_dict["avatarUrls"] = raw_info["avatarUrls"]
        results_dict["description"] = raw_info["description"] if "description" in raw_info else ""

        return results_dict
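A hypothetical end-to-end use of JiraManager; the server URL, credentials and project id below are placeholders, not values from this diff:

manager = JiraManager(url="https://example.atlassian.net",
                      username="bot@example.com",
                      password="api-token",
                      project_id="10001")
page = manager.get_issues("status != Done")           # JQL fragment ANDed onto the project filter
for issue in page["issues"]:
    print(issue["key"], issue["title"], issue["closed"])

Worth noting as a design observation: since `retries` is a shared class attribute initialised to 0 and only ever decremented, the `(e.status_code // 100) == 4` retry branches can never re-run a call as committed.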
5
api/chalicelib/utils/metrics_helper.py
Normal file
@@ -0,0 +1,5 @@
def __get_step_size(startTimestamp, endTimestamp, density, decimal=False, factor=1000):
    step_size = (endTimestamp // factor - startTimestamp // factor)
    if decimal:
        return step_size / density
    return step_size // (density - 1)
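The helper splits the interval [startTimestamp, endTimestamp] (milliseconds when factor=1000) into `density` sample points; a quick worked example of the integer branch:

# 24 hours in ms, 24 points: (86400 - 0) // (24 - 1) = 3756 seconds per step.
start_ms, end_ms, density = 0, 86_400_000, 24
step = (end_ms // 1000 - start_ms // 1000) // (density - 1)
assert step == 3756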
53
api/chalicelib/utils/pg_client.py
Normal file
@@ -0,0 +1,53 @@
import psycopg2
import psycopg2.extras
from psycopg2 import pool

from chalicelib.utils.helper import environ

PG_CONFIG = {"host": environ["pg_host"],
             "database": environ["pg_dbname"],
             "user": environ["pg_user"],
             "password": environ["pg_password"],
             "port": int(environ["pg_port"])}

try:
    postgreSQL_pool = psycopg2.pool.ThreadedConnectionPool(6, 20, **PG_CONFIG)
    if postgreSQL_pool:
        print("Connection pool created successfully")
except (Exception, psycopg2.DatabaseError) as error:
    print("Error while connecting to PostgreSQL", error)
    raise error


# finally:
#     # closing database connection.
#     # use closeall method to close all the active connections if you want to turn off the application
#     if postgreSQL_pool:
#         postgreSQL_pool.closeall()
#         print("PostgreSQL connection pool is closed")

class PostgresClient:
    connection = None
    cursor = None

    def __init__(self):
        self.connection = postgreSQL_pool.getconn()

    def __enter__(self):
        if self.cursor is None:
            self.cursor = self.connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        return self.cursor

    def __exit__(self, *args):
        try:
            self.connection.commit()
            self.cursor.close()
        except Exception as error:
            # Bind the exception explicitly; a bare `except:` here left `error` undefined.
            print("Error while committing/closing PG-connection", error)
            raise error
        finally:
            postgreSQL_pool.putconn(self.connection)


def close():
    pass
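Typical use of the context manager (the query below is illustrative): the cursor yields dict-like rows thanks to RealDictCursor, the commit happens in __exit__, and the connection then goes back to the pool, so rows must be fetched inside the `with` block:

with PostgresClient() as cur:
    cur.execute("SELECT 1 AS one;")
    row = cur.fetchone()
print(row["one"])  # -> 1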
57
api/chalicelib/utils/s3.py
Normal file
@@ -0,0 +1,57 @@
from botocore.exceptions import ClientError

import boto3

client = boto3.client('s3')
sts_client = boto3.client('sts')


def exists(bucket, key):
    response = client.list_objects_v2(
        Bucket=bucket,
        Prefix=key,
    )
    for obj in response.get('Contents', []):
        if obj['Key'] == key:
            return True
    return False


def get_presigned_url_for_sharing(bucket, expires_in, key, check_exists=False):
    if check_exists and not exists(bucket, key):
        return None

    return client.generate_presigned_url(
        'get_object',
        Params={
            'Bucket': bucket,
            'Key': key
        },
        ExpiresIn=expires_in
    )


def get_presigned_url_for_upload(bucket, expires_in, key):
    return client.generate_presigned_url(
        'put_object',
        Params={
            'Bucket': bucket,
            'Key': key
        },
        ExpiresIn=expires_in
    )


def get_file(source_bucket, source_key):
    try:
        result = client.get_object(
            Bucket=source_bucket,
            Key=source_key
        )
    except ClientError as ex:
        if ex.response['Error']['Code'] == 'NoSuchKey':
            print(f'======> No object found - returning None for {source_bucket}/{source_key}')
            return None
        else:
            raise ex
    return result["Body"].read().decode()
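A hypothetical call for sharing a stored recording (bucket and key are placeholders); with check_exists=True a missing object yields None instead of a presigned URL that would 404:

url = get_presigned_url_for_sharing(bucket="sessions-bucket", expires_in=3600,
                                    key="1/session.mob", check_exists=True)
print(url or "object not found")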
39
api/chalicelib/utils/smtp.py
Normal file
@@ -0,0 +1,39 @@
import smtplib

from chalicelib.utils.helper import environ


class EmptySMTP:
    def sendmail(self, from_addr, to_addrs, msg, mail_options=(), rcpt_options=()):
        print("!! CANNOT SEND EMAIL, NO VALID SMTP CONFIGURATION FOUND")


class SMTPClient:
    server = None

    def __init__(self):
        if environ["EMAIL_HOST"] is None or len(environ["EMAIL_HOST"]) == 0:
            return
        elif environ["EMAIL_USE_SSL"].lower() == "false":
            self.server = smtplib.SMTP(host=environ["EMAIL_HOST"], port=int(environ["EMAIL_PORT"]))
        else:
            if len(environ["EMAIL_SSL_KEY"]) == 0 or len(environ["EMAIL_SSL_CERT"]) == 0:
                self.server = smtplib.SMTP_SSL(host=environ["EMAIL_HOST"], port=int(environ["EMAIL_PORT"]))
            else:
                self.server = smtplib.SMTP_SSL(host=environ["EMAIL_HOST"], port=int(environ["EMAIL_PORT"]),
                                               keyfile=environ["EMAIL_SSL_KEY"], certfile=environ["EMAIL_SSL_CERT"])

    def __enter__(self):
        if self.server is None:
            return EmptySMTP()
        self.server.ehlo()
        if environ["EMAIL_USE_SSL"].lower() == "false" and environ["EMAIL_USE_TLS"].lower() == "true":
            self.server.starttls()
            # smtplib docs recommend calling ehlo() before & after starttls()
            self.server.ehlo()
        self.server.login(user=environ["EMAIL_USER"], password=environ["EMAIL_PASSWORD"])
        return self.server

    def __exit__(self, *args):
        if self.server is None:
            return
        self.server.quit()
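A hypothetical send through the context manager (recipient and body are illustrative); when EMAIL_HOST is unset, __enter__ hands back EmptySMTP, which only logs a warning, so callers need no special casing:

from email.mime.text import MIMEText

msg = MIMEText("Hello from the API")
msg["Subject"], msg["From"], msg["To"] = "Test", environ["EMAIL_FROM"], "user@example.com"
with SMTPClient() as s:
    s.sendmail(environ["EMAIL_FROM"], ["user@example.com"], msg.as_string())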
45
api/chalicelib/utils/strings.py
Normal file
@@ -0,0 +1,45 @@
import string

jsonb = "'::jsonb,'"
dash = '", "'
dash_nl = ',\n'
dash_key = ")s, %("


def __filter(s, chars, max_len):
    s = filter(lambda c: c in chars, s)
    s = "".join(s)
    if len(s) == 0:
        return None
    return s[0:max_len]


__keyword_chars = string.ascii_lowercase + string.ascii_uppercase + string.digits + "_"


def keyword(s):
    if not isinstance(s, str):
        return None
    s = s.strip().replace(" ", "_")
    return __filter(s, __keyword_chars, 30)


__pattern_chars = string.ascii_lowercase + string.ascii_uppercase + string.digits + "_-/*."


def pattern(s):
    if not isinstance(s, str):
        return None
    return __filter(s, __pattern_chars, 1000)


def join(*args):
    return '\x00'.join(args)


def split(s):
    return s.split('\x00')


def hexed(n):
    return hex(n)[2:]
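A few sanity checks for the helpers above (inputs chosen for illustration):

assert keyword("  user id! ") == "user_id"             # filtered to [A-Za-z0-9_], capped at 30 chars
assert pattern("/api/*/session.js") == "/api/*/session.js"
assert split(join("a", "b")) == ["a", "b"]             # '\x00' as an unambiguous separator
assert hexed(255) == "ff"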