diff --git a/.github/workflows/api.yaml b/.github/workflows/api.yaml
index 435d07126..c247b2a68 100644
--- a/.github/workflows/api.yaml
+++ b/.github/workflows/api.yaml
@@ -39,13 +39,11 @@ jobs:
ENVIRONMENT: staging
run: |
cd api
- bash build.sh
- [[ -z "${DOCKER_REPO}" ]] || {
- docker push ${DOCKER_REPO}/chalice:"${IMAGE_TAG}"
- }
+ PUSH_IMAGE=1 bash build.sh
- name: Deploy to kubernetes
run: |
cd scripts/helm/
+ sed -i "s#domain_name.*#domain_name: \"foss.openreplay.com\" #g" vars.yaml
sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml
sed -i "s/tag:.*/tag: \"$IMAGE_TAG\"/g" app/chalice.yaml
bash kube-install.sh --app chalice
diff --git a/api/.chalice/config.json b/api/.chalice/config.json
index 8f2874beb..8385a17e7 100644
--- a/api/.chalice/config.json
+++ b/api/.chalice/config.json
@@ -28,14 +28,12 @@
"assign_link": "http://127.0.0.1:8000/async/email_assignment",
"captcha_server": "",
"captcha_key": "",
- "sessions_bucket": "asayer-mobs",
+ "sessions_bucket": "mobs",
"sessions_region": "us-east-1",
"put_S3_TTL": "20",
- "sourcemaps_bucket": "asayer-sourcemaps",
- "sourcemaps_bucket_key": "",
- "sourcemaps_bucket_secret": "",
- "sourcemaps_bucket_region": "us-east-1",
- "js_cache_bucket": "asayer-sessions-assets",
+ "sourcemaps_reader": "http://127.0.0.1:3000/",
+ "sourcemaps_bucket": "sourcemaps",
+ "js_cache_bucket": "sessions-assets",
"async_Token": "",
"EMAIL_HOST": "",
"EMAIL_PORT": "587",
diff --git a/api/.gitignore b/api/.gitignore
index d9688e343..dd32b5d3f 100644
--- a/api/.gitignore
+++ b/api/.gitignore
@@ -170,7 +170,7 @@ logs*.txt
*.csv
*.p
-*.js
SUBNETS.json
./chalicelib/.configs
+README/*
\ No newline at end of file
diff --git a/api/Dockerfile b/api/Dockerfile
index 0ca8c1edf..84d1b88f5 100644
--- a/api/Dockerfile
+++ b/api/Dockerfile
@@ -4,6 +4,14 @@ WORKDIR /work
COPY . .
RUN pip install -r requirements.txt -t ./vendor --upgrade
RUN pip install chalice==1.22.2
+# Installing Nodejs
+RUN apt update && apt install -y curl && \
+ curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \
+ apt install -y nodejs && \
+ apt remove --purge -y curl && \
+ rm -rf /var/lib/apt/lists/* && \
+ cd sourcemaps_reader && \
+ npm install
# Add Tini
# Startup daemon
@@ -13,4 +21,4 @@ ENV ENTERPRISE_BUILD ${envarg}
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
RUN chmod +x /tini
ENTRYPOINT ["/tini", "--"]
-CMD python env_handler.py && chalice local --no-autoreload --host 0.0.0.0 --stage ${ENTERPRISE_BUILD}
\ No newline at end of file
+CMD ./entrypoint.sh
diff --git a/api/app.py b/api/app.py
index 469d8a42f..2c4465189 100644
--- a/api/app.py
+++ b/api/app.py
@@ -23,13 +23,13 @@ import traceback
old_tb = traceback.print_exception
old_f = sys.stdout
old_e = sys.stderr
-ASAYER_SESSION_ID = None
+OR_SESSION_TOKEN = None
class F:
def write(self, x):
- if ASAYER_SESSION_ID is not None and x != '\n' and not helper.is_local():
- old_f.write(f"[asayer_session_id={ASAYER_SESSION_ID}] {x}")
+ if OR_SESSION_TOKEN is not None and x != '\n' and not helper.is_local():
+ old_f.write(f"[or_session_token={OR_SESSION_TOKEN}] {x}")
else:
old_f.write(x)
@@ -38,9 +38,8 @@ class F:
def tb_print_exception(etype, value, tb, limit=None, file=None, chain=True):
- if ASAYER_SESSION_ID is not None and not helper.is_local():
- # bugsnag.notify(Exception(str(value)), meta_data={"special_info": {"asayerSessionId": ASAYER_SESSION_ID}})
- value = type(value)(f"[asayer_session_id={ASAYER_SESSION_ID}] " + str(value))
+ if OR_SESSION_TOKEN is not None and not helper.is_local():
+ value = type(value)(f"[or_session_token={OR_SESSION_TOKEN}] " + str(value))
old_tb(etype, value, tb, limit, file, chain)
@@ -55,11 +54,11 @@ sys.stderr = F()
_overrides.chalice_app(app)
-# v0905
+
@app.middleware('http')
-def asayer_middleware(event, get_response):
- global ASAYER_SESSION_ID
- ASAYER_SESSION_ID = app.current_request.headers.get('vnd.openreplay.com.sid',
+def or_middleware(event, get_response):
+ global OR_SESSION_TOKEN
+ OR_SESSION_TOKEN = app.current_request.headers.get('vnd.openreplay.com.sid',
app.current_request.headers.get('vnd.asayer.io.sid'))
if "authorizer" in event.context and event.context["authorizer"] is None:
print("Deleted user!!")
@@ -71,19 +70,24 @@ def asayer_middleware(event, get_response):
import time
now = int(time.time() * 1000)
response = get_response(event)
+ if response.status_code == 500 and helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local():
+ with configure_scope() as scope:
+ scope.set_tag('stage', environ["stage"])
+ scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN)
+ scope.set_extra("context", event.context)
+ sentry_sdk.capture_exception(Exception(response.body))
if helper.TRACK_TIME:
print(f"Execution time: {int(time.time() * 1000) - now} ms")
except Exception as e:
- print("middleware exception handling")
- print(e)
- pg_client.close()
- if helper.allow_sentry() and ASAYER_SESSION_ID is not None and not helper.is_local():
+ if helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local():
with configure_scope() as scope:
scope.set_tag('stage', environ["stage"])
- scope.set_tag('openReplaySessionToken', ASAYER_SESSION_ID)
+ scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN)
scope.set_extra("context", event.context)
sentry_sdk.capture_exception(e)
- raise e
+ response = Response(body={"Code": "InternalServerError",
+ "Message": "An internal server error occurred [level=Fatal]."},
+ status_code=500)
pg_client.close()
return response
diff --git a/api/chalicelib/blueprints/bp_core.py b/api/chalicelib/blueprints/bp_core.py
index 3b2910606..bd42b2254 100644
--- a/api/chalicelib/blueprints/bp_core.py
+++ b/api/chalicelib/blueprints/bp_core.py
@@ -881,5 +881,5 @@ def all_issue_types(context):
@app.route('/flows', methods=['GET', 'PUT', 'POST', 'DELETE'])
@app.route('/{projectId}/flows', methods=['GET', 'PUT', 'POST', 'DELETE'])
-def removed_endpoints(context):
+def removed_endpoints(projectId=None, context=None):
return Response(body={"errors": ["Endpoint no longer available"]}, status_code=410)
diff --git a/api/chalicelib/blueprints/bp_core_dynamic.py b/api/chalicelib/blueprints/bp_core_dynamic.py
index 4ec5278d7..1768896f9 100644
--- a/api/chalicelib/blueprints/bp_core_dynamic.py
+++ b/api/chalicelib/blueprints/bp_core_dynamic.py
@@ -35,7 +35,7 @@ def login():
if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]):
return {"errors": ["Invalid captcha."]}
r = users.authenticate(data['email'], data['password'],
- for_plugin= False
+ for_plugin=False
)
if r is None:
return {
@@ -73,10 +73,12 @@ def get_account(context):
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(context['tenantId'])
},
- **license.get_status(context["tenantId"])
+ **license.get_status(context["tenantId"]),
+ "smtp": environ["EMAIL_HOST"] is not None and len(environ["EMAIL_HOST"]) > 0
}
}
+
@app.route('/projects', methods=['GET'])
def get_projects(context):
return {"data": projects.get_projects(tenant_id=context["tenantId"], recording_state=True, gdpr=True, recorded=True,
@@ -156,12 +158,28 @@ def add_slack_client(context):
data = app.current_request.json_body
if "url" not in data or "name" not in data:
return {"errors": ["please provide a url and a name"]}
- if Slack.add_integration(tenant_id=context["tenantId"], url=data["url"], name=data["name"]):
- return {"data": {"status": "success"}}
- else:
+ n = Slack.add_channel(tenant_id=context["tenantId"], url=data["url"], name=data["name"])
+ if n is None:
return {
- "errors": ["failed URL verification, if you received a message on slack, please notify our dev-team"]
+ "errors": ["We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
}
+ return {"data": n}
+
+
+@app.route('/integrations/slack/{integrationId}', methods=['POST', 'PUT'])
+def edit_slack_integration(integrationId, context):
+ data = app.current_request.json_body
+ if data.get("url") and len(data["url"]) > 0:
+ old = webhook.get(tenant_id=context["tenantId"], webhook_id=integrationId)
+ if old["endpoint"] != data["url"]:
+ if not Slack.say_hello(data["url"]):
+ return {
+ "errors": [
+ "We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
+ }
+ return {"data": webhook.update(tenant_id=context["tenantId"], webhook_id=integrationId,
+ changes={"name": data.get("name", ""), "endpoint": data["url"]})}
+
@app.route('/{projectId}/errors/search', methods=['POST'])
def errors_search(projectId, context):
@@ -386,6 +404,7 @@ def search_sessions_by_metadata(context):
m_key=key,
project_id=project_id)}
+
@app.route('/plans', methods=['GET'])
def get_current_plan(context):
return {
diff --git a/api/chalicelib/core/collaboration_slack.py b/api/chalicelib/core/collaboration_slack.py
index 5fc80511c..b3da03a37 100644
--- a/api/chalicelib/core/collaboration_slack.py
+++ b/api/chalicelib/core/collaboration_slack.py
@@ -6,19 +6,18 @@ from chalicelib.core import webhook
class Slack:
@classmethod
- def add_integration(cls, tenant_id, **args):
+ def add_channel(cls, tenant_id, **args):
url = args["url"]
name = args["name"]
- if cls.__say_hello(url):
- webhook.add(tenant_id=tenant_id,
- endpoint=url,
- webhook_type="slack",
- name=name)
- return True
- return False
+ if cls.say_hello(url):
+ return webhook.add(tenant_id=tenant_id,
+ endpoint=url,
+ webhook_type="slack",
+ name=name)
+ return None
@classmethod
- def __say_hello(cls, url):
+ def say_hello(cls, url):
r = requests.post(
url=url,
json={
diff --git a/api/chalicelib/core/dashboard.py b/api/chalicelib/core/dashboard.py
index a778dcdfc..f306a51b4 100644
--- a/api/chalicelib/core/dashboard.py
+++ b/api/chalicelib/core/dashboard.py
@@ -146,7 +146,6 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
ORDER BY generated_timestamp;"""
params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp,
"endTimestamp": endTimestamp, **__get_constraint_values(args)}
- print(cur.mogrify(pg_query, params))
cur.execute(cur.mogrify(pg_query, params))
rows = cur.fetchall()
results = {
@@ -640,9 +639,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False,
FROM events.pages INNER JOIN public.sessions USING (session_id)
WHERE {" AND ".join(pg_sub_query)}
LIMIT 10;"""
- print(cur.mogrify(pg_query, {"project_id": project_id,
- "value": helper.string_to_sql_like(text),
- "platform_0": platform}))
cur.execute(cur.mogrify(pg_query, {"project_id": project_id,
"value": helper.string_to_sql_like(text),
"platform_0": platform}))
diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py
index 65ade49ed..69213a079 100644
--- a/api/chalicelib/core/events.py
+++ b/api/chalicelib/core/events.py
@@ -365,7 +365,7 @@ def __get_merged_queries(queries, value, project_id):
def __get_autocomplete_table(value, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""SELECT DISTINCT ON(value,type) project_id, value, type
- FROM (SELECT *
+ FROM (SELECT project_id, type, value
FROM (SELECT *,
ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID
FROM public.autocomplete
diff --git a/api/chalicelib/core/integration_jira_cloud_issue.py b/api/chalicelib/core/integration_jira_cloud_issue.py
index 00fac2fcb..bb847007a 100644
--- a/api/chalicelib/core/integration_jira_cloud_issue.py
+++ b/api/chalicelib/core/integration_jira_cloud_issue.py
@@ -34,7 +34,7 @@ class JIRACloudIntegrationIssue(BaseIntegrationIssue):
if len(projects_map[integration_project_id]) > 0:
jql += f" AND ID IN ({','.join(projects_map[integration_project_id])})"
issues = self._client.get_issues(jql, offset=0)
- results += [issues]
+ results += issues
return {"issues": results}
def get(self, integration_project_id, assignment_id):
diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py
index 439bca0fd..fa127b04a 100644
--- a/api/chalicelib/core/sessions.py
+++ b/api/chalicelib/core/sessions.py
@@ -1,6 +1,6 @@
from chalicelib.utils import pg_client, helper
from chalicelib.core import events, sessions_metas, socket_ios, metadata, events_ios, \
- sessions_mobs
+ sessions_mobs, issues
from chalicelib.utils import dev
from chalicelib.core import projects, errors
@@ -25,7 +25,7 @@ SESSION_PROJECTION_COLS = """s.project_id,
s.user_anonymous_id,
s.platform,
s.issue_score,
- s.issue_types::text[] AS issue_types,
+ to_jsonb(s.issue_types) AS issue_types,
favorite_sessions.session_id NOTNULL AS favorite,
COALESCE((SELECT TRUE
FROM public.user_viewed_sessions AS fs
@@ -84,7 +84,6 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id,
session_id=session_id)
data['mobsUrl'] = sessions_mobs.get_ios(sessionId=session_id)
- data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
data["socket"] = socket_ios.start_replay(project_id=project_id, session_id=session_id,
device=data["userDevice"],
os_version=data["userOsVersion"],
@@ -101,9 +100,11 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
session_id=session_id)
data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id)
- data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
data['resources'] = resources.get_by_session_id(session_id=session_id)
+ data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
+ data['issues'] = issues.get_by_session_id(session_id=session_id)
+
return data
return None
diff --git a/api/chalicelib/core/sessions_assignments.py b/api/chalicelib/core/sessions_assignments.py
index 2b9c28d8f..3e0929dad 100644
--- a/api/chalicelib/core/sessions_assignments.py
+++ b/api/chalicelib/core/sessions_assignments.py
@@ -119,7 +119,6 @@ def get_by_session(tenant_id, user_id, project_id, session_id):
continue
r = integration.issue_handler.get_by_ids(saved_issues=issues[tool])
- print(r)
for i in r["issues"]:
i["provider"] = tool
results += r["issues"]
diff --git a/api/chalicelib/core/sessions_mobs.py b/api/chalicelib/core/sessions_mobs.py
index 75ac59307..ea020d412 100644
--- a/api/chalicelib/core/sessions_mobs.py
+++ b/api/chalicelib/core/sessions_mobs.py
@@ -1,14 +1,10 @@
from chalicelib.utils.helper import environ
-import boto3
+from chalicelib.utils.s3 import client
def get_web(sessionId):
- return boto3.client('s3',
- endpoint_url=environ["S3_HOST"],
- aws_access_key_id=environ["S3_KEY"],
- aws_secret_access_key=environ["S3_SECRET"],
- region_name=environ["sessions_region"]).generate_presigned_url(
+ return client.generate_presigned_url(
'get_object',
Params={
'Bucket': environ["sessions_bucket"],
@@ -19,7 +15,7 @@ def get_web(sessionId):
def get_ios(sessionId):
- return boto3.client('s3', region_name=environ["ios_region"]).generate_presigned_url(
+ return client.generate_presigned_url(
'get_object',
Params={
'Bucket': environ["ios_bucket"],
diff --git a/api/chalicelib/core/sourcemaps.py b/api/chalicelib/core/sourcemaps.py
index c198b859b..01204847c 100644
--- a/api/chalicelib/core/sourcemaps.py
+++ b/api/chalicelib/core/sourcemaps.py
@@ -80,7 +80,12 @@ def get_traces_group(project_id, payload):
payloads = {}
all_exists = True
for i, u in enumerate(frames):
+ print("===============================")
+ print(u["absPath"])
+ print("converted to:")
key = __get_key(project_id, u["absPath"]) # use filename instead?
+ print(key)
+ print("===============================")
if key not in payloads:
file_exists = s3.exists(environ['sourcemaps_bucket'], key)
all_exists = all_exists and file_exists
diff --git a/api/chalicelib/core/sourcemaps_parser.py b/api/chalicelib/core/sourcemaps_parser.py
index cb0463d55..b7c17f3d3 100644
--- a/api/chalicelib/core/sourcemaps_parser.py
+++ b/api/chalicelib/core/sourcemaps_parser.py
@@ -8,14 +8,9 @@ def get_original_trace(key, positions):
"key": key,
"positions": positions,
"padding": 5,
- "bucket": environ['sourcemaps_bucket'],
- "bucket_config": {
- "aws_access_key_id": environ["sourcemaps_bucket_key"],
- "aws_secret_access_key": environ["sourcemaps_bucket_secret"],
- "aws_region": environ["sourcemaps_bucket_region"]
- }
+ "bucket": environ['sourcemaps_bucket']
}
- r = requests.post(environ["sourcemaps"], json=payload)
+ r = requests.post(environ["sourcemaps_reader"], json=payload)
if r.status_code != 200:
return {}
diff --git a/api/chalicelib/core/telemetry.py b/api/chalicelib/core/telemetry.py
index 362550553..48f403f57 100644
--- a/api/chalicelib/core/telemetry.py
+++ b/api/chalicelib/core/telemetry.py
@@ -30,7 +30,7 @@ def compute():
RETURNING *,(SELECT email FROM public.users WHERE role='owner' LIMIT 1);"""
)
data = cur.fetchone()
- requests.post('https://parrot.asayer.io/os/telemetry', json=process_data(data))
+ requests.post('https://parrot.asayer.io/os/telemetry', json={"stats": [process_data(data)]})
def new_client():
diff --git a/api/chalicelib/core/tenants.py b/api/chalicelib/core/tenants.py
index f047dcffa..4b439cfef 100644
--- a/api/chalicelib/core/tenants.py
+++ b/api/chalicelib/core/tenants.py
@@ -10,7 +10,7 @@ def get_by_tenant_id(tenant_id):
f"""SELECT
tenant_id,
name,
- api_key
+ api_key,
created_at,
edition,
version_number,
diff --git a/api/chalicelib/core/webhook.py b/api/chalicelib/core/webhook.py
index 99a3b0569..fff2d4e7e 100644
--- a/api/chalicelib/core/webhook.py
+++ b/api/chalicelib/core/webhook.py
@@ -24,7 +24,7 @@ def get(tenant_id, webhook_id):
cur.execute(
cur.mogrify("""\
SELECT
- w.*
+ webhook_id AS integration_id, webhook_id AS id, w.*
FROM public.webhooks AS w
where w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
{"webhook_id": webhook_id})
@@ -40,7 +40,7 @@ def get_by_type(tenant_id, webhook_type):
cur.execute(
cur.mogrify("""\
SELECT
- w.webhook_id AS id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
+ w.webhook_id AS integration_id, w.webhook_id AS id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
FROM public.webhooks AS w
WHERE w.type =%(type)s AND deleted_at ISNULL;""",
{"type": webhook_type})
@@ -55,7 +55,7 @@ def get_by_tenant(tenant_id, replace_none=False):
with pg_client.PostgresClient() as cur:
cur.execute("""\
SELECT
- w.*
+ webhook_id AS integration_id, webhook_id AS id, w.*
FROM public.webhooks AS w
WHERE deleted_at ISNULL;"""
)
@@ -81,7 +81,7 @@ def update(tenant_id, webhook_id, changes, replace_none=False):
UPDATE public.webhooks
SET {','.join(sub_query)}
WHERE webhook_id =%(id)s AND deleted_at ISNULL
- RETURNING *;""",
+ RETURNING webhook_id AS integration_id, webhook_id AS id,*;""",
{"id": webhook_id, **changes})
)
w = helper.dict_to_camel_case(cur.fetchone())
@@ -98,7 +98,7 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="",
query = cur.mogrify("""\
INSERT INTO public.webhooks(endpoint,auth_header,type,name)
VALUES (%(endpoint)s, %(auth_header)s, %(type)s,%(name)s)
- RETURNING *;""",
+ RETURNING webhook_id AS integration_id, webhook_id AS id,*;""",
{"endpoint": endpoint, "auth_header": auth_header,
"type": webhook_type, "name": name})
cur.execute(
diff --git a/api/chalicelib/utils/jira_client.py b/api/chalicelib/utils/jira_client.py
index 6da501bbe..a7ab92932 100644
--- a/api/chalicelib/utils/jira_client.py
+++ b/api/chalicelib/utils/jira_client.py
@@ -68,7 +68,8 @@ class JiraManager:
# print(issue.raw)
issue_dict_list.append(self.__parser_issue_info(issue, include_comments=False))
- return {"total": issues.total, "issues": issue_dict_list}
+ # return {"total": issues.total, "issues": issue_dict_list}
+ return issue_dict_list
def get_issue(self, issue_id: str):
try:
diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py
index 8d1e37d40..89a9dc8fa 100644
--- a/api/chalicelib/utils/pg_client.py
+++ b/api/chalicelib/utils/pg_client.py
@@ -9,9 +9,25 @@ PG_CONFIG = {"host": environ["pg_host"],
"port": int(environ["pg_port"])}
from psycopg2 import pool
+from threading import Semaphore
+
+
+class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool):
+ def __init__(self, minconn, maxconn, *args, **kwargs):
+ self._semaphore = Semaphore(maxconn)
+ super().__init__(minconn, maxconn, *args, **kwargs)
+
+ def getconn(self, *args, **kwargs):
+ self._semaphore.acquire()
+ return super().getconn(*args, **kwargs)
+
+ def putconn(self, *args, **kwargs):
+ super().putconn(*args, **kwargs)
+ self._semaphore.release()
+
try:
- postgreSQL_pool = psycopg2.pool.ThreadedConnectionPool(6, 20, **PG_CONFIG)
+ postgreSQL_pool = ORThreadedConnectionPool(20, 100, **PG_CONFIG)
if (postgreSQL_pool):
print("Connection pool created successfully")
except (Exception, psycopg2.DatabaseError) as error:
@@ -19,13 +35,6 @@ except (Exception, psycopg2.DatabaseError) as error:
raise error
-# finally:
-# # closing database connection.
-# # use closeall method to close all the active connection if you want to turn of the application
-# if (postgreSQL_pool):
-# postgreSQL_pool.closeall
-# print("PostgreSQL connection pool is closed")
-
class PostgresClient:
connection = None
cursor = None
diff --git a/api/chalicelib/utils/s3.py b/api/chalicelib/utils/s3.py
index 29a8d28bc..49b6cfc85 100644
--- a/api/chalicelib/utils/s3.py
+++ b/api/chalicelib/utils/s3.py
@@ -2,7 +2,7 @@ from botocore.exceptions import ClientError
from chalicelib.utils.helper import environ
import boto3
-
+import botocore
from botocore.client import Config
client = boto3.client('s3', endpoint_url=environ["S3_HOST"],
@@ -13,14 +13,20 @@ client = boto3.client('s3', endpoint_url=environ["S3_HOST"],
def exists(bucket, key):
- response = client.list_objects_v2(
- Bucket=bucket,
- Prefix=key,
- )
- for obj in response.get('Contents', []):
- if obj['Key'] == key:
- return True
- return False
+ try:
+ boto3.resource('s3', endpoint_url=environ["S3_HOST"],
+ aws_access_key_id=environ["S3_KEY"],
+ aws_secret_access_key=environ["S3_SECRET"],
+ config=Config(signature_version='s3v4'),
+ region_name='us-east-1') \
+ .Object(bucket, key).load()
+ except botocore.exceptions.ClientError as e:
+ if e.response['Error']['Code'] == "404":
+ return False
+ else:
+ # Something else has gone wrong.
+ raise
+ return True
def get_presigned_url_for_sharing(bucket, expires_in, key, check_exists=False):
@@ -49,6 +55,9 @@ def get_presigned_url_for_upload(bucket, expires_in, key):
def get_file(source_bucket, source_key):
+ print("******************************")
+ print(f"looking for: {source_key} in {source_bucket}")
+ print("******************************")
try:
result = client.get_object(
Bucket=source_bucket,
diff --git a/api/entrypoint.sh b/api/entrypoint.sh
new file mode 100755
index 000000000..3c3d12fd5
--- /dev/null
+++ b/api/entrypoint.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+cd sourcemaps_reader
+nohup node server.js &> /tmp/sourcemaps_reader.log &
+cd ..
+python env_handler.py
+chalice local --no-autoreload --host 0.0.0.0 --stage ${ENTERPRISE_BUILD}
diff --git a/api/requirements.txt b/api/requirements.txt
index 094d32758..671aa5da5 100644
--- a/api/requirements.txt
+++ b/api/requirements.txt
@@ -5,9 +5,6 @@ pyjwt==1.7.1
psycopg2-binary==2.8.6
pytz==2020.1
sentry-sdk==0.19.1
-rollbar==0.15.1
-bugsnag==4.0.1
-kubernetes==12.0.0
elasticsearch==7.9.1
jira==2.0.0
schedule==1.1.0
diff --git a/api/sourcemaps_reader/handler.js b/api/sourcemaps_reader/handler.js
new file mode 100644
index 000000000..117808cae
--- /dev/null
+++ b/api/sourcemaps_reader/handler.js
@@ -0,0 +1,111 @@
+'use strict';
+const sourceMap = require('source-map');
+const AWS = require('aws-sdk');
+const sourceMapVersion = require('./package.json').dependencies["source-map"];
+const URL = require('url');
+const getVersion = version => version.replace(/[\^\$\=\~]/, "");
+
+module.exports.sourcemapReader = async event => {
+ sourceMap.SourceMapConsumer.initialize({
+ "lib/mappings.wasm": `https://unpkg.com/source-map@${getVersion(sourceMapVersion)}/lib/mappings.wasm`
+ });
+ let s3;
+ if (process.env.S3_HOST) {
+ s3 = new AWS.S3({
+ endpoint: process.env.S3_HOST,
+ accessKeyId: process.env.S3_KEY,
+ secretAccessKey: process.env.S3_SECRET,
+ s3ForcePathStyle: true, // needed with minio?
+ signatureVersion: 'v4'
+ });
+ } else {
+ s3 = new AWS.S3({
+ 'AccessKeyID': process.env.aws_access_key_id,
+ 'SecretAccessKey': process.env.aws_secret_access_key,
+ 'Region': process.env.aws_region
+ });
+ }
+
+ var options = {
+ Bucket: event.bucket,
+ Key: event.key
+ };
+ return new Promise(function (resolve, reject) {
+ s3.getObject(options, (err, data) => {
+ if (err) {
+ console.log("Get S3 object failed");
+ console.log(err);
+ return reject(err);
+ }
+ const sourcemap = data.Body.toString();
+
+ return new sourceMap.SourceMapConsumer(sourcemap)
+ .then(consumer => {
+ let results = [];
+ for (let i = 0; i < event.positions.length; i++) {
+ let original = consumer.originalPositionFor({
+ line: event.positions[i].line,
+ column: event.positions[i].column
+ });
+ let url = URL.parse("");
+ let preview = [];
+ if (original.source) {
+ preview = consumer.sourceContentFor(original.source, true);
+ if (preview !== null) {
+ preview = preview.split("\n")
+ .map((line, i) => [i + 1, line]);
+ if (event.padding) {
+ let start = original.line < event.padding ? 0 : original.line - event.padding;
+ preview = preview.slice(start, original.line + event.padding);
+ }
+ } else {
+ console.log("source not found, null preview for:");
+ console.log(original.source);
+ preview = []
+ }
+ url = URL.parse(original.source);
+ } else {
+ console.log("couldn't find original position of:");
+ console.log({
+ line: event.positions[i].line,
+ column: event.positions[i].column
+ });
+ }
+ let result = {
+ "absPath": url.href,
+ "filename": url.pathname,
+ "lineNo": original.line,
+ "colNo": original.column,
+ "function": original.name,
+ "context": preview
+ };
+ // console.log(result);
+ results.push(result);
+ }
+
+ // Use this code if you don't use the http event with the LAMBDA-PROXY integration
+ return resolve(results);
+ });
+ });
+ });
+};
+
+
+// let v = {
+// 'key': '1725/99f96f044fa7e941dbb15d7d68b20549',
+// 'positions': [{'line': 1, 'column': 943}],
+// 'padding': 5,
+// 'bucket': 'asayer-sourcemaps'
+// };
+// let v = {
+// 'key': '1/65d8d3866bb8c92f3db612cb330f270c',
+// 'positions': [{'line': 1, 'column': 0}],
+// 'padding': 5,
+// 'bucket': 'asayer-sourcemaps-staging'
+// };
+// module.exports.sourcemapReader(v).then((r) => {
+// // console.log(r);
+// const fs = require('fs');
+// let data = JSON.stringify(r);
+// fs.writeFileSync('results.json', data);
+// });
\ No newline at end of file
diff --git a/api/sourcemaps_reader/server.js b/api/sourcemaps_reader/server.js
new file mode 100644
index 000000000..2a1c4dcf6
--- /dev/null
+++ b/api/sourcemaps_reader/server.js
@@ -0,0 +1,38 @@
+const http = require('http');
+const handler = require('./handler');
+const hostname = '127.0.0.1';
+const port = 3000;
+
+const server = http.createServer((req, res) => {
+ if (req.method === 'POST') {
+ let data = '';
+ req.on('data', chunk => {
+ data += chunk;
+ });
+ req.on('end', function () {
+ data = JSON.parse(data);
+ console.log("Starting parser for: " + data.key);
+ // process.env = {...process.env, ...data.bucket_config};
+ handler.sourcemapReader(data)
+ .then((results) => {
+ res.statusCode = 200;
+ res.setHeader('Content-Type', 'application/json');
+ res.end(JSON.stringify(results));
+ })
+ .catch((e) => {
+ console.error("Something went wrong");
+ console.error(e);
+ res.statusCode = 500;
+ res.end(String(e));
+ });
+ })
+ } else {
+ res.statusCode = 405;
+ res.setHeader('Content-Type', 'text/plain');
+ res.end('Method Not Allowed');
+ }
+});
+
+server.listen(port, hostname, () => {
+ console.log(`Server running at http://${hostname}:${port}/`);
+});
\ No newline at end of file
diff --git a/backend/pkg/db/postgres/messages_web.go b/backend/pkg/db/postgres/messages_web.go
index 9156ab78e..25e044e68 100644
--- a/backend/pkg/db/postgres/messages_web.go
+++ b/backend/pkg/db/postgres/messages_web.go
@@ -92,8 +92,8 @@ func (conn *Conn) InsertWebPageEvent(sessionID uint64, e *PageEvent) error {
if err = tx.commit(); err != nil {
return err
}
- conn.insertAutocompleteValue(sessionID, url.DiscardURLQuery(path), "LOCATION")
- conn.insertAutocompleteValue(sessionID, url.DiscardURLQuery(e.Referrer), "REFERRER")
+ conn.insertAutocompleteValue(sessionID, "LOCATION", url.DiscardURLQuery(path))
+ conn.insertAutocompleteValue(sessionID, "REFERRER", url.DiscardURLQuery(e.Referrer))
return nil
}
@@ -123,7 +123,7 @@ func (conn *Conn) InsertWebClickEvent(sessionID uint64, e *ClickEvent) error {
if err = tx.commit(); err != nil {
return err
}
- conn.insertAutocompleteValue(sessionID, e.Label, "CLICK")
+ conn.insertAutocompleteValue(sessionID, "CLICK", e.Label)
return nil
}
@@ -158,7 +158,7 @@ func (conn *Conn) InsertWebInputEvent(sessionID uint64, e *InputEvent) error {
if err = tx.commit(); err != nil {
return err
}
- conn.insertAutocompleteValue(sessionID, e.Label, "INPUT")
+ conn.insertAutocompleteValue(sessionID, "INPUT", e.Label)
return nil
}
diff --git a/backend/services/db/messages.go b/backend/services/db/messages.go
index 6aa4ac076..511165c5f 100644
--- a/backend/services/db/messages.go
+++ b/backend/services/db/messages.go
@@ -16,6 +16,7 @@ func insertMessage(sessionID uint64, msg Message) error {
// Web
case *SessionStart:
+ log.Printf("Session Start: %v", sessionID)
return pg.InsertWebSessionStart(sessionID, m)
case *SessionEnd:
return pg.InsertWebSessionEnd(sessionID, m)
diff --git a/backend/services/ender/builder/builder.go b/backend/services/ender/builder/builder.go
index cccf96bcf..246b2f7e0 100644
--- a/backend/services/ender/builder/builder.go
+++ b/backend/services/ender/builder/builder.go
@@ -82,6 +82,9 @@ func (b *builder) iterateReadyMessage(iter func(msg Message)) {
}
func (b *builder) buildSessionEnd() {
+ if b.timestamp == 0 {
+ return
+ }
sessionEnd := &SessionEnd{
Timestamp: b.timestamp, // + delay?
}
@@ -106,16 +109,25 @@ func (b *builder) buildInputEvent() {
func (b *builder) handleMessage(message Message, messageID uint64) {
timestamp := uint64(message.Meta().Timestamp)
- if b.timestamp <= timestamp {
+ if b.timestamp <= timestamp { // unnecessary. TODO: test and remove
b.timestamp = timestamp
}
- // Start from the first timestamp.
+ // Before the first timestamp.
switch msg := message.(type) {
case *SessionStart,
*Metadata,
*UserID,
*UserAnonymousID:
b.appendReadyMessage(msg)
+ case *RawErrorEvent:
+ b.appendReadyMessage(&ErrorEvent{
+ MessageID: messageID,
+ Timestamp: msg.Timestamp,
+ Source: msg.Source,
+ Name: msg.Name,
+ Message: msg.Message,
+ Payload: msg.Payload,
+ })
}
if b.timestamp == 0 {
return
@@ -177,15 +189,6 @@ func (b *builder) handleMessage(message Message, messageID uint64) {
Timestamp: b.timestamp,
})
}
- case *RawErrorEvent:
- b.appendReadyMessage(&ErrorEvent{
- MessageID: messageID,
- Timestamp: msg.Timestamp,
- Source: msg.Source,
- Name: msg.Name,
- Message: msg.Message,
- Payload: msg.Payload,
- })
case *JSException:
b.appendReadyMessage(&ErrorEvent{
MessageID: messageID,
diff --git a/backend/services/ender/builder/inputEventBuilder.go b/backend/services/ender/builder/inputEventBuilder.go
index 4938e47a9..98c7ebaf6 100644
--- a/backend/services/ender/builder/inputEventBuilder.go
+++ b/backend/services/ender/builder/inputEventBuilder.go
@@ -69,10 +69,10 @@ func (b *inputEventBuilder) Build() *InputEvent {
return nil
}
inputEvent := b.inputEvent
- label := b.inputLabels[b.inputID]
- // if !ok {
- // return nil
- // }
+ label, exists := b.inputLabels[b.inputID]
+ if !exists {
+ return nil
+ }
inputEvent.Label = label
b.inputEvent = nil
diff --git a/backend/services/integrations/integration/sentry.go b/backend/services/integrations/integration/sentry.go
index 39443f51a..0330430c3 100644
--- a/backend/services/integrations/integration/sentry.go
+++ b/backend/services/integrations/integration/sentry.go
@@ -111,7 +111,7 @@ PageLoop:
c.errChan <- err
continue
}
- if sessionID == 0 { // We can't felter them on request
+ if token == "" && sessionID == 0 { // We can't filter them on request
continue
}
diff --git a/backend/services/integrations/main.go b/backend/services/integrations/main.go
index 68b4ec5aa..e1ea58ebd 100644
--- a/backend/services/integrations/main.go
+++ b/backend/services/integrations/main.go
@@ -19,7 +19,7 @@ import (
func main() {
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
- TOPIC_TRIGGER := env.String("TOPIC_TRIGGER")
+ TOPIC_RAW := env.String("TOPIC_RAW")
POSTGRES_STRING := env.String("POSTGRES_STRING")
pg := postgres.NewConn(POSTGRES_STRING)
@@ -43,6 +43,7 @@ func main() {
})
producer:= queue.NewProducer()
+ defer producer.Close(15000)
listener, err := postgres.NewIntegrationsListener(POSTGRES_STRING)
if err != nil {
@@ -72,13 +73,14 @@ func main() {
sessionID := event.SessionID
if sessionID == 0 {
sessData, err := tokenizer.Parse(event.Token)
- if err != nil {
+ if err != nil && err != token.EXPIRED {
log.Printf("Error on token parsing: %v; Token: %v", err, event.Token)
continue
}
sessionID = sessData.ID
}
- producer.Produce(TOPIC_TRIGGER, sessionID, messages.Encode(event.RawErrorEvent))
+ // TODO: send to ready-events topic. Otherwise it has to go through the events worker.
+ producer.Produce(TOPIC_RAW, sessionID, messages.Encode(event.RawErrorEvent))
case err := <-manager.Errors:
log.Printf("Integration error: %v\n", err)
case i := <-manager.RequestDataUpdates:
@@ -86,10 +88,10 @@ func main() {
if err := pg.UpdateIntegrationRequestData(&i); err != nil {
log.Printf("Postgres Update request_data error: %v\n", err)
}
- //case err := <-listener.Errors:
- //log.Printf("Postgres listen error: %v\n", err)
+ case err := <-listener.Errors:
+ log.Printf("Postgres listen error: %v\n", err)
case iPointer := <-listener.Integrations:
- // log.Printf("Integration update: %v\n", *iPointer)
+ log.Printf("Integration update: %v\n", *iPointer)
err := manager.Update(iPointer)
if err != nil {
log.Printf("Integration parse error: %v | Integration: %v\n", err, *iPointer)
diff --git a/ee/api/.chalice/config.json b/ee/api/.chalice/config.json
index 605e5b7c1..5cda73bd3 100644
--- a/ee/api/.chalice/config.json
+++ b/ee/api/.chalice/config.json
@@ -31,14 +31,12 @@
"assign_link": "http://127.0.0.1:8000/async/email_assignment",
"captcha_server": "",
"captcha_key": "",
- "sessions_bucket": "asayer-mobs",
+ "sessions_bucket": "mobs",
"sessions_region": "us-east-1",
"put_S3_TTL": "20",
- "sourcemaps_bucket": "asayer-sourcemaps",
- "sourcemaps_bucket_key": "",
- "sourcemaps_bucket_secret": "",
- "sourcemaps_bucket_region": "us-east-1",
- "js_cache_bucket": "asayer-sessions-assets",
+ "sourcemaps_reader": "http://127.0.0.1:3000/",
+ "sourcemaps_bucket": "sourcemaps",
+ "js_cache_bucket": "sessions-assets",
"async_Token": "",
"EMAIL_HOST": "",
"EMAIL_PORT": "587",
diff --git a/ee/api/.gitignore b/ee/api/.gitignore
index 812abce9c..7e2873ee0 100644
--- a/ee/api/.gitignore
+++ b/ee/api/.gitignore
@@ -170,8 +170,8 @@ logs*.txt
*.csv
*.p
-*.js
SUBNETS.json
chalicelib/.config
-chalicelib/saas
\ No newline at end of file
+chalicelib/saas
+README/*
\ No newline at end of file
diff --git a/ee/api/app.py b/ee/api/app.py
index da75c1ac5..d604992a1 100644
--- a/ee/api/app.py
+++ b/ee/api/app.py
@@ -25,13 +25,13 @@ import traceback
old_tb = traceback.print_exception
old_f = sys.stdout
old_e = sys.stderr
-ASAYER_SESSION_ID = None
+OR_SESSION_TOKEN = None
class F:
def write(self, x):
- if ASAYER_SESSION_ID is not None and x != '\n' and not helper.is_local():
- old_f.write(f"[asayer_session_id={ASAYER_SESSION_ID}] {x}")
+ if OR_SESSION_TOKEN is not None and x != '\n' and not helper.is_local():
+ old_f.write(f"[or_session_token={OR_SESSION_TOKEN}] {x}")
else:
old_f.write(x)
@@ -40,9 +40,8 @@ class F:
def tb_print_exception(etype, value, tb, limit=None, file=None, chain=True):
- if ASAYER_SESSION_ID is not None and not helper.is_local():
- # bugsnag.notify(Exception(str(value)), meta_data={"special_info": {"asayerSessionId": ASAYER_SESSION_ID}})
- value = type(value)(f"[asayer_session_id={ASAYER_SESSION_ID}] " + str(value))
+ if OR_SESSION_TOKEN is not None and not helper.is_local():
+ value = type(value)(f"[or_session_token={OR_SESSION_TOKEN}] " + str(value))
old_tb(etype, value, tb, limit, file, chain)
@@ -59,7 +58,7 @@ _overrides.chalice_app(app)
@app.middleware('http')
-def asayer_middleware(event, get_response):
+def or_middleware(event, get_response):
from chalicelib.ee import unlock
if not unlock.is_valid():
return Response(body={"errors": ["expired license"]}, status_code=403)
@@ -68,12 +67,11 @@ def asayer_middleware(event, get_response):
if not projects.is_authorized(project_id=event.uri_params["projectId"],
tenant_id=event.context["authorizer"]["tenantId"]):
print("unauthorized project")
- # return {"errors": ["unauthorized project"]}
pg_client.close()
return Response(body={"errors": ["unauthorized project"]}, status_code=401)
- global ASAYER_SESSION_ID
- ASAYER_SESSION_ID = app.current_request.headers.get('vnd.openreplay.com.sid',
- app.current_request.headers.get('vnd.asayer.io.sid'))
+ global OR_SESSION_TOKEN
+ OR_SESSION_TOKEN = app.current_request.headers.get('vnd.openreplay.com.sid',
+ app.current_request.headers.get('vnd.asayer.io.sid'))
if "authorizer" in event.context and event.context["authorizer"] is None:
print("Deleted user!!")
pg_client.close()
@@ -84,19 +82,24 @@ def asayer_middleware(event, get_response):
import time
now = int(time.time() * 1000)
response = get_response(event)
+ if response.status_code == 500 and helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local():
+ with configure_scope() as scope:
+ scope.set_tag('stage', environ["stage"])
+ scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN)
+ scope.set_extra("context", event.context)
+ sentry_sdk.capture_exception(Exception(response.body))
if helper.TRACK_TIME:
print(f"Execution time: {int(time.time() * 1000) - now} ms")
except Exception as e:
- print("middleware exception handling")
- print(e)
- pg_client.close()
- if helper.allow_sentry() and ASAYER_SESSION_ID is not None and not helper.is_local():
+ if helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local():
with configure_scope() as scope:
scope.set_tag('stage', environ["stage"])
- scope.set_tag('openReplaySessionToken', ASAYER_SESSION_ID)
+ scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN)
scope.set_extra("context", event.context)
sentry_sdk.capture_exception(e)
- raise e
+ response = Response(body={"Code": "InternalServerError",
+ "Message": "An internal server error occurred [level=Fatal]."},
+ status_code=500)
pg_client.close()
return response
diff --git a/ee/api/chalicelib/blueprints/bp_core.py b/ee/api/chalicelib/blueprints/bp_core.py
index 3b2910606..bd42b2254 100644
--- a/ee/api/chalicelib/blueprints/bp_core.py
+++ b/ee/api/chalicelib/blueprints/bp_core.py
@@ -881,5 +881,5 @@ def all_issue_types(context):
@app.route('/flows', methods=['GET', 'PUT', 'POST', 'DELETE'])
@app.route('/{projectId}/flows', methods=['GET', 'PUT', 'POST', 'DELETE'])
-def removed_endpoints(context):
+def removed_endpoints(projectId=None, context=None):
return Response(body={"errors": ["Endpoint no longer available"]}, status_code=410)
diff --git a/ee/api/chalicelib/blueprints/bp_core_dynamic.py b/ee/api/chalicelib/blueprints/bp_core_dynamic.py
index 505f10cb9..6e45627df 100644
--- a/ee/api/chalicelib/blueprints/bp_core_dynamic.py
+++ b/ee/api/chalicelib/blueprints/bp_core_dynamic.py
@@ -73,10 +73,12 @@ def get_account(context):
"projects": -1,
"metadata": metadata.get_remaining_metadata_with_count(context['tenantId'])
},
- **license.get_status(context["tenantId"])
+ **license.get_status(context["tenantId"]),
+ "smtp": environ["EMAIL_HOST"] is not None and len(environ["EMAIL_HOST"]) > 0
}
}
+
@app.route('/projects', methods=['GET'])
def get_projects(context):
return {"data": projects.get_projects(tenant_id=context["tenantId"], recording_state=True, gdpr=True, recorded=True,
@@ -157,12 +159,27 @@ def add_slack_client(context):
data = app.current_request.json_body
if "url" not in data or "name" not in data:
return {"errors": ["please provide a url and a name"]}
- if Slack.add_integration(tenant_id=context["tenantId"], url=data["url"], name=data["name"]):
- return {"data": {"status": "success"}}
- else:
+ n = Slack.add_channel(tenant_id=context["tenantId"], url=data["url"], name=data["name"])
+ if n is None:
return {
- "errors": ["failed URL verification, if you received a message on slack, please notify our dev-team"]
+ "errors": ["We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
}
+ return {"data": n}
+
+
+@app.route('/integrations/slack/{integrationId}', methods=['POST', 'PUT'])
+def edit_slack_integration(integrationId, context):
+ data = app.current_request.json_body
+ if data.get("url") and len(data["url"]) > 0:
+ old = webhook.get(tenant_id=context["tenantId"], webhook_id=integrationId)
+ if old["endpoint"] != data["url"]:
+ if not Slack.say_hello(data["url"]):
+ return {
+ "errors": [
+ "We couldn't send you a test message on your Slack channel. Please verify your webhook url."]
+ }
+ return {"data": webhook.update(tenant_id=context["tenantId"], webhook_id=integrationId,
+ changes={"name": data.get("name", ""), "endpoint": data["url"]})}
@app.route('/{projectId}/errors/search', methods=['POST'])
@@ -391,6 +408,7 @@ def search_sessions_by_metadata(context):
m_key=key,
project_id=project_id)}
+
@app.route('/plans', methods=['GET'])
def get_current_plan(context):
return {
diff --git a/ee/api/chalicelib/core/collaboration_slack.py b/ee/api/chalicelib/core/collaboration_slack.py
index 5fc80511c..b3da03a37 100644
--- a/ee/api/chalicelib/core/collaboration_slack.py
+++ b/ee/api/chalicelib/core/collaboration_slack.py
@@ -6,19 +6,18 @@ from chalicelib.core import webhook
class Slack:
@classmethod
- def add_integration(cls, tenant_id, **args):
+ def add_channel(cls, tenant_id, **args):
url = args["url"]
name = args["name"]
- if cls.__say_hello(url):
- webhook.add(tenant_id=tenant_id,
- endpoint=url,
- webhook_type="slack",
- name=name)
- return True
- return False
+ if cls.say_hello(url):
+ return webhook.add(tenant_id=tenant_id,
+ endpoint=url,
+ webhook_type="slack",
+ name=name)
+ return None
@classmethod
- def __say_hello(cls, url):
+ def say_hello(cls, url):
r = requests.post(
url=url,
json={
diff --git a/ee/api/chalicelib/core/events.py b/ee/api/chalicelib/core/events.py
index 65ade49ed..69213a079 100644
--- a/ee/api/chalicelib/core/events.py
+++ b/ee/api/chalicelib/core/events.py
@@ -365,7 +365,7 @@ def __get_merged_queries(queries, value, project_id):
def __get_autocomplete_table(value, project_id):
with pg_client.PostgresClient() as cur:
cur.execute(cur.mogrify("""SELECT DISTINCT ON(value,type) project_id, value, type
- FROM (SELECT *
+ FROM (SELECT project_id, type, value
FROM (SELECT *,
ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID
FROM public.autocomplete
diff --git a/ee/api/chalicelib/core/integration_jira_cloud_issue.py b/ee/api/chalicelib/core/integration_jira_cloud_issue.py
index 00fac2fcb..bb847007a 100644
--- a/ee/api/chalicelib/core/integration_jira_cloud_issue.py
+++ b/ee/api/chalicelib/core/integration_jira_cloud_issue.py
@@ -34,7 +34,7 @@ class JIRACloudIntegrationIssue(BaseIntegrationIssue):
if len(projects_map[integration_project_id]) > 0:
jql += f" AND ID IN ({','.join(projects_map[integration_project_id])})"
issues = self._client.get_issues(jql, offset=0)
- results += [issues]
+ results += issues
return {"issues": results}
def get(self, integration_project_id, assignment_id):
diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py
index 9d9ff204a..56ba7c463 100644
--- a/ee/api/chalicelib/core/sessions.py
+++ b/ee/api/chalicelib/core/sessions.py
@@ -1,6 +1,6 @@
from chalicelib.utils import pg_client, helper
from chalicelib.utils import dev
-from chalicelib.core import events, sessions_metas, socket_ios, metadata, events_ios, sessions_mobs
+from chalicelib.core import events, sessions_metas, socket_ios, metadata, events_ios, sessions_mobs, issues
from chalicelib.ee import projects, errors
@@ -24,7 +24,7 @@ SESSION_PROJECTION_COLS = """s.project_id,
s.user_anonymous_id,
s.platform,
s.issue_score,
- s.issue_types::text[] AS issue_types,
+ to_jsonb(s.issue_types) AS issue_types,
favorite_sessions.session_id NOTNULL AS favorite,
COALESCE((SELECT TRUE
FROM public.user_viewed_sessions AS fs
@@ -83,7 +83,6 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id,
session_id=session_id)
data['mobsUrl'] = sessions_mobs.get_ios(sessionId=session_id)
- data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
data["socket"] = socket_ios.start_replay(project_id=project_id, session_id=session_id,
device=data["userDevice"],
os_version=data["userOsVersion"],
@@ -100,9 +99,11 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id,
session_id=session_id)
data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id)
- data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
data['resources'] = resources.get_by_session_id(session_id=session_id)
+ data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
+ data['issues'] = issues.get_by_session_id(session_id=session_id)
+
return data
return None
diff --git a/ee/api/chalicelib/core/sessions_assignments.py b/ee/api/chalicelib/core/sessions_assignments.py
index 2b9c28d8f..3e0929dad 100644
--- a/ee/api/chalicelib/core/sessions_assignments.py
+++ b/ee/api/chalicelib/core/sessions_assignments.py
@@ -119,7 +119,6 @@ def get_by_session(tenant_id, user_id, project_id, session_id):
continue
r = integration.issue_handler.get_by_ids(saved_issues=issues[tool])
- print(r)
for i in r["issues"]:
i["provider"] = tool
results += r["issues"]
diff --git a/ee/api/chalicelib/core/sessions_mobs.py b/ee/api/chalicelib/core/sessions_mobs.py
index b96662c67..80fe59b28 100644
--- a/ee/api/chalicelib/core/sessions_mobs.py
+++ b/ee/api/chalicelib/core/sessions_mobs.py
@@ -1,11 +1,11 @@
from chalicelib.utils import helper
from chalicelib.utils.helper import environ
-import boto3
+from chalicelib.utils.s3 import client
def get_web(sessionId):
- return boto3.client('s3', region_name=environ["sessions_region"]).generate_presigned_url(
+ return client.generate_presigned_url(
'get_object',
Params={
'Bucket': environ["sessions_bucket"],
@@ -16,7 +16,7 @@ def get_web(sessionId):
def get_ios(sessionId):
- return boto3.client('s3', region_name=environ["ios_region"]).generate_presigned_url(
+ return client.generate_presigned_url(
'get_object',
Params={
'Bucket': environ["ios_bucket"],
diff --git a/ee/api/chalicelib/core/sourcemaps.py b/ee/api/chalicelib/core/sourcemaps.py
index 5f82a31e2..dbd7213ea 100644
--- a/ee/api/chalicelib/core/sourcemaps.py
+++ b/ee/api/chalicelib/core/sourcemaps.py
@@ -79,7 +79,12 @@ def get_traces_group(project_id, payload):
payloads = {}
all_exists = True
for i, u in enumerate(frames):
+ print("===============================")
+ print(u["absPath"])
+ print("converted to:")
key = __get_key(project_id, u["absPath"]) # use filename instead?
+ print(key)
+ print("===============================")
if key not in payloads:
file_exists = s3.exists(environ['sourcemaps_bucket'], key)
all_exists = all_exists and file_exists
diff --git a/ee/api/chalicelib/core/sourcemaps_parser.py b/ee/api/chalicelib/core/sourcemaps_parser.py
index cb0463d55..b7c17f3d3 100644
--- a/ee/api/chalicelib/core/sourcemaps_parser.py
+++ b/ee/api/chalicelib/core/sourcemaps_parser.py
@@ -8,14 +8,9 @@ def get_original_trace(key, positions):
"key": key,
"positions": positions,
"padding": 5,
- "bucket": environ['sourcemaps_bucket'],
- "bucket_config": {
- "aws_access_key_id": environ["sourcemaps_bucket_key"],
- "aws_secret_access_key": environ["sourcemaps_bucket_secret"],
- "aws_region": environ["sourcemaps_bucket_region"]
- }
+ "bucket": environ['sourcemaps_bucket']
}
- r = requests.post(environ["sourcemaps"], json=payload)
+ r = requests.post(environ["sourcemaps_reader"], json=payload)
if r.status_code != 200:
return {}
diff --git a/ee/api/chalicelib/ee/webhook.py b/ee/api/chalicelib/ee/webhook.py
index 0a2406ab9..20e873f5c 100644
--- a/ee/api/chalicelib/ee/webhook.py
+++ b/ee/api/chalicelib/ee/webhook.py
@@ -8,7 +8,7 @@ def get_by_id(webhook_id):
cur.execute(
cur.mogrify("""\
SELECT
- w.*
+ webhook_id AS integration_id, webhook_id AS id, w.*
FROM public.webhooks AS w
where w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""",
{"webhook_id": webhook_id})
@@ -24,7 +24,7 @@ def get(tenant_id, webhook_id):
cur.execute(
cur.mogrify("""\
SELECT
- w.*
+ webhook_id AS integration_id, webhook_id AS id, w.*
FROM public.webhooks AS w
where w.webhook_id =%(webhook_id)s AND w.tenant_id =%(tenant_id)s AND deleted_at ISNULL;""",
{"webhook_id": webhook_id, "tenant_id": tenant_id})
@@ -40,7 +40,7 @@ def get_by_type(tenant_id, webhook_type):
cur.execute(
cur.mogrify("""\
SELECT
- w.webhook_id AS id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
+ w.webhook_id AS integration_id, w.webhook_id AS id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at
FROM public.webhooks AS w
where
w.tenant_id =%(tenant_id)s
@@ -59,7 +59,7 @@ def get_by_tenant(tenant_id, replace_none=False):
cur.execute(
cur.mogrify("""\
SELECT
- w.*
+ webhook_id AS integration_id, webhook_id AS id,w.*
FROM public.webhooks AS w
where
w.tenant_id =%(tenant_id)s
@@ -88,7 +88,7 @@ def update(tenant_id, webhook_id, changes, replace_none=False):
UPDATE public.webhooks
SET {','.join(sub_query)}
WHERE tenant_id =%(tenant_id)s AND webhook_id =%(id)s AND deleted_at ISNULL
- RETURNING *;""",
+ RETURNING webhook_id AS integration_id, webhook_id AS id,*;""",
{"tenant_id": tenant_id, "id": webhook_id, **changes})
)
w = helper.dict_to_camel_case(cur.fetchone())
@@ -105,7 +105,7 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="",
query = cur.mogrify("""\
INSERT INTO public.webhooks(tenant_id, endpoint,auth_header,type,name)
VALUES (%(tenant_id)s, %(endpoint)s, %(auth_header)s, %(type)s,%(name)s)
- RETURNING *;""",
+ RETURNING webhook_id AS integration_id, webhook_id AS id,*;""",
{"tenant_id": tenant_id, "endpoint": endpoint, "auth_header": auth_header,
"type": webhook_type, "name": name})
cur.execute(
diff --git a/ee/api/chalicelib/utils/jira_client.py b/ee/api/chalicelib/utils/jira_client.py
index 6da501bbe..a7ab92932 100644
--- a/ee/api/chalicelib/utils/jira_client.py
+++ b/ee/api/chalicelib/utils/jira_client.py
@@ -68,7 +68,8 @@ class JiraManager:
# print(issue.raw)
issue_dict_list.append(self.__parser_issue_info(issue, include_comments=False))
- return {"total": issues.total, "issues": issue_dict_list}
+ # return {"total": issues.total, "issues": issue_dict_list}
+ return issue_dict_list
def get_issue(self, issue_id: str):
try:
diff --git a/ee/api/chalicelib/utils/pg_client.py b/ee/api/chalicelib/utils/pg_client.py
index e95527d64..4df29be39 100644
--- a/ee/api/chalicelib/utils/pg_client.py
+++ b/ee/api/chalicelib/utils/pg_client.py
@@ -9,11 +9,26 @@ PG_CONFIG = {"host": environ["pg_host"],
"port": int(environ["pg_port"])}
# connexion pool for FOS & EE
-
from psycopg2 import pool
+from threading import Semaphore
+
+
+class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool):
+ def __init__(self, minconn, maxconn, *args, **kwargs):
+ self._semaphore = Semaphore(maxconn)
+ super().__init__(minconn, maxconn, *args, **kwargs)
+
+ def getconn(self, *args, **kwargs):
+ self._semaphore.acquire()
+ return super().getconn(*args, **kwargs)
+
+ def putconn(self, *args, **kwargs):
+ super().putconn(*args, **kwargs)
+ self._semaphore.release()
+
try:
- postgreSQL_pool = psycopg2.pool.ThreadedConnectionPool(6, 20, **PG_CONFIG)
+ postgreSQL_pool = ORThreadedConnectionPool(20, 100, **PG_CONFIG)
if (postgreSQL_pool):
print("Connection pool created successfully")
except (Exception, psycopg2.DatabaseError) as error:
@@ -21,13 +36,6 @@ except (Exception, psycopg2.DatabaseError) as error:
raise error
-# finally:
-# # closing database connection.
-# # use closeall method to close all the active connection if you want to turn of the application
-# if (postgreSQL_pool):
-# postgreSQL_pool.closeall
-# print("PostgreSQL connection pool is closed")
-
class PostgresClient:
connection = None
cursor = None
diff --git a/ee/api/chalicelib/utils/s3.py b/ee/api/chalicelib/utils/s3.py
index 29a8d28bc..c9516982f 100644
--- a/ee/api/chalicelib/utils/s3.py
+++ b/ee/api/chalicelib/utils/s3.py
@@ -3,6 +3,7 @@ from chalicelib.utils.helper import environ
import boto3
+import botocore
from botocore.client import Config
client = boto3.client('s3', endpoint_url=environ["S3_HOST"],
@@ -13,51 +14,17 @@ client = boto3.client('s3', endpoint_url=environ["S3_HOST"],
def exists(bucket, key):
- response = client.list_objects_v2(
- Bucket=bucket,
- Prefix=key,
- )
- for obj in response.get('Contents', []):
- if obj['Key'] == key:
- return True
- return False
-
-
-def get_presigned_url_for_sharing(bucket, expires_in, key, check_exists=False):
- if check_exists and not exists(bucket, key):
- return None
-
- return client.generate_presigned_url(
- 'get_object',
- Params={
- 'Bucket': bucket,
- 'Key': key
- },
- ExpiresIn=expires_in
- )
-
-
-def get_presigned_url_for_upload(bucket, expires_in, key):
- return client.generate_presigned_url(
- 'put_object',
- Params={
- 'Bucket': bucket,
- 'Key': key
- },
- ExpiresIn=expires_in
- )
-
-
-def get_file(source_bucket, source_key):
try:
- result = client.get_object(
- Bucket=source_bucket,
- Key=source_key
- )
- except ClientError as ex:
- if ex.response['Error']['Code'] == 'NoSuchKey':
- print(f'======> No object found - returning None for {source_bucket}/{source_key}')
- return None
+ boto3.resource('s3', endpoint_url=environ["S3_HOST"],
+ aws_access_key_id=environ["S3_KEY"],
+ aws_secret_access_key=environ["S3_SECRET"],
+ config=Config(signature_version='s3v4'),
+ region_name='us-east-1') \
+ .Object(bucket, key).load()
+ except botocore.exceptions.ClientError as e:
+ if e.response['Error']['Code'] == "404":
+ return False
else:
- raise ex
- return result["Body"].read().decode()
+ # Something else has gone wrong.
+ raise
+ return True
diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt
index 3944c0923..4fa698105 100644
--- a/ee/api/requirements.txt
+++ b/ee/api/requirements.txt
@@ -5,9 +5,6 @@ pyjwt==1.7.1
psycopg2-binary==2.8.6
pytz==2020.1
sentry-sdk==0.19.1
-rollbar==0.15.1
-bugsnag==4.0.1
-kubernetes==12.0.0
elasticsearch==7.9.1
jira==2.0.0
schedule==1.1.0
diff --git a/ee/api/sourcemaps_reader/handler.js b/ee/api/sourcemaps_reader/handler.js
new file mode 100644
index 000000000..117808cae
--- /dev/null
+++ b/ee/api/sourcemaps_reader/handler.js
@@ -0,0 +1,111 @@
+'use strict';
+const sourceMap = require('source-map');
+const AWS = require('aws-sdk');
+const sourceMapVersion = require('./package.json').dependencies["source-map"];
+const URL = require('url');
+const getVersion = version => version.replace(/[\^\$\=\~]/, "");
+
+module.exports.sourcemapReader = async event => {
+ sourceMap.SourceMapConsumer.initialize({
+ "lib/mappings.wasm": `https://unpkg.com/source-map@${getVersion(sourceMapVersion)}/lib/mappings.wasm`
+ });
+ let s3;
+ if (process.env.S3_HOST) {
+ s3 = new AWS.S3({
+ endpoint: process.env.S3_HOST,
+ accessKeyId: process.env.S3_KEY,
+ secretAccessKey: process.env.S3_SECRET,
+ s3ForcePathStyle: true, // needed with minio?
+ signatureVersion: 'v4'
+ });
+ } else {
+ s3 = new AWS.S3({
+ 'AccessKeyID': process.env.aws_access_key_id,
+ 'SecretAccessKey': process.env.aws_secret_access_key,
+ 'Region': process.env.aws_region
+ });
+ }
+
+ var options = {
+ Bucket: event.bucket,
+ Key: event.key
+ };
+ return new Promise(function (resolve, reject) {
+ s3.getObject(options, (err, data) => {
+ if (err) {
+ console.log("Get S3 object failed");
+ console.log(err);
+ return reject(err);
+ }
+ const sourcemap = data.Body.toString();
+
+ return new sourceMap.SourceMapConsumer(sourcemap)
+ .then(consumer => {
+ let results = [];
+ for (let i = 0; i < event.positions.length; i++) {
+ let original = consumer.originalPositionFor({
+ line: event.positions[i].line,
+ column: event.positions[i].column
+ });
+ let url = URL.parse("");
+ let preview = [];
+ if (original.source) {
+ preview = consumer.sourceContentFor(original.source, true);
+ if (preview !== null) {
+ preview = preview.split("\n")
+ .map((line, i) => [i + 1, line]);
+ if (event.padding) {
+ let start = original.line < event.padding ? 0 : original.line - event.padding;
+ preview = preview.slice(start, original.line + event.padding);
+ }
+ } else {
+ console.log("source not found, null preview for:");
+ console.log(original.source);
+ preview = []
+ }
+ url = URL.parse(original.source);
+ } else {
+ console.log("couldn't find original position of:");
+ console.log({
+ line: event.positions[i].line,
+ column: event.positions[i].column
+ });
+ }
+ let result = {
+ "absPath": url.href,
+ "filename": url.pathname,
+ "lineNo": original.line,
+ "colNo": original.column,
+ "function": original.name,
+ "context": preview
+ };
+ // console.log(result);
+ results.push(result);
+ }
+
+ // Use this code if you don't use the http event with the LAMBDA-PROXY integration
+ return resolve(results);
+ });
+ });
+ });
+};
+
+
+// let v = {
+// 'key': '1725/99f96f044fa7e941dbb15d7d68b20549',
+// 'positions': [{'line': 1, 'column': 943}],
+// 'padding': 5,
+// 'bucket': 'asayer-sourcemaps'
+// };
+// let v = {
+// 'key': '1/65d8d3866bb8c92f3db612cb330f270c',
+// 'positions': [{'line': 1, 'column': 0}],
+// 'padding': 5,
+// 'bucket': 'asayer-sourcemaps-staging'
+// };
+// module.exports.sourcemapReader(v).then((r) => {
+// // console.log(r);
+// const fs = require('fs');
+// let data = JSON.stringify(r);
+// fs.writeFileSync('results.json', data);
+// });
\ No newline at end of file
diff --git a/ee/api/sourcemaps_reader/server.js b/ee/api/sourcemaps_reader/server.js
new file mode 100644
index 000000000..2a1c4dcf6
--- /dev/null
+++ b/ee/api/sourcemaps_reader/server.js
@@ -0,0 +1,38 @@
+const http = require('http');
+const handler = require('./handler');
+const hostname = '127.0.0.1';
+const port = 3000;
+
+const server = http.createServer((req, res) => {
+ if (req.method === 'POST') {
+ let data = '';
+ req.on('data', chunk => {
+ data += chunk;
+ });
+ req.on('end', function () {
+ data = JSON.parse(data);
+ console.log("Starting parser for: " + data.key);
+ // process.env = {...process.env, ...data.bucket_config};
+ handler.sourcemapReader(data)
+ .then((results) => {
+ res.statusCode = 200;
+ res.setHeader('Content-Type', 'application/json');
+ res.end(JSON.stringify(results));
+ })
+ .catch((e) => {
+ console.error("Something went wrong");
+ console.error(e);
+ res.statusCode = 500;
+ res.end(String(e));
+ });
+ })
+ } else {
+ res.statusCode = 405;
+ res.setHeader('Content-Type', 'text/plain');
+ res.end('Method Not Allowed');
+ }
+});
+
+server.listen(port, hostname, () => {
+ console.log(`Server running at http://${hostname}:${port}/`);
+});
\ No newline at end of file
diff --git a/ee/connectors/bigquery_utils/create_table.py b/ee/connectors/bigquery_utils/create_table.py
new file mode 100644
index 000000000..4b166e4ae
--- /dev/null
+++ b/ee/connectors/bigquery_utils/create_table.py
@@ -0,0 +1,357 @@
+import os
+from google.cloud import bigquery
+
+from db.loaders.bigquery_loader import creds_file
+
+
+def create_tables_bigquery():
+ create_sessions_table(creds_file=creds_file,
+ table_id=f"{os.environ['project_id']}.{os.environ['dataset']}.{os.environ['sessions_table']}")
+    print(f"`{os.environ['sessions_table']}` table created successfully.")
+ create_events_table(creds_file=creds_file,
+ table_id=f"{os.environ['project_id']}.{os.environ['dataset']}.{os.environ['events_table_name']}")
+    print(f"`{os.environ['events_table_name']}` table created successfully.")
+
+
+def create_table(creds_file, table_id, schema):
+ client = bigquery.Client.from_service_account_json(creds_file)
+ table = bigquery.Table(table_id, schema=schema)
+ table = client.create_table(table) # Make an API request.
+ print(
+ "Created table {}.{}.{}".format(table.project, table.dataset_id, table.table_id)
+ )
+
+
+def create_sessions_table(creds_file, table_id):
+ schema = [
+ bigquery.SchemaField("sessionid", "INT64", mode="REQUIRED"),
+ bigquery.SchemaField("user_agent", "STRING"),
+ bigquery.SchemaField("user_browser", "STRING"),
+ bigquery.SchemaField("user_browser_version", "STRING"),
+ bigquery.SchemaField("user_country", "STRING"),
+ bigquery.SchemaField("user_device", "STRING"),
+ bigquery.SchemaField("user_device_heap_size", "INT64"),
+ bigquery.SchemaField("user_device_memory_size", "INT64"),
+
+ bigquery.SchemaField("user_device_type", "STRING"),
+ bigquery.SchemaField("user_os", "STRING"),
+ bigquery.SchemaField("user_os_version", "STRING"),
+ bigquery.SchemaField("user_uuid", "STRING"),
+ bigquery.SchemaField("connection_effective_bandwidth", "INT64"),
+
+ bigquery.SchemaField("connection_type", "STRING"),
+ bigquery.SchemaField("metadata_key", "STRING"),
+ bigquery.SchemaField("metadata_value", "STRING"),
+ bigquery.SchemaField("referrer", "STRING"),
+ bigquery.SchemaField("user_anonymous_id", "STRING"),
+ bigquery.SchemaField("user_id", "STRING"),
+ bigquery.SchemaField("session_start_timestamp", "INT64"),
+ bigquery.SchemaField("session_end_timestamp", "INT64"),
+ bigquery.SchemaField("session_duration", "INT64"),
+
+ bigquery.SchemaField("first_contentful_paint", "INT64"),
+ bigquery.SchemaField("speed_index", "INT64"),
+ bigquery.SchemaField("visually_complete", "INT64"),
+ bigquery.SchemaField("timing_time_to_interactive", "INT64"),
+
+ bigquery.SchemaField("avg_cpu", "INT64"),
+ bigquery.SchemaField("avg_fps", "INT64"),
+ bigquery.SchemaField("max_cpu", "INT64"),
+ bigquery.SchemaField("max_fps", "INT64"),
+ bigquery.SchemaField("max_total_js_heap_size", "INT64"),
+ bigquery.SchemaField("max_used_js_heap_size", "INT64"),
+
+ bigquery.SchemaField("js_exceptions_count", "INT64"),
+ bigquery.SchemaField("long_tasks_total_duration", "INT64"),
+ bigquery.SchemaField("long_tasks_max_duration", "INT64"),
+ bigquery.SchemaField("long_tasks_count", "INT64"),
+ bigquery.SchemaField("inputs_count", "INT64"),
+ bigquery.SchemaField("clicks_count", "INT64"),
+ bigquery.SchemaField("issues_count", "INT64"),
+ bigquery.SchemaField("issues", "STRING"),
+ bigquery.SchemaField("urls_count", "INT64"),
+ bigquery.SchemaField("urls", "STRING")]
+ create_table(creds_file, table_id, schema)
+
+
+def create_events_table(creds_file, table_id):
+
+ schema = [
+ bigquery.SchemaField("sessionid", "INT64"),
+ bigquery.SchemaField("connectioninformation_downlink", "INT64"),
+ bigquery.SchemaField("connectioninformation_type", "STRING"),
+ bigquery.SchemaField("consolelog_level", "STRING"),
+ bigquery.SchemaField("consolelog_value", "STRING"),
+ bigquery.SchemaField("customevent_messageid", "INT64"),
+ bigquery.SchemaField("customevent_name", "STRING"),
+ bigquery.SchemaField("customevent_payload", "STRING"),
+ bigquery.SchemaField("customevent_timestamp", "INT64"),
+ bigquery.SchemaField("errorevent_message", "STRING"),
+ bigquery.SchemaField("errorevent_messageid", "INT64"),
+ bigquery.SchemaField("errorevent_name", "STRING"),
+ bigquery.SchemaField("errorevent_payload", "STRING"),
+ bigquery.SchemaField("errorevent_source", "STRING"),
+ bigquery.SchemaField("errorevent_timestamp", "INT64"),
+ bigquery.SchemaField("jsexception_message", "STRING"),
+ bigquery.SchemaField("jsexception_name", "STRING"),
+ bigquery.SchemaField("jsexception_payload", "STRING"),
+ bigquery.SchemaField("metadata_key", "STRING"),
+ bigquery.SchemaField("metadata_value", "STRING"),
+ bigquery.SchemaField("mouseclick_id", "INT64"),
+ bigquery.SchemaField("mouseclick_hesitationtime", "INT64"),
+ bigquery.SchemaField("mouseclick_label", "STRING"),
+ bigquery.SchemaField("pageevent_firstcontentfulpaint", "INT64"),
+ bigquery.SchemaField("pageevent_firstpaint", "INT64"),
+ bigquery.SchemaField("pageevent_messageid", "INT64"),
+ bigquery.SchemaField("pageevent_referrer", "STRING"),
+ bigquery.SchemaField("pageevent_speedindex", "INT64"),
+ bigquery.SchemaField("pageevent_timestamp", "INT64"),
+ bigquery.SchemaField("pageevent_url", "STRING"),
+ bigquery.SchemaField("pagerendertiming_timetointeractive", "INT64"),
+ bigquery.SchemaField("pagerendertiming_visuallycomplete", "INT64"),
+ bigquery.SchemaField("rawcustomevent_name", "STRING"),
+ bigquery.SchemaField("rawcustomevent_payload", "STRING"),
+ bigquery.SchemaField("setviewportsize_height", "INT64"),
+ bigquery.SchemaField("setviewportsize_width", "INT64"),
+ bigquery.SchemaField("timestamp_timestamp", "INT64"),
+ bigquery.SchemaField("user_anonymous_id", "STRING"),
+ bigquery.SchemaField("user_id", "STRING"),
+ bigquery.SchemaField("issueevent_messageid", "INT64"),
+ bigquery.SchemaField("issueevent_timestamp", "INT64"),
+ bigquery.SchemaField("issueevent_type", "STRING"),
+ bigquery.SchemaField("issueevent_contextstring", "STRING"),
+ bigquery.SchemaField("issueevent_context", "STRING"),
+ bigquery.SchemaField("issueevent_payload", "STRING"),
+ bigquery.SchemaField("customissue_name", "STRING"),
+ bigquery.SchemaField("customissue_payload", "STRING"),
+ bigquery.SchemaField("received_at", "INT64"),
+ bigquery.SchemaField("batch_order_number", "INT64")]
+ create_table(creds_file, table_id, schema)
+
+
+def create_table_negatives(creds_file, table_id):
+ client = bigquery.Client.from_service_account_json(creds_file)
+
+ schema = [
+ bigquery.SchemaField("sessionid", "INT64", mode="REQUIRED"),
+ bigquery.SchemaField("clickevent_hesitationtime", "INT64"),
+ bigquery.SchemaField("clickevent_label", "STRING"),
+ bigquery.SchemaField("clickevent_messageid", "INT64"),
+ bigquery.SchemaField("clickevent_timestamp", "INT64"),
+ bigquery.SchemaField("connectioninformation_downlink", "INT64"),
+ bigquery.SchemaField("connectioninformation_type", "STRING"),
+ bigquery.SchemaField("consolelog_level", "STRING"),
+ bigquery.SchemaField("consolelog_value", "STRING"),
+ bigquery.SchemaField("cpuissue_duration", "INT64"),
+ bigquery.SchemaField("cpuissue_rate", "INT64"),
+ bigquery.SchemaField("cpuissue_timestamp", "INT64"),
+ bigquery.SchemaField("createdocument", "BOOL"),
+ bigquery.SchemaField("createelementnode_id", "INT64"),
+ bigquery.SchemaField("createelementnode_parentid", "INT64"),
+ bigquery.SchemaField("cssdeleterule_index", "INT64"),
+ bigquery.SchemaField("cssdeleterule_stylesheetid", "INT64"),
+ bigquery.SchemaField("cssinsertrule_index", "INT64"),
+ bigquery.SchemaField("cssinsertrule_rule", "STRING"),
+ bigquery.SchemaField("cssinsertrule_stylesheetid", "INT64"),
+ bigquery.SchemaField("customevent_messageid", "INT64"),
+ bigquery.SchemaField("customevent_name", "STRING"),
+ bigquery.SchemaField("customevent_payload", "STRING"),
+ bigquery.SchemaField("customevent_timestamp", "INT64"),
+ bigquery.SchemaField("domdrop_timestamp", "INT64"),
+ bigquery.SchemaField("errorevent_message", "STRING"),
+ bigquery.SchemaField("errorevent_messageid", "INT64"),
+ bigquery.SchemaField("errorevent_name", "STRING"),
+ bigquery.SchemaField("errorevent_payload", "STRING"),
+ bigquery.SchemaField("errorevent_source", "STRING"),
+ bigquery.SchemaField("errorevent_timestamp", "INT64"),
+ bigquery.SchemaField("fetch_duration", "INT64"),
+ bigquery.SchemaField("fetch_method", "STRING"),
+ bigquery.SchemaField("fetch_request", "STRING"),
+ bigquery.SchemaField("fetch_response", "STRING"),
+ bigquery.SchemaField("fetch_status", "INT64"),
+ bigquery.SchemaField("fetch_timestamp", "INT64"),
+ bigquery.SchemaField("fetch_url", "STRING"),
+ bigquery.SchemaField("graphql_operationkind", "STRING"),
+ bigquery.SchemaField("graphql_operationname", "STRING"),
+ bigquery.SchemaField("graphql_response", "STRING"),
+ bigquery.SchemaField("graphql_variables", "STRING"),
+ bigquery.SchemaField("graphqlevent_messageid", "INT64"),
+ bigquery.SchemaField("graphqlevent_name", "STRING"),
+ bigquery.SchemaField("graphqlevent_timestamp", "INT64"),
+ bigquery.SchemaField("inputevent_label", "STRING"),
+ bigquery.SchemaField("inputevent_messageid", "INT64"),
+ bigquery.SchemaField("inputevent_timestamp", "INT64"),
+ bigquery.SchemaField("inputevent_value", "STRING"),
+ bigquery.SchemaField("inputevent_valuemasked", "BOOL"),
+ bigquery.SchemaField("is_asayer_event", "BOOL"),
+ bigquery.SchemaField("jsexception_message", "STRING"),
+ bigquery.SchemaField("jsexception_name", "STRING"),
+ bigquery.SchemaField("jsexception_payload", "STRING"),
+ bigquery.SchemaField("longtasks_timestamp", "INT64"),
+ bigquery.SchemaField("longtasks_duration", "INT64"),
+ bigquery.SchemaField("longtasks_containerid", "STRING"),
+ bigquery.SchemaField("longtasks_containersrc", "STRING"),
+ bigquery.SchemaField("memoryissue_duration", "INT64"),
+ bigquery.SchemaField("memoryissue_rate", "INT64"),
+ bigquery.SchemaField("memoryissue_timestamp", "INT64"),
+ bigquery.SchemaField("metadata_key", "STRING"),
+ bigquery.SchemaField("metadata_value", "STRING"),
+ bigquery.SchemaField("mobx_payload", "STRING"),
+ bigquery.SchemaField("mobx_type", "STRING"),
+ bigquery.SchemaField("mouseclick_id", "INT64"),
+ bigquery.SchemaField("mouseclick_hesitationtime", "INT64"),
+ bigquery.SchemaField("mouseclick_label", "STRING"),
+ bigquery.SchemaField("mousemove_x", "INT64"),
+ bigquery.SchemaField("mousemove_y", "INT64"),
+ bigquery.SchemaField("movenode_id", "INT64"),
+ bigquery.SchemaField("movenode_index", "INT64"),
+ bigquery.SchemaField("movenode_parentid", "INT64"),
+ bigquery.SchemaField("ngrx_action", "STRING"),
+ bigquery.SchemaField("ngrx_duration", "INT64"),
+ bigquery.SchemaField("ngrx_state", "STRING"),
+ bigquery.SchemaField("otable_key", "STRING"),
+ bigquery.SchemaField("otable_value", "STRING"),
+ bigquery.SchemaField("pageevent_domcontentloadedeventend", "INT64"),
+ bigquery.SchemaField("pageevent_domcontentloadedeventstart", "INT64"),
+ bigquery.SchemaField("pageevent_firstcontentfulpaint", "INT64"),
+ bigquery.SchemaField("pageevent_firstpaint", "INT64"),
+ bigquery.SchemaField("pageevent_loaded", "BOOL"),
+ bigquery.SchemaField("pageevent_loadeventend", "INT64"),
+ bigquery.SchemaField("pageevent_loadeventstart", "INT64"),
+ bigquery.SchemaField("pageevent_messageid", "INT64"),
+ bigquery.SchemaField("pageevent_referrer", "STRING"),
+ bigquery.SchemaField("pageevent_requeststart", "INT64"),
+ bigquery.SchemaField("pageevent_responseend", "INT64"),
+ bigquery.SchemaField("pageevent_responsestart", "INT64"),
+ bigquery.SchemaField("pageevent_speedindex", "INT64"),
+ bigquery.SchemaField("pageevent_timestamp", "INT64"),
+ bigquery.SchemaField("pageevent_url", "STRING"),
+ bigquery.SchemaField("pageloadtiming_domcontentloadedeventend", "INT64"),
+ bigquery.SchemaField("pageloadtiming_domcontentloadedeventstart", "INT64"),
+ bigquery.SchemaField("pageloadtiming_firstcontentfulpaint", "INT64"),
+ bigquery.SchemaField("pageloadtiming_firstpaint", "INT64"),
+ bigquery.SchemaField("pageloadtiming_loadeventend", "INT64"),
+ bigquery.SchemaField("pageloadtiming_loadeventstart", "INT64"),
+ bigquery.SchemaField("pageloadtiming_requeststart", "INT64"),
+ bigquery.SchemaField("pageloadtiming_responseend", "INT64"),
+ bigquery.SchemaField("pageloadtiming_responsestart", "INT64"),
+ bigquery.SchemaField("pagerendertiming_speedindex", "INT64"),
+ bigquery.SchemaField("pagerendertiming_timetointeractive", "INT64"),
+ bigquery.SchemaField("pagerendertiming_visuallycomplete", "INT64"),
+ bigquery.SchemaField("performancetrack_frames", "INT64"),
+ bigquery.SchemaField("performancetrack_ticks", "INT64"),
+ bigquery.SchemaField("performancetrack_totaljsheapsize", "INT64"),
+ bigquery.SchemaField("performancetrack_usedjsheapsize", "INT64"),
+ bigquery.SchemaField("performancetrackaggr_avgcpu", "INT64"),
+ bigquery.SchemaField("performancetrackaggr_avgfps", "INT64"),
+ bigquery.SchemaField("performancetrackaggr_avgtotaljsheapsize", "INT64"),
+ bigquery.SchemaField("performancetrackaggr_avgusedjsheapsize", "INT64"),
+ bigquery.SchemaField("performancetrackaggr_maxcpu", "INT64"),
+ bigquery.SchemaField("performancetrackaggr_maxfps", "INT64"),
+ bigquery.SchemaField("performancetrackaggr_maxtotaljsheapsize", "INT64"),
+ bigquery.SchemaField("performancetrackaggr_maxusedjsheapsize", "INT64"),
+ bigquery.SchemaField("performancetrackaggr_mincpu", "INT64"),
+ bigquery.SchemaField("performancetrackaggr_minfps", "INT64"),
+ bigquery.SchemaField("performancetrackaggr_mintotaljsheapsize", "INT64"),
+ bigquery.SchemaField("performancetrackaggr_minusedjsheapsize", "INT64"),
+ bigquery.SchemaField("performancetrackaggr_timestampend", "INT64"),
+ bigquery.SchemaField("performancetrackaggr_timestampstart", "INT64"),
+ bigquery.SchemaField("profiler_args", "STRING"),
+ bigquery.SchemaField("profiler_duration", "INT64"),
+ bigquery.SchemaField("profiler_name", "STRING"),
+ bigquery.SchemaField("profiler_result", "STRING"),
+ bigquery.SchemaField("rawcustomevent_name", "STRING"),
+ bigquery.SchemaField("rawcustomevent_payload", "STRING"),
+ bigquery.SchemaField("rawerrorevent_message", "STRING"),
+ bigquery.SchemaField("rawerrorevent_name", "STRING"),
+ bigquery.SchemaField("rawerrorevent_payload", "STRING"),
+ bigquery.SchemaField("rawerrorevent_source", "STRING"),
+ bigquery.SchemaField("rawerrorevent_timestamp", "INT64"),
+ bigquery.SchemaField("redux_action", "STRING"),
+ bigquery.SchemaField("redux_duration", "INT64"),
+ bigquery.SchemaField("redux_state", "STRING"),
+ bigquery.SchemaField("removenode_id", "INT64"),
+ bigquery.SchemaField("removenodeattribute_id", "INT64"),
+ bigquery.SchemaField("removenodeattribute_name", "STRING"),
+ bigquery.SchemaField("resourceevent_decodedbodysize", "INT64"),
+ bigquery.SchemaField("resourceevent_duration", "INT64"),
+ bigquery.SchemaField("resourceevent_encodedbodysize", "INT64"),
+ bigquery.SchemaField("resourceevent_headersize", "INT64"),
+ bigquery.SchemaField("resourceevent_messageid", "INT64"),
+ bigquery.SchemaField("resourceevent_method", "STRING"),
+ bigquery.SchemaField("resourceevent_status", "INT64"),
+ bigquery.SchemaField("resourceevent_success", "BOOL"),
+ bigquery.SchemaField("resourceevent_timestamp", "INT64"),
+ bigquery.SchemaField("resourceevent_ttfb", "INT64"),
+ bigquery.SchemaField("resourceevent_type", "STRING"),
+ bigquery.SchemaField("resourceevent_url", "STRING"),
+ bigquery.SchemaField("resourcetiming_decodedbodysize", "INT64"),
+ bigquery.SchemaField("resourcetiming_duration", "INT64"),
+ bigquery.SchemaField("resourcetiming_encodedbodysize", "INT64"),
+ bigquery.SchemaField("resourcetiming_headersize", "INT64"),
+ bigquery.SchemaField("resourcetiming_initiator", "STRING"),
+ bigquery.SchemaField("resourcetiming_timestamp", "INT64"),
+ bigquery.SchemaField("resourcetiming_ttfb", "INT64"),
+ bigquery.SchemaField("resourcetiming_url", "STRING"),
+ bigquery.SchemaField("sessiondisconnect", "BOOL"),
+ bigquery.SchemaField("sessiondisconnect_timestamp", "INT64"),
+ bigquery.SchemaField("sessionend", "BOOL"),
+ bigquery.SchemaField("sessionend_timestamp", "INT64"),
+ bigquery.SchemaField("sessionstart_projectid", "INT64"),
+ bigquery.SchemaField("sessionstart_revid", "STRING"),
+ bigquery.SchemaField("sessionstart_timestamp", "INT64"),
+ bigquery.SchemaField("sessionstart_trackerversion", "STRING"),
+ bigquery.SchemaField("sessionstart_useragent", "STRING"),
+ bigquery.SchemaField("sessionstart_userbrowser", "STRING"),
+ bigquery.SchemaField("sessionstart_userbrowserversion", "STRING"),
+ bigquery.SchemaField("sessionstart_usercountry", "STRING"),
+ bigquery.SchemaField("sessionstart_userdevice", "STRING"),
+ bigquery.SchemaField("sessionstart_userdeviceheapsize", "INT64"),
+ bigquery.SchemaField("sessionstart_userdevicememorysize", "INT64"),
+ bigquery.SchemaField("sessionstart_userdevicetype", "STRING"),
+ bigquery.SchemaField("sessionstart_useros", "STRING"),
+ bigquery.SchemaField("sessionstart_userosversion", "STRING"),
+ bigquery.SchemaField("sessionstart_useruuid", "STRING"),
+ bigquery.SchemaField("setcssdata_data", "INT64"),
+ bigquery.SchemaField("setcssdata_id", "INT64"),
+ bigquery.SchemaField("setinputchecked_checked", "INT64"),
+ bigquery.SchemaField("setinputchecked_id", "INT64"),
+ bigquery.SchemaField("setinputtarget_id", "INT64"),
+ bigquery.SchemaField("setinputtarget_label", "INT64"),
+ bigquery.SchemaField("setinputvalue_id", "INT64"),
+ bigquery.SchemaField("setinputvalue_mask", "INT64"),
+ bigquery.SchemaField("setinputvalue_value", "INT64"),
+ bigquery.SchemaField("setnodeattribute_id", "INT64"),
+ bigquery.SchemaField("setnodeattribute_name", "INT64"),
+ bigquery.SchemaField("setnodeattribute_value", "INT64"),
+ bigquery.SchemaField("setnodedata_data", "INT64"),
+ bigquery.SchemaField("setnodedata_id", "INT64"),
+ bigquery.SchemaField("setnodescroll_id", "INT64"),
+ bigquery.SchemaField("setnodescroll_x", "INT64"),
+ bigquery.SchemaField("setnodescroll_y", "INT64"),
+ bigquery.SchemaField("setpagelocation_navigationstart", "INT64"),
+ bigquery.SchemaField("setpagelocation_referrer", "STRING"),
+ bigquery.SchemaField("setpagelocation_url", "STRING"),
+ bigquery.SchemaField("setpagevisibility_hidden", "BOOL"),
+ bigquery.SchemaField("setviewportscroll_x", "BOOL"),
+ bigquery.SchemaField("setviewportscroll_y", "BOOL"),
+ bigquery.SchemaField("setviewportsize_height", "INT64"),
+ bigquery.SchemaField("setviewportsize_width", "INT64"),
+ bigquery.SchemaField("stateaction_type", "STRING"),
+ bigquery.SchemaField("stateactionevent_messageid", "INT64"),
+ bigquery.SchemaField("stateactionevent_timestamp", "INT64"),
+ bigquery.SchemaField("stateactionevent_type", "STRING"),
+ bigquery.SchemaField("timestamp_timestamp", "INT64"),
+ bigquery.SchemaField("useranonymousid_id", "STRING"),
+ bigquery.SchemaField("userid_id", "STRING"),
+ bigquery.SchemaField("vuex_mutation", "STRING"),
+ bigquery.SchemaField("vuex_state", "STRING"),
+ bigquery.SchemaField("received_at", "INT64", mode="REQUIRED"),
+ bigquery.SchemaField("batch_order_number", "INT64", mode="REQUIRED")
+ ]
+
+ table = bigquery.Table(table_id, schema=schema)
+ table = client.create_table(table) # Make an API request.
+ print(
+ "Created table {}.{}.{}".format(table.project, table.dataset_id, table.table_id)
+ )
diff --git a/ee/connectors/db/api.py b/ee/connectors/db/api.py
new file mode 100644
index 000000000..33abf67cc
--- /dev/null
+++ b/ee/connectors/db/api.py
@@ -0,0 +1,129 @@
+from sqlalchemy import create_engine
+from sqlalchemy import MetaData
+from sqlalchemy.orm import sessionmaker, session
+from contextlib import contextmanager
+import logging
+import os
+from pathlib import Path
+
+DATABASE = os.environ['DATABASE_NAME']
+if DATABASE == 'redshift':
+ import pandas_redshift as pr
+
+base_path = Path(__file__).parent.parent
+
+from db.models import Base
+
+logger = logging.getLogger(__file__)
+
+
+def get_class_by_tablename(tablename):
+ """Return class reference mapped to table.
+ Raise an exception if class not found
+
+ :param tablename: String with name of table.
+ :return: Class reference.
+ """
+ for c in Base._decl_class_registry.values():
+ if hasattr(c, '__tablename__') and c.__tablename__ == tablename:
+ return c
+ raise AttributeError(f'No model with tablename "{tablename}"')
+
+
+class DBConnection:
+ """
+ Initializes connection to a database
+ To update models file use:
+ sqlacodegen --outfile models_universal.py mysql+pymysql://{user}:{pwd}@{address}
+ """
+ _sessions = sessionmaker()
+
+ def __init__(self, config) -> None:
+ self.metadata = MetaData()
+ self.config = config
+
+ if config == 'redshift':
+ self.pdredshift = pr
+ self.pdredshift.connect_to_redshift(dbname=os.environ['schema'],
+ host=os.environ['address'],
+ port=os.environ['port'],
+ user=os.environ['user'],
+ password=os.environ['password'])
+
+ self.pdredshift.connect_to_s3(aws_access_key_id=os.environ['aws_access_key_id'],
+ aws_secret_access_key=os.environ['aws_secret_access_key'],
+ bucket=os.environ['bucket'],
+ subdirectory=os.environ['subdirectory'])
+
+ self.connect_str = os.environ['connect_str'].format(
+ user=os.environ['user'],
+ password=os.environ['password'],
+ address=os.environ['address'],
+ port=os.environ['port'],
+ schema=os.environ['schema']
+ )
+ self.engine = create_engine(self.connect_str)
+
+ elif config == 'clickhouse':
+ self.connect_str = os.environ['connect_str'].format(
+ address=os.environ['address'],
+ database=os.environ['database']
+ )
+ self.engine = create_engine(self.connect_str)
+ elif config == 'pg':
+ self.connect_str = os.environ['connect_str'].format(
+ user=os.environ['user'],
+ password=os.environ['password'],
+ address=os.environ['address'],
+ port=os.environ['port'],
+ database=os.environ['database']
+ )
+ self.engine = create_engine(self.connect_str)
+ elif config == 'bigquery':
+ pass
+ elif config == 'snowflake':
+ self.connect_str = os.environ['connect_str'].format(
+ user=os.environ['user'],
+ password=os.environ['password'],
+ account=os.environ['account'],
+ database=os.environ['database'],
+                schema=os.environ['schema'],
+                warehouse=os.environ['warehouse']
+ )
+ self.engine = create_engine(self.connect_str)
+ else:
+ raise ValueError("This db configuration doesn't exist. Add into keys file.")
+
+ @contextmanager
+ def get_test_session(self, **kwargs) -> session:
+ """
+ Test session context, even commits won't be persisted into db.
+ :Keyword Arguments:
+ * autoflush (``bool``) -- default: True
+ * autocommit (``bool``) -- default: False
+ * expire_on_commit (``bool``) -- default: True
+ """
+ connection = self.engine.connect()
+ transaction = connection.begin()
+ my_session = type(self)._sessions(bind=connection, **kwargs)
+ yield my_session
+
+ # Do cleanup, rollback and closing, whatever happens
+ my_session.close()
+ transaction.rollback()
+ connection.close()
+
+ @contextmanager
+ def get_live_session(self) -> session:
+ """
+ This is a session that can be committed.
+ Changes will be reflected in the database.
+ """
+ # Automatic transaction and connection handling in session
+ connection = self.engine.connect()
+ my_session = type(self)._sessions(bind=connection)
+
+ yield my_session
+
+ my_session.close()
+ connection.close()
diff --git a/ee/connectors/db/loaders/__init__.py b/ee/connectors/db/loaders/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/ee/connectors/db/loaders/bigquery_loader.py b/ee/connectors/db/loaders/bigquery_loader.py
new file mode 100644
index 000000000..2f3747d0a
--- /dev/null
+++ b/ee/connectors/db/loaders/bigquery_loader.py
@@ -0,0 +1,34 @@
+import os
+from pathlib import Path
+
+from google.oauth2.service_account import Credentials
+
+# obtain the JSON file:
+# In the Cloud Console, go to the Create service account key page.
+#
+# Go to the Create Service Account Key page
+# From the Service account list, select New service account.
+# In the Service account name field, enter a name.
+# From the Role list, select Project > Owner.
+#
+# Note: The Role field affects which resources your service account can access in your project. You can revoke these roles or grant additional roles later. In production environments, do not grant the Owner, Editor, or Viewer roles. For more information, see Granting, changing, and revoking access to resources.
+# Click Create. A JSON file that contains your key downloads to your computer.
+#
+# Put it in utils under a name bigquery_service_account
+
+base_path = Path(__file__).parent.parent.parent
+creds_file = base_path / 'utils' / 'bigquery_service_account.json'
+credentials = Credentials.from_service_account_file(
+ creds_file)
+
+
+def insert_to_bigquery(df, table):
+ df.to_gbq(destination_table=f"{os.environ['dataset']}.{table}",
+ project_id=os.environ['project_id'],
+ if_exists='append',
+ credentials=credentials)
+
+
+def transit_insert_to_bigquery(db, batch):
+ ...
+
diff --git a/ee/connectors/db/loaders/clickhouse_loader.py b/ee/connectors/db/loaders/clickhouse_loader.py
new file mode 100644
index 000000000..2fea7fd01
--- /dev/null
+++ b/ee/connectors/db/loaders/clickhouse_loader.py
@@ -0,0 +1,4 @@
+
+def insert_to_clickhouse(db, df, table: str):
+ df.to_sql(table, db.engine, if_exists='append', index=False)
+
diff --git a/ee/connectors/db/loaders/postgres_loader.py b/ee/connectors/db/loaders/postgres_loader.py
new file mode 100644
index 000000000..bd982c607
--- /dev/null
+++ b/ee/connectors/db/loaders/postgres_loader.py
@@ -0,0 +1,3 @@
+
+def insert_to_postgres(db, df, table: str):
+ df.to_sql(table, db.engine, if_exists='append', index=False)
diff --git a/ee/connectors/db/loaders/redshift_loader.py b/ee/connectors/db/loaders/redshift_loader.py
new file mode 100644
index 000000000..fe31d4fc4
--- /dev/null
+++ b/ee/connectors/db/loaders/redshift_loader.py
@@ -0,0 +1,19 @@
+from db.models import DetailedEvent
+from psycopg2.errors import InternalError_
+
+
+def transit_insert_to_redshift(db, df, table):
+
+ try:
+ insert_df(db.pdredshift, df, table)
+ except InternalError_ as e:
+ print(repr(e))
+ print("loading failed. check stl_load_errors")
+
+
+def insert_df(pr, df, table):
+ # Write the DataFrame to S3 and then to redshift
+ pr.pandas_to_redshift(data_frame=df,
+ redshift_table_name=table,
+ append=True,
+ delimiter='|')
diff --git a/ee/connectors/db/loaders/snowflake_loader.py b/ee/connectors/db/loaders/snowflake_loader.py
new file mode 100644
index 000000000..b0bfde37f
--- /dev/null
+++ b/ee/connectors/db/loaders/snowflake_loader.py
@@ -0,0 +1,5 @@
+
+def insert_to_snowflake(db, df, table):
+ df.to_sql(table, db.engine, if_exists='append', index=False)
+
+
diff --git a/ee/connectors/db/models.py b/ee/connectors/db/models.py
new file mode 100644
index 000000000..46654e249
--- /dev/null
+++ b/ee/connectors/db/models.py
@@ -0,0 +1,389 @@
# coding: utf-8
import yaml
# BUG FIX: VARCHAR was imported twice in the original line.
from sqlalchemy import BigInteger, Boolean, Column, Integer, ARRAY, VARCHAR, text
from sqlalchemy.ext.declarative import declarative_base
from pathlib import Path
import os

# Target backend, e.g. 'snowflake', 'pg', 'bigquery', 'clickhouse' or 'redshift'.
DATABASE = os.environ['DATABASE_NAME']

Base = declarative_base()
metadata = Base.metadata

base_path = Path(__file__).parent.parent

# Load configuration file (use a context manager so the handle is closed
# instead of leaked, as the bare open() call did).
with open(base_path / 'utils' / 'config.yml') as _conf_file:
    conf = yaml.load(_conf_file, Loader=yaml.FullLoader)
try:
    db_conf = conf[DATABASE]
except KeyError:
    raise KeyError("Please provide a configuration in a YAML file with a key like\n"
                   "'snowflake', 'pg', 'bigquery', 'clickhouse' or 'redshift'.")

# Table names from the configuration: the two event tables are optional
# (connectors may export only one level), the sessions table is mandatory.
try:
    events_table_name = db_conf['events_table_name']
except KeyError as e:
    events_table_name = None
    print(repr(e))
try:
    events_detailed_table_name = db_conf['events_detailed_table_name']
except KeyError as e:
    print(repr(e))
    events_detailed_table_name = None
try:
    sessions_table_name = db_conf['sessions_table']
except KeyError as e:
    print(repr(e))
    raise KeyError("Please provide a table name under a key 'table' in a YAML configuration file")
+
+
class Session(Base):
    """ORM model of one aggregated user session.

    Attribute definition order matters: db.utils builds the DataFrame column
    order from this class body via ``Session.__dict__``.
    """
    __tablename__ = sessions_table_name

    sessionid = Column(BigInteger, primary_key=True)
    user_agent = Column(VARCHAR(5000))
    user_browser = Column(VARCHAR(5000))
    user_browser_version = Column(VARCHAR(5000))
    user_country = Column(VARCHAR(5000))
    user_device = Column(VARCHAR(5000))
    user_device_heap_size = Column(BigInteger)
    user_device_memory_size = Column(BigInteger)
    user_device_type = Column(VARCHAR(5000))
    user_os = Column(VARCHAR(5000))
    user_os_version = Column(VARCHAR(5000))
    user_uuid = Column(VARCHAR(5000))
    connection_effective_bandwidth = Column(BigInteger)  # Downlink
    connection_type = Column(VARCHAR(5000))  # "bluetooth", "cellular", "ethernet", "none", "wifi", "wimax", "other", "unknown"
    metadata_key = Column(VARCHAR(5000))
    metadata_value = Column(VARCHAR(5000))
    referrer = Column(VARCHAR(5000))
    user_anonymous_id = Column(VARCHAR(5000))
    user_id = Column(VARCHAR(5000))

    # TIME
    session_start_timestamp = Column(BigInteger)
    session_end_timestamp = Column(BigInteger)
    session_duration = Column(BigInteger)

    # SPEED INDEX RELATED
    first_contentful_paint = Column(BigInteger)
    speed_index = Column(BigInteger)
    visually_complete = Column(BigInteger)
    timing_time_to_interactive = Column(BigInteger)

    # PERFORMANCE
    avg_cpu = Column(Integer)
    avg_fps = Column(BigInteger)
    max_cpu = Column(Integer)
    max_fps = Column(BigInteger)
    max_total_js_heap_size = Column(BigInteger)
    max_used_js_heap_size = Column(BigInteger)

    # ISSUES AND EVENTS
    js_exceptions_count = Column(BigInteger)
    long_tasks_total_duration = Column(BigInteger)
    long_tasks_max_duration = Column(BigInteger)
    long_tasks_count = Column(BigInteger)
    inputs_count = Column(BigInteger)
    clicks_count = Column(BigInteger)
    issues_count = Column(BigInteger)
    # BUG FIX: 'issues' and 'urls' were bare ARRAY(VARCHAR(5000)) type
    # objects, not mapped columns -- SQLAlchemy never populates a plain type
    # object on instances. Wrap them in Column() like every other attribute.
    issues = Column(ARRAY(VARCHAR(5000)))
    urls_count = Column(BigInteger)
    urls = Column(ARRAY(VARCHAR(5000)))
+
+
class Event(Base):
    """ORM model of one aggregated ('normal' level) user event.

    Attribute definition order matters: db.utils derives the DataFrame column
    order from this class body via ``Event.__dict__``. Column names mirror
    the tracker message types (pageevent_*, errorevent_*, ...).

    NOTE(review): sessionid is declared as the primary key although one
    session presumably produces many events -- confirm this is intentional
    for the export-only use of this model.
    """
    __tablename__ = events_table_name

    sessionid = Column(BigInteger, primary_key=True)
    connectioninformation_downlink = Column(BigInteger)
    connectioninformation_type = Column(VARCHAR(5000))
    consolelog_level = Column(VARCHAR(5000))
    consolelog_value = Column(VARCHAR(5000))
    customevent_messageid = Column(BigInteger)
    customevent_name = Column(VARCHAR(5000))
    customevent_payload = Column(VARCHAR(5000))
    customevent_timestamp = Column(BigInteger)
    errorevent_message = Column(VARCHAR(5000))
    errorevent_messageid = Column(BigInteger)
    errorevent_name = Column(VARCHAR(5000))
    errorevent_payload = Column(VARCHAR(5000))
    errorevent_source = Column(VARCHAR(5000))
    errorevent_timestamp = Column(BigInteger)
    jsexception_message = Column(VARCHAR(5000))
    jsexception_name = Column(VARCHAR(5000))
    jsexception_payload = Column(VARCHAR(5000))
    metadata_key = Column(VARCHAR(5000))
    metadata_value = Column(VARCHAR(5000))
    mouseclick_id = Column(BigInteger)
    mouseclick_hesitationtime = Column(BigInteger)
    mouseclick_label = Column(VARCHAR(5000))
    pageevent_firstcontentfulpaint = Column(BigInteger)
    pageevent_firstpaint = Column(BigInteger)
    pageevent_messageid = Column(BigInteger)
    pageevent_referrer = Column(VARCHAR(5000))
    pageevent_speedindex = Column(BigInteger)
    pageevent_timestamp = Column(BigInteger)
    pageevent_url = Column(VARCHAR(5000))
    pagerendertiming_timetointeractive = Column(BigInteger)
    pagerendertiming_visuallycomplete = Column(BigInteger)
    rawcustomevent_name = Column(VARCHAR(5000))
    rawcustomevent_payload = Column(VARCHAR(5000))
    setviewportsize_height = Column(BigInteger)
    setviewportsize_width = Column(BigInteger)
    timestamp_timestamp = Column(BigInteger)
    user_anonymous_id = Column(VARCHAR(5000))
    user_id = Column(VARCHAR(5000))
    issueevent_messageid = Column(BigInteger)
    issueevent_timestamp = Column(BigInteger)
    issueevent_type = Column(VARCHAR(5000))
    issueevent_contextstring = Column(VARCHAR(5000))
    issueevent_context = Column(VARCHAR(5000))
    issueevent_payload = Column(VARCHAR(5000))
    customissue_name = Column(VARCHAR(5000))
    customissue_payload = Column(VARCHAR(5000))
    received_at = Column(BigInteger)
    batch_order_number = Column(BigInteger)
+
+
class DetailedEvent(Base):
    """ORM model of one raw ('detailed' level) tracker message.

    One column per field of every tracker message type (clickevent_*,
    pageevent_*, resourcetiming_*, ...). Attribute definition order matters:
    db.utils derives the DataFrame column order from this class body via
    ``DetailedEvent.__dict__``.

    NOTE(review): sessionid is declared as the primary key although a session
    produces many detailed events -- confirm this is intentional for the
    export-only use of this model.
    """
    __tablename__ = events_detailed_table_name

    # id = Column(Integer, primary_key=True, server_default=text("\"identity\"(119029, 0, '0,1'::text)"))
    sessionid = Column(BigInteger, primary_key=True)
    clickevent_hesitationtime = Column(BigInteger)
    clickevent_label = Column(VARCHAR(5000))
    clickevent_messageid = Column(BigInteger)
    clickevent_timestamp = Column(BigInteger)
    connectioninformation_downlink = Column(BigInteger)
    connectioninformation_type = Column(VARCHAR(5000))
    consolelog_level = Column(VARCHAR(5000))
    consolelog_value = Column(VARCHAR(5000))
    cpuissue_duration = Column(BigInteger)
    cpuissue_rate = Column(BigInteger)
    cpuissue_timestamp = Column(BigInteger)
    createdocument = Column(Boolean)
    createelementnode_id = Column(BigInteger)
    createelementnode_parentid = Column(BigInteger)
    cssdeleterule_index = Column(BigInteger)
    cssdeleterule_stylesheetid = Column(BigInteger)
    cssinsertrule_index = Column(BigInteger)
    cssinsertrule_rule = Column(VARCHAR(5000))
    cssinsertrule_stylesheetid = Column(BigInteger)
    customevent_messageid = Column(BigInteger)
    customevent_name = Column(VARCHAR(5000))
    customevent_payload = Column(VARCHAR(5000))
    customevent_timestamp = Column(BigInteger)
    domdrop_timestamp = Column(BigInteger)
    errorevent_message = Column(VARCHAR(5000))
    errorevent_messageid = Column(BigInteger)
    errorevent_name = Column(VARCHAR(5000))
    errorevent_payload = Column(VARCHAR(5000))
    errorevent_source = Column(VARCHAR(5000))
    errorevent_timestamp = Column(BigInteger)
    fetch_duration = Column(BigInteger)
    fetch_method = Column(VARCHAR(5000))
    fetch_request = Column(VARCHAR(5000))
    fetch_response = Column(VARCHAR(5000))
    fetch_status = Column(BigInteger)
    fetch_timestamp = Column(BigInteger)
    fetch_url = Column(VARCHAR(5000))
    graphql_operationkind = Column(VARCHAR(5000))
    graphql_operationname = Column(VARCHAR(5000))
    graphql_response = Column(VARCHAR(5000))
    graphql_variables = Column(VARCHAR(5000))
    graphqlevent_messageid = Column(BigInteger)
    graphqlevent_name = Column(VARCHAR(5000))
    graphqlevent_timestamp = Column(BigInteger)
    inputevent_label = Column(VARCHAR(5000))
    inputevent_messageid = Column(BigInteger)
    inputevent_timestamp = Column(BigInteger)
    inputevent_value = Column(VARCHAR(5000))
    inputevent_valuemasked = Column(Boolean)
    jsexception_message = Column(VARCHAR(5000))
    jsexception_name = Column(VARCHAR(5000))
    jsexception_payload = Column(VARCHAR(5000))
    memoryissue_duration = Column(BigInteger)
    memoryissue_rate = Column(BigInteger)
    memoryissue_timestamp = Column(BigInteger)
    metadata_key = Column(VARCHAR(5000))
    metadata_value = Column(VARCHAR(5000))
    mobx_payload = Column(VARCHAR(5000))
    mobx_type = Column(VARCHAR(5000))
    mouseclick_id = Column(BigInteger)
    mouseclick_hesitationtime = Column(BigInteger)
    mouseclick_label = Column(VARCHAR(5000))
    mousemove_x = Column(BigInteger)
    mousemove_y = Column(BigInteger)
    movenode_id = Column(BigInteger)
    movenode_index = Column(BigInteger)
    movenode_parentid = Column(BigInteger)
    ngrx_action = Column(VARCHAR(5000))
    ngrx_duration = Column(BigInteger)
    ngrx_state = Column(VARCHAR(5000))
    otable_key = Column(VARCHAR(5000))
    otable_value = Column(VARCHAR(5000))
    pageevent_domcontentloadedeventend = Column(BigInteger)
    pageevent_domcontentloadedeventstart = Column(BigInteger)
    pageevent_firstcontentfulpaint = Column(BigInteger)
    pageevent_firstpaint = Column(BigInteger)
    pageevent_loaded = Column(Boolean)
    pageevent_loadeventend = Column(BigInteger)
    pageevent_loadeventstart = Column(BigInteger)
    pageevent_messageid = Column(BigInteger)
    pageevent_referrer = Column(VARCHAR(5000))
    pageevent_requeststart = Column(BigInteger)
    pageevent_responseend = Column(BigInteger)
    pageevent_responsestart = Column(BigInteger)
    pageevent_speedindex = Column(BigInteger)
    pageevent_timestamp = Column(BigInteger)
    pageevent_url = Column(VARCHAR(5000))
    pageloadtiming_domcontentloadedeventend = Column(BigInteger)
    pageloadtiming_domcontentloadedeventstart = Column(BigInteger)
    pageloadtiming_firstcontentfulpaint = Column(BigInteger)
    pageloadtiming_firstpaint = Column(BigInteger)
    pageloadtiming_loadeventend = Column(BigInteger)
    pageloadtiming_loadeventstart = Column(BigInteger)
    pageloadtiming_requeststart = Column(BigInteger)
    pageloadtiming_responseend = Column(BigInteger)
    pageloadtiming_responsestart = Column(BigInteger)
    pagerendertiming_speedindex = Column(BigInteger)
    pagerendertiming_timetointeractive = Column(BigInteger)
    pagerendertiming_visuallycomplete = Column(BigInteger)
    performancetrack_frames = Column(BigInteger)
    performancetrack_ticks = Column(BigInteger)
    performancetrack_totaljsheapsize = Column(BigInteger)
    performancetrack_usedjsheapsize = Column(BigInteger)
    performancetrackaggr_avgcpu = Column(BigInteger)
    performancetrackaggr_avgfps = Column(BigInteger)
    performancetrackaggr_avgtotaljsheapsize = Column(BigInteger)
    performancetrackaggr_avgusedjsheapsize = Column(BigInteger)
    performancetrackaggr_maxcpu = Column(BigInteger)
    performancetrackaggr_maxfps = Column(BigInteger)
    performancetrackaggr_maxtotaljsheapsize = Column(BigInteger)
    performancetrackaggr_maxusedjsheapsize = Column(BigInteger)
    performancetrackaggr_mincpu = Column(BigInteger)
    performancetrackaggr_minfps = Column(BigInteger)
    performancetrackaggr_mintotaljsheapsize = Column(BigInteger)
    performancetrackaggr_minusedjsheapsize = Column(BigInteger)
    performancetrackaggr_timestampend = Column(BigInteger)
    performancetrackaggr_timestampstart = Column(BigInteger)
    profiler_args = Column(VARCHAR(5000))
    profiler_duration = Column(BigInteger)
    profiler_name = Column(VARCHAR(5000))
    profiler_result = Column(VARCHAR(5000))
    rawcustomevent_name = Column(VARCHAR(5000))
    rawcustomevent_payload = Column(VARCHAR(5000))
    rawerrorevent_message = Column(VARCHAR(5000))
    rawerrorevent_name = Column(VARCHAR(5000))
    rawerrorevent_payload = Column(VARCHAR(5000))
    rawerrorevent_source = Column(VARCHAR(5000))
    rawerrorevent_timestamp = Column(BigInteger)
    redux_action = Column(VARCHAR(5000))
    redux_duration = Column(BigInteger)
    redux_state = Column(VARCHAR(5000))
    removenode_id = Column(BigInteger)
    removenodeattribute_id = Column(BigInteger)
    removenodeattribute_name = Column(VARCHAR(5000))
    resourceevent_decodedbodysize = Column(BigInteger)
    resourceevent_duration = Column(BigInteger)
    resourceevent_encodedbodysize = Column(BigInteger)
    resourceevent_headersize = Column(BigInteger)
    resourceevent_messageid = Column(BigInteger)
    resourceevent_method = Column(VARCHAR(5000))
    resourceevent_status = Column(BigInteger)
    resourceevent_success = Column(Boolean)
    resourceevent_timestamp = Column(BigInteger)
    resourceevent_ttfb = Column(BigInteger)
    resourceevent_type = Column(VARCHAR(5000))
    resourceevent_url = Column(VARCHAR(5000))
    resourcetiming_decodedbodysize = Column(BigInteger)
    resourcetiming_duration = Column(BigInteger)
    resourcetiming_encodedbodysize = Column(BigInteger)
    resourcetiming_headersize = Column(BigInteger)
    resourcetiming_initiator = Column(VARCHAR(5000))
    resourcetiming_timestamp = Column(BigInteger)
    resourcetiming_ttfb = Column(BigInteger)
    resourcetiming_url = Column(VARCHAR(5000))
    sessiondisconnect = Column(Boolean)
    sessiondisconnect_timestamp = Column(BigInteger)
    sessionend = Column(Boolean)
    sessionend_timestamp = Column(BigInteger)
    sessionstart_projectid = Column(BigInteger)
    sessionstart_revid = Column(VARCHAR(5000))
    sessionstart_timestamp = Column(BigInteger)
    sessionstart_trackerversion = Column(VARCHAR(5000))
    sessionstart_useragent = Column(VARCHAR(5000))
    sessionstart_userbrowser = Column(VARCHAR(5000))
    sessionstart_userbrowserversion = Column(VARCHAR(5000))
    sessionstart_usercountry = Column(VARCHAR(5000))
    sessionstart_userdevice = Column(VARCHAR(5000))
    sessionstart_userdeviceheapsize = Column(BigInteger)
    sessionstart_userdevicememorysize = Column(BigInteger)
    sessionstart_userdevicetype = Column(VARCHAR(5000))
    sessionstart_useros = Column(VARCHAR(5000))
    sessionstart_userosversion = Column(VARCHAR(5000))
    sessionstart_useruuid = Column(VARCHAR(5000))
    setcssdata_data = Column(BigInteger)
    setcssdata_id = Column(BigInteger)
    setinputchecked_checked = Column(BigInteger)
    setinputchecked_id = Column(BigInteger)
    setinputtarget_id = Column(BigInteger)
    setinputtarget_label = Column(BigInteger)
    setinputvalue_id = Column(BigInteger)
    setinputvalue_mask = Column(BigInteger)
    setinputvalue_value = Column(BigInteger)
    setnodeattribute_id = Column(BigInteger)
    setnodeattribute_name = Column(BigInteger)
    setnodeattribute_value = Column(BigInteger)
    setnodedata_data = Column(BigInteger)
    setnodedata_id = Column(BigInteger)
    setnodescroll_id = Column(BigInteger)
    setnodescroll_x = Column(BigInteger)
    setnodescroll_y = Column(BigInteger)
    setpagelocation_navigationstart = Column(BigInteger)
    setpagelocation_referrer = Column(VARCHAR(5000))
    setpagelocation_url = Column(VARCHAR(5000))
    setpagevisibility_hidden = Column(Boolean)
    setviewportscroll_x = Column(BigInteger)
    setviewportscroll_y = Column(BigInteger)
    setviewportsize_height = Column(BigInteger)
    setviewportsize_width = Column(BigInteger)
    stateaction_type = Column(VARCHAR(5000))
    stateactionevent_messageid = Column(BigInteger)
    stateactionevent_timestamp = Column(BigInteger)
    stateactionevent_type = Column(VARCHAR(5000))
    timestamp_timestamp = Column(BigInteger)
    useranonymousid_id = Column(VARCHAR(5000))
    userid_id = Column(VARCHAR(5000))
    vuex_mutation = Column(VARCHAR(5000))
    vuex_state = Column(VARCHAR(5000))
    longtask_timestamp = Column(BigInteger)
    longtask_duration = Column(BigInteger)
    longtask_context = Column(BigInteger)
    longtask_containertype = Column(BigInteger)
    longtask_containersrc = Column(VARCHAR(5000))
    longtask_containerid = Column(VARCHAR(5000))
    longtask_containername = Column(VARCHAR(5000))
    setnodeurlbasedattribute_id = Column(BigInteger)
    setnodeurlbasedattribute_name = Column(VARCHAR(5000))
    setnodeurlbasedattribute_value = Column(VARCHAR(5000))
    setnodeurlbasedattribute_baseurl = Column(VARCHAR(5000))
    setstyledata_id = Column(BigInteger)
    setstyledata_data = Column(VARCHAR(5000))
    setstyledata_baseurl = Column(VARCHAR(5000))
    issueevent_messageid = Column(BigInteger)
    issueevent_timestamp = Column(BigInteger)
    issueevent_type = Column(VARCHAR(5000))
    issueevent_contextstring = Column(VARCHAR(5000))
    issueevent_context = Column(VARCHAR(5000))
    issueevent_payload = Column(VARCHAR(5000))
    technicalinfo_type = Column(VARCHAR(5000))
    technicalinfo_value = Column(VARCHAR(5000))
    customissue_name = Column(VARCHAR(5000))
    customissue_payload = Column(VARCHAR(5000))
    pageclose = Column(Boolean)
    received_at = Column(BigInteger)
    batch_order_number = Column(BigInteger)
diff --git a/ee/connectors/db/tables.py b/ee/connectors/db/tables.py
new file mode 100644
index 000000000..0127cbbd1
--- /dev/null
+++ b/ee/connectors/db/tables.py
@@ -0,0 +1,61 @@
from pathlib import Path

# SQL DDL scripts live in <package root>/sql, one level above this module's
# directory.
base_path = Path(__file__).parent.parent


def _create_table(db, sql_file, table_name):
    """Execute the DDL in sql/<sql_file> on db.engine and log the result.

    Extracted helper: the four public functions below were four copies of
    the same open/read/execute/print sequence. Also fixes the 'succesfully'
    typo and drops the pointless f-prefix on constant strings.
    """
    with open(base_path / 'sql' / sql_file) as f:
        db.engine.execute(f.read())
    print(f"`{table_name}` table created successfully.")


def create_tables_clickhouse(db):
    """Create the ClickHouse connector tables plus their Buffer tables."""
    _create_table(db, 'clickhouse_events.sql', 'connector_user_events')
    _create_table(db, 'clickhouse_events_buffer.sql', 'connector_user_events_buffer')
    _create_table(db, 'clickhouse_sessions.sql', 'connector_sessions')
    _create_table(db, 'clickhouse_sessions_buffer.sql', 'connector_sessions_buffer')


def create_tables_postgres(db):
    """Create the Postgres connector event and session tables."""
    _create_table(db, 'postgres_events.sql', 'connector_user_events')
    _create_table(db, 'postgres_sessions.sql', 'connector_sessions')


def create_tables_snowflake(db):
    """Create the Snowflake connector event and session tables."""
    _create_table(db, 'snowflake_events.sql', 'connector_user_events')
    _create_table(db, 'snowflake_sessions.sql', 'connector_sessions')


def create_tables_redshift(db):
    """Create the Redshift connector event and session tables."""
    _create_table(db, 'redshift_events.sql', 'connector_user_events')
    _create_table(db, 'redshift_sessions.sql', 'connector_sessions')
diff --git a/ee/connectors/db/utils.py b/ee/connectors/db/utils.py
new file mode 100644
index 000000000..7c268c6b3
--- /dev/null
+++ b/ee/connectors/db/utils.py
@@ -0,0 +1,368 @@
+import pandas as pd
+from db.models import DetailedEvent, Event, Session, DATABASE
+
# pandas dtypes for the aggregated ('normal') events DataFrame. Keys must be
# a subset of the Event model's columns: DataFrame.astype raises KeyError for
# any key that is not an existing column.
dtypes_events = {'sessionid': "Int64",
                 'connectioninformation_downlink': "Int64",
                 'connectioninformation_type': "string",
                 'consolelog_level': "string",
                 'consolelog_value': "string",
                 'customevent_messageid': "Int64",
                 'customevent_name': "string",
                 'customevent_payload': "string",
                 'customevent_timestamp': "Int64",
                 'errorevent_message': "string",
                 'errorevent_messageid': "Int64",
                 'errorevent_name': "string",
                 'errorevent_payload': "string",
                 'errorevent_source': "string",
                 'errorevent_timestamp': "Int64",
                 'jsexception_message': "string",
                 'jsexception_name': "string",
                 'jsexception_payload': "string",
                 'metadata_key': "string",
                 'metadata_value': "string",
                 'mouseclick_id': "Int64",
                 'mouseclick_hesitationtime': "Int64",
                 'mouseclick_label': "string",
                 'pageevent_firstcontentfulpaint': "Int64",
                 'pageevent_firstpaint': "Int64",
                 'pageevent_messageid': "Int64",
                 'pageevent_referrer': "string",
                 'pageevent_speedindex': "Int64",
                 'pageevent_timestamp': "Int64",
                 'pageevent_url': "string",
                 'pagerendertiming_timetointeractive': "Int64",
                 'pagerendertiming_visuallycomplete': "Int64",
                 'rawcustomevent_name': "string",
                 'rawcustomevent_payload': "string",
                 'setviewportsize_height': "Int64",
                 'setviewportsize_width': "Int64",
                 'timestamp_timestamp': "Int64",
                 'user_anonymous_id': "string",
                 'user_id': "string",
                 'issueevent_messageid': "Int64",
                 'issueevent_timestamp': "Int64",
                 'issueevent_type': "string",
                 'issueevent_contextstring': "string",
                 'issueevent_context': "string",
                 'issueevent_payload': "string",
                 'received_at': "Int64",
                 'batch_order_number': "Int64"}
# pandas dtypes for the raw ('detailed') events DataFrame. Keys must be a
# subset of the DetailedEvent model's columns: DataFrame.astype raises
# KeyError for any key that is not an existing column.
# BUG FIX: the four 'longtasks_*' keys were renamed to 'longtask_*' to match
# the DetailedEvent model (the DataFrame is built from that model's columns,
# so the old keys made astype raise KeyError on every detailed batch).
dtypes_detailed_events = {
    "sessionid": "Int64",
    "clickevent_hesitationtime": "Int64",
    "clickevent_label": "object",
    "clickevent_messageid": "Int64",
    "clickevent_timestamp": "Int64",
    "connectioninformation_downlink": "Int64",
    "connectioninformation_type": "object",
    "consolelog_level": "object",
    "consolelog_value": "object",
    "cpuissue_duration": "Int64",
    "cpuissue_rate": "Int64",
    "cpuissue_timestamp": "Int64",
    "createdocument": "boolean",
    "createelementnode_id": "Int64",
    "createelementnode_parentid": "Int64",
    "cssdeleterule_index": "Int64",
    "cssdeleterule_stylesheetid": "Int64",
    "cssinsertrule_index": "Int64",
    "cssinsertrule_rule": "object",
    "cssinsertrule_stylesheetid": "Int64",
    "customevent_messageid": "Int64",
    "customevent_name": "object",
    "customevent_payload": "object",
    "customevent_timestamp": "Int64",
    "domdrop_timestamp": "Int64",
    "errorevent_message": "object",
    "errorevent_messageid": "Int64",
    "errorevent_name": "object",
    "errorevent_payload": "object",
    "errorevent_source": "object",
    "errorevent_timestamp": "Int64",
    "fetch_duration": "Int64",
    "fetch_method": "object",
    "fetch_request": "object",
    "fetch_response": "object",
    "fetch_status": "Int64",
    "fetch_timestamp": "Int64",
    "fetch_url": "object",
    "graphql_operationkind": "object",
    "graphql_operationname": "object",
    "graphql_response": "object",
    "graphql_variables": "object",
    "graphqlevent_messageid": "Int64",
    "graphqlevent_name": "object",
    "graphqlevent_timestamp": "Int64",
    "inputevent_label": "object",
    "inputevent_messageid": "Int64",
    "inputevent_timestamp": "Int64",
    "inputevent_value": "object",
    "inputevent_valuemasked": "boolean",
    "jsexception_message": "object",
    "jsexception_name": "object",
    "jsexception_payload": "object",
    "longtask_timestamp": "Int64",
    "longtask_duration": "Int64",
    "longtask_containerid": "object",
    "longtask_containersrc": "object",
    "memoryissue_duration": "Int64",
    "memoryissue_rate": "Int64",
    "memoryissue_timestamp": "Int64",
    "metadata_key": "object",
    "metadata_value": "object",
    "mobx_payload": "object",
    "mobx_type": "object",
    "mouseclick_id": "Int64",
    "mouseclick_hesitationtime": "Int64",
    "mouseclick_label": "object",
    "mousemove_x": "Int64",
    "mousemove_y": "Int64",
    "movenode_id": "Int64",
    "movenode_index": "Int64",
    "movenode_parentid": "Int64",
    "ngrx_action": "object",
    "ngrx_duration": "Int64",
    "ngrx_state": "object",
    "otable_key": "object",
    "otable_value": "object",
    "pageevent_domcontentloadedeventend": "Int64",
    "pageevent_domcontentloadedeventstart": "Int64",
    "pageevent_firstcontentfulpaint": "Int64",
    "pageevent_firstpaint": "Int64",
    "pageevent_loaded": "boolean",
    "pageevent_loadeventend": "Int64",
    "pageevent_loadeventstart": "Int64",
    "pageevent_messageid": "Int64",
    "pageevent_referrer": "object",
    "pageevent_requeststart": "Int64",
    "pageevent_responseend": "Int64",
    "pageevent_responsestart": "Int64",
    "pageevent_speedindex": "Int64",
    "pageevent_timestamp": "Int64",
    "pageevent_url": "object",
    "pageloadtiming_domcontentloadedeventend": "Int64",
    "pageloadtiming_domcontentloadedeventstart": "Int64",
    "pageloadtiming_firstcontentfulpaint": "Int64",
    "pageloadtiming_firstpaint": "Int64",
    "pageloadtiming_loadeventend": "Int64",
    "pageloadtiming_loadeventstart": "Int64",
    "pageloadtiming_requeststart": "Int64",
    "pageloadtiming_responseend": "Int64",
    "pageloadtiming_responsestart": "Int64",
    "pagerendertiming_speedindex": "Int64",
    "pagerendertiming_timetointeractive": "Int64",
    "pagerendertiming_visuallycomplete": "Int64",
    "performancetrack_frames": "Int64",
    "performancetrack_ticks": "Int64",
    "performancetrack_totaljsheapsize": "Int64",
    "performancetrack_usedjsheapsize": "Int64",
    "performancetrackaggr_avgcpu": "Int64",
    "performancetrackaggr_avgfps": "Int64",
    "performancetrackaggr_avgtotaljsheapsize": "Int64",
    "performancetrackaggr_avgusedjsheapsize": "Int64",
    "performancetrackaggr_maxcpu": "Int64",
    "performancetrackaggr_maxfps": "Int64",
    "performancetrackaggr_maxtotaljsheapsize": "Int64",
    "performancetrackaggr_maxusedjsheapsize": "Int64",
    "performancetrackaggr_mincpu": "Int64",
    "performancetrackaggr_minfps": "Int64",
    "performancetrackaggr_mintotaljsheapsize": "Int64",
    "performancetrackaggr_minusedjsheapsize": "Int64",
    "performancetrackaggr_timestampend": "Int64",
    "performancetrackaggr_timestampstart": "Int64",
    "profiler_args": "object",
    "profiler_duration": "Int64",
    "profiler_name": "object",
    "profiler_result": "object",
    "rawcustomevent_name": "object",
    "rawcustomevent_payload": "object",
    "rawerrorevent_message": "object",
    "rawerrorevent_name": "object",
    "rawerrorevent_payload": "object",
    "rawerrorevent_source": "object",
    "rawerrorevent_timestamp": "Int64",
    "redux_action": "object",
    "redux_duration": "Int64",
    "redux_state": "object",
    "removenode_id": "Int64",
    "removenodeattribute_id": "Int64",
    "removenodeattribute_name": "object",
    "resourceevent_decodedbodysize": "Int64",
    "resourceevent_duration": "Int64",
    "resourceevent_encodedbodysize": "Int64",
    "resourceevent_headersize": "Int64",
    "resourceevent_messageid": "Int64",
    "resourceevent_method": "object",
    "resourceevent_status": "Int64",
    "resourceevent_success": "boolean",
    "resourceevent_timestamp": "Int64",
    "resourceevent_ttfb": "Int64",
    "resourceevent_type": "object",
    "resourceevent_url": "object",
    "resourcetiming_decodedbodysize": "Int64",
    "resourcetiming_duration": "Int64",
    "resourcetiming_encodedbodysize": "Int64",
    "resourcetiming_headersize": "Int64",
    "resourcetiming_initiator": "object",
    "resourcetiming_timestamp": "Int64",
    "resourcetiming_ttfb": "Int64",
    "resourcetiming_url": "object",
    "sessiondisconnect": "boolean",
    "sessiondisconnect_timestamp": "Int64",
    "sessionend": "boolean",
    "sessionend_timestamp": "Int64",
    "sessionstart_projectid": "Int64",
    "sessionstart_revid": "object",
    "sessionstart_timestamp": "Int64",
    "sessionstart_trackerversion": "object",
    "sessionstart_useragent": "object",
    "sessionstart_userbrowser": "object",
    "sessionstart_userbrowserversion": "object",
    "sessionstart_usercountry": "object",
    "sessionstart_userdevice": "object",
    "sessionstart_userdeviceheapsize": "Int64",
    "sessionstart_userdevicememorysize": "Int64",
    "sessionstart_userdevicetype": "object",
    "sessionstart_useros": "object",
    "sessionstart_userosversion": "object",
    "sessionstart_useruuid": "object",
    "setcssdata_data": "Int64",
    "setcssdata_id": "Int64",
    "setinputchecked_checked": "Int64",
    "setinputchecked_id": "Int64",
    "setinputtarget_id": "Int64",
    "setinputtarget_label": "Int64",
    "setinputvalue_id": "Int64",
    "setinputvalue_mask": "Int64",
    "setinputvalue_value": "Int64",
    "setnodeattribute_id": "Int64",
    "setnodeattribute_name": "Int64",
    "setnodeattribute_value": "Int64",
    "setnodedata_data": "Int64",
    "setnodedata_id": "Int64",
    "setnodescroll_id": "Int64",
    "setnodescroll_x": "Int64",
    "setnodescroll_y": "Int64",
    "setpagelocation_navigationstart": "Int64",
    "setpagelocation_referrer": "object",
    "setpagelocation_url": "object",
    "setpagevisibility_hidden": "boolean",
    "setviewportscroll_x": "Int64",
    "setviewportscroll_y": "Int64",
    "setviewportsize_height": "Int64",
    "setviewportsize_width": "Int64",
    "stateaction_type": "object",
    "stateactionevent_messageid": "Int64",
    "stateactionevent_timestamp": "Int64",
    "stateactionevent_type": "object",
    "timestamp_timestamp": "Int64",
    "useranonymousid_id": "object",
    "userid_id": "object",
    "vuex_mutation": "object",
    "vuex_state": "string",
    "received_at": "Int64",
    "batch_order_number": "Int64"
}
# pandas dtypes for the aggregated sessions DataFrame. Keys must be a subset
# of the Session model's columns; the array-valued 'issues'/'urls' columns
# stay 'object' (overridden to 'string' for BigQuery below).
dtypes_sessions = {'sessionid': 'Int64',
                   'user_agent': 'string',
                   'user_browser': 'string',
                   'user_browser_version': 'string',
                   'user_country': 'string',
                   'user_device': 'string',
                   'user_device_heap_size': 'Int64',
                   'user_device_memory_size': 'Int64',
                   'user_device_type': 'string',
                   'user_os': 'string',
                   'user_os_version': 'string',
                   'user_uuid': 'string',
                   'connection_effective_bandwidth': 'Int64',
                   'connection_type': 'string',
                   'metadata_key': 'string',
                   'metadata_value': 'string',
                   'referrer': 'string',
                   'user_anonymous_id': 'string',
                   'user_id': 'string',
                   'session_start_timestamp': 'Int64',
                   'session_end_timestamp': 'Int64',
                   'session_duration': 'Int64',
                   'first_contentful_paint': 'Int64',
                   'speed_index': 'Int64',
                   'visually_complete': 'Int64',
                   'timing_time_to_interactive': 'Int64',
                   'avg_cpu': 'Int64',
                   'avg_fps': 'Int64',
                   'max_cpu': 'Int64',
                   'max_fps': 'Int64',
                   'max_total_js_heap_size': 'Int64',
                   'max_used_js_heap_size': 'Int64',
                   'js_exceptions_count': 'Int64',
                   'long_tasks_total_duration': 'Int64',
                   'long_tasks_max_duration': 'Int64',
                   'long_tasks_count': 'Int64',
                   'inputs_count': 'Int64',
                   'clicks_count': 'Int64',
                   'issues_count': 'Int64',
                   'issues': 'object',
                   'urls_count': 'Int64',
                   'urls': 'object'}
+
# For BigQuery the array-valued session columns are exported as plain strings.
if DATABASE == 'bigquery':
    dtypes_sessions['urls'] = 'string'
    dtypes_sessions['issues'] = 'string'

# Column lists mirror the model class bodies. Class __dict__ preserves the
# attribute definition order, which fixes the DataFrame column order used by
# get_df_from_batch. (Comprehensions replace the original append loops.)
detailed_events_col = [col for col in DetailedEvent.__dict__ if not col.startswith('_')]

events_col = [col for col in Event.__dict__ if not col.startswith('_')]

sessions_col = [col for col in Session.__dict__ if not col.startswith('_')]
+
+
def get_df_from_batch(batch, level):
    """Build a typed pandas DataFrame from a batch of ORM objects.

    Parameters
    ----------
    batch : list
        Objects whose ``__dict__`` carries the column values
        (Event, DetailedEvent or Session instances).
    level : str
        'normal', 'detailed' or 'sessions' -- selects the column set and
        dtype mapping.

    Returns
    -------
    pandas.DataFrame

    Raises
    ------
    ValueError
        If *level* is not one of the three supported values (the original
        code crashed later with an opaque NameError instead).
    """
    if level == 'normal':
        df = pd.DataFrame([b.__dict__ for b in batch], columns=events_col)
    elif level == 'detailed':
        df = pd.DataFrame([b.__dict__ for b in batch], columns=detailed_events_col)
    elif level == 'sessions':
        df = pd.DataFrame([b.__dict__ for b in batch], columns=sessions_col)
    else:
        raise ValueError(f"unknown level {level!r}; expected 'normal', 'detailed' or 'sessions'")

    # SQLAlchemy's bookkeeping attribute leaks in via __dict__; drop it if present.
    df = df.drop('_sa_instance_state', axis=1, errors='ignore')

    if level == 'normal':
        df = df.astype(dtypes_events)
    elif level == 'detailed':
        # These two columns may carry raw user input; blank them before export.
        df['inputevent_value'] = None
        df['customevent_payload'] = None
        df = df.astype(dtypes_detailed_events)
    else:
        df = df.astype(dtypes_sessions)

    if DATABASE == 'clickhouse' and level == 'sessions':
        # presumably the ClickHouse array columns reject NULLs -- TODO confirm
        df['issues'] = df['issues'].fillna('')
        df['urls'] = df['urls'].fillna('')

    # Cap string values at 255 characters and strip '|', which the Redshift
    # path uses as its COPY delimiter. regex=False forces literal replacement:
    # older pandas defaulted to regex=True, where the pattern "|" matched only
    # empty strings and left the pipes in place.
    for column in df.columns:
        try:
            is_string_col = df[column].dtype == 'string'
        except TypeError as e:
            print(repr(e))
            is_string_col = False
        if is_string_col:
            df[column] = df[column].str.slice(0, 255)
            df[column] = df[column].str.replace("|", "", regex=False)
    return df
diff --git a/ee/connectors/db/writer.py b/ee/connectors/db/writer.py
new file mode 100644
index 000000000..b999b773f
--- /dev/null
+++ b/ee/connectors/db/writer.py
@@ -0,0 +1,63 @@
+import os
+DATABASE = os.environ['DATABASE_NAME']
+
+from db.api import DBConnection
+from db.utils import get_df_from_batch
+from db.tables import *
+
+if DATABASE == 'redshift':
+ from db.loaders.redshift_loader import transit_insert_to_redshift
+if DATABASE == 'clickhouse':
+ from db.loaders.clickhouse_loader import insert_to_clickhouse
+if DATABASE == 'pg':
+ from db.loaders.postgres_loader import insert_to_postgres
+if DATABASE == 'bigquery':
+ from db.loaders.bigquery_loader import insert_to_bigquery
+ from bigquery_utils.create_table import create_tables_bigquery
+if DATABASE == 'snowflake':
+ from db.loaders.snowflake_loader import insert_to_snowflake
+
+
# Create the destination tables if they don't exist yet; the connection is
# only needed for DDL, so it is disposed immediately afterwards.
try:
    db = DBConnection(DATABASE)
    if DATABASE == 'pg':
        create_tables_postgres(db)
    if DATABASE == 'clickhouse':
        create_tables_clickhouse(db)
    if DATABASE == 'snowflake':
        create_tables_snowflake(db)
    if DATABASE == 'bigquery':
        create_tables_bigquery()
    if DATABASE == 'redshift':
        create_tables_redshift(db)
    db.engine.dispose()
    db = None
except Exception as e:
    # Best-effort: table creation may legitimately fail on restricted
    # accounts, so we point the operator at the manual DDL scripts.
    print(repr(e))
    # bugfix: these were plain strings, so '{DATABASE}' printed literally.
    print("Please create the tables with scripts provided in "
          f"'/sql/{DATABASE}_sessions.sql' and '/sql/{DATABASE}_events.sql'")
+
+
def insert_batch(db: DBConnection, batch, table, level='normal'):
    """Convert *batch* to a DataFrame and insert it into *table* using the
    loader that matches the configured database dialect. Empty batches are
    a no-op."""
    if not batch:
        return
    df = get_df_from_batch(batch, level=level)

    dialect = db.config
    if dialect == 'redshift':
        transit_insert_to_redshift(db=db, df=df, table=table)
    elif dialect == 'clickhouse':
        insert_to_clickhouse(db=db, df=df, table=table)
    elif dialect == 'pg':
        insert_to_postgres(db=db, df=df, table=table)
    elif dialect == 'bigquery':
        insert_to_bigquery(df=df, table=table)
    elif dialect == 'snowflake':
        insert_to_snowflake(db=db, df=df, table=table)
diff --git a/ee/connectors/handler.py b/ee/connectors/handler.py
new file mode 100644
index 000000000..5167c7800
--- /dev/null
+++ b/ee/connectors/handler.py
@@ -0,0 +1,647 @@
+from typing import Optional, Union
+
+from db.models import Event, DetailedEvent, Session
+from msgcodec.messages import *
+
+
def handle_normal_message(message: Message) -> Optional[Event]:
    """Map a decoded tracker message onto a flat ``Event`` row.

    Each supported message type fills its own column group on a fresh
    ``Event`` and returns it. Message types with no branch here fall
    through and return ``None``; the caller uses that to skip them.
    """

    n = Event()

    if isinstance(message, ConnectionInformation):
        n.connectioninformation_downlink = message.downlink
        n.connectioninformation_type = message.type
        return n

    if isinstance(message, ConsoleLog):
        n.consolelog_level = message.level
        n.consolelog_value = message.value
        return n

    if isinstance(message, CustomEvent):
        n.customevent_messageid = message.message_id
        n.customevent_name = message.name
        n.customevent_timestamp = message.timestamp
        n.customevent_payload = message.payload
        return n

    if isinstance(message, ErrorEvent):
        n.errorevent_message = message.message
        n.errorevent_messageid = message.message_id
        n.errorevent_name = message.name
        n.errorevent_payload = message.payload
        n.errorevent_source = message.source
        n.errorevent_timestamp = message.timestamp
        return n

    if isinstance(message, JSException):
        n.jsexception_name = message.name
        n.jsexception_payload = message.payload
        n.jsexception_message = message.message
        return n

    if isinstance(message, Metadata):
        n.metadata_key = message.key
        n.metadata_value = message.value
        return n

    if isinstance(message, MouseClick):
        n.mouseclick_hesitationtime = message.hesitation_time
        n.mouseclick_id = message.id
        n.mouseclick_label = message.label
        return n

    if isinstance(message, PageEvent):
        n.pageevent_firstcontentfulpaint = message.first_contentful_paint
        n.pageevent_firstpaint = message.first_paint
        n.pageevent_messageid = message.message_id
        n.pageevent_referrer = message.referrer
        n.pageevent_speedindex = message.speed_index
        n.pageevent_timestamp = message.timestamp
        n.pageevent_url = message.url
        return n

    if isinstance(message, PageRenderTiming):
        n.pagerendertiming_timetointeractive = message.time_to_interactive
        n.pagerendertiming_visuallycomplete = message.visually_complete
        return n

    if isinstance(message, RawCustomEvent):
        n.rawcustomevent_name = message.name
        n.rawcustomevent_payload = message.payload
        return n

    if isinstance(message, SetViewportSize):
        n.setviewportsize_height = message.height
        n.setviewportsize_width = message.width
        return n

    if isinstance(message, Timestamp):
        n.timestamp_timestamp = message.timestamp
        return n

    if isinstance(message, UserAnonymousID):
        n.user_anonymous_id = message.id
        return n

    if isinstance(message, UserID):
        n.user_id = message.id
        return n

    if isinstance(message, IssueEvent):
        n.issueevent_messageid = message.message_id
        n.issueevent_timestamp = message.timestamp
        n.issueevent_type = message.type
        n.issueevent_contextstring = message.context_string
        n.issueevent_context = message.context
        n.issueevent_payload = message.payload
        return n

    if isinstance(message, CustomIssue):
        n.customissue_name = message.name
        n.customissue_payload = message.payload
        return n
+
+
def handle_session(n: Session, message: Message) -> Optional[Session]:
    """Fold *message* into the running per-session aggregate *n*.

    A fresh ``Session`` is created on first call. Counters use the
    try/except TypeError idiom because model attributes start as ``None``.
    The (possibly updated) aggregate is always returned, including for
    message types with no branch here — previously unmatched messages fell
    off the end and returned ``None``, which wiped the caller's accumulated
    session state.
    """

    if not n:
        n = Session()

    if isinstance(message, SessionStart):
        n.session_start_timestamp = message.timestamp

        n.user_uuid = message.user_uuid
        n.user_agent = message.user_agent
        n.user_os = message.user_os
        n.user_os_version = message.user_os_version
        n.user_browser = message.user_browser
        n.user_browser_version = message.user_browser_version
        n.user_device = message.user_device
        n.user_device_type = message.user_device_type
        n.user_device_memory_size = message.user_device_memory_size
        n.user_device_heap_size = message.user_device_heap_size
        n.user_country = message.user_country
        return n

    if isinstance(message, SessionEnd):
        n.session_end_timestamp = message.timestamp
        try:
            n.session_duration = n.session_end_timestamp - n.session_start_timestamp
        except TypeError:
            # SessionStart was never seen; leave duration unset.
            pass
        return n

    if isinstance(message, ConnectionInformation):
        n.connection_effective_bandwidth = message.downlink
        n.connection_type = message.type
        return n

    if isinstance(message, Metadata):
        n.metadata_key = message.key
        n.metadata_value = message.value
        return n

    if isinstance(message, PageEvent):
        n.referrer = message.referrer
        n.first_contentful_paint = message.first_contentful_paint
        n.speed_index = message.speed_index
        n.timing_time_to_interactive = message.time_to_interactive
        n.visually_complete = message.visually_complete
        try:
            n.urls_count += 1
        except TypeError:
            n.urls_count = 1
        try:
            n.urls.append(message.url)
        except AttributeError:
            n.urls = [message.url]
        return n

    if isinstance(message, PerformanceTrackAggr):
        n.avg_cpu = message.avg_cpu
        n.avg_fps = message.avg_fps
        n.max_cpu = message.max_cpu
        n.max_fps = message.max_fps
        n.max_total_js_heap_size = message.max_total_js_heap_size
        n.max_used_js_heap_size = message.max_used_js_heap_size
        return n

    if isinstance(message, UserID):
        n.user_id = message.id
        return n

    if isinstance(message, UserAnonymousID):
        n.user_anonymous_id = message.id
        return n

    if isinstance(message, JSException):
        try:
            n.js_exceptions_count += 1
        except TypeError:
            n.js_exceptions_count = 1
        return n

    if isinstance(message, LongTask):
        try:
            n.long_tasks_total_duration += message.duration
        except TypeError:
            n.long_tasks_total_duration = message.duration

        try:
            # bugfix: the comparison was inverted (>), which tracked the
            # minimum duration instead of the maximum.
            if n.long_tasks_max_duration < message.duration:
                n.long_tasks_max_duration = message.duration
        except TypeError:
            n.long_tasks_max_duration = message.duration

        try:
            n.long_tasks_count += 1
        except TypeError:
            n.long_tasks_count = 1
        return n

    if isinstance(message, InputEvent):
        try:
            n.inputs_count += 1
        except TypeError:
            n.inputs_count = 1
        return n

    if isinstance(message, MouseClick):
        # NOTE(review): clicks bump inputs_count; dtypes_sessions also
        # declares a clicks_count column that is never written — confirm
        # which counter MouseClick is meant to feed.
        try:
            n.inputs_count += 1
        except TypeError:
            n.inputs_count = 1
        return n

    # bugfix: this branch existed twice; the first (broken) copy returned
    # early after resetting inputs_count to 1 and never recorded the issue
    # type, making the complete copy below it unreachable.
    if isinstance(message, IssueEvent):
        try:
            n.issues_count += 1
        except TypeError:
            n.issues_count = 1

        try:
            n.issues.append(message.type)
        except AttributeError:
            n.issues = [message.type]
        return n

    # Unhandled message types leave the aggregate untouched.
    return n
+
+
def handle_message(message: Message) -> Optional[DetailedEvent]:
    """Map a decoded tracker message onto a flat ``DetailedEvent`` row.

    One branch per supported message type; each fills its own column group
    on a fresh ``DetailedEvent`` and returns it. Unsupported types return
    ``None`` so the caller can skip them. (Dead commented-out branches for
    DOM mutation messages were removed.)
    """
    n = DetailedEvent()

    if isinstance(message, SessionEnd):
        n.sessionend = True
        n.sessionend_timestamp = message.timestamp
        return n

    if isinstance(message, Timestamp):
        n.timestamp_timestamp = message.timestamp
        return n

    if isinstance(message, SessionDisconnect):
        n.sessiondisconnect = True
        n.sessiondisconnect_timestamp = message.timestamp
        return n

    if isinstance(message, SessionStart):
        n.sessionstart_trackerversion = message.tracker_version
        n.sessionstart_revid = message.rev_id
        n.sessionstart_timestamp = message.timestamp
        n.sessionstart_useruuid = message.user_uuid
        n.sessionstart_useragent = message.user_agent
        n.sessionstart_useros = message.user_os
        n.sessionstart_userosversion = message.user_os_version
        n.sessionstart_userbrowser = message.user_browser
        n.sessionstart_userbrowserversion = message.user_browser_version
        n.sessionstart_userdevice = message.user_device
        n.sessionstart_userdevicetype = message.user_device_type
        n.sessionstart_userdevicememorysize = message.user_device_memory_size
        n.sessionstart_userdeviceheapsize = message.user_device_heap_size
        n.sessionstart_usercountry = message.user_country
        return n

    if isinstance(message, SetViewportSize):
        n.setviewportsize_width = message.width
        n.setviewportsize_height = message.height
        return n

    if isinstance(message, SetViewportScroll):
        n.setviewportscroll_x = message.x
        n.setviewportscroll_y = message.y
        return n

    if isinstance(message, SetNodeScroll):
        n.setnodescroll_id = message.id
        n.setnodescroll_x = message.x
        n.setnodescroll_y = message.y
        return n

    if isinstance(message, ConsoleLog):
        n.consolelog_level = message.level
        n.consolelog_value = message.value
        return n

    if isinstance(message, PageLoadTiming):
        n.pageloadtiming_requeststart = message.request_start
        n.pageloadtiming_responsestart = message.response_start
        n.pageloadtiming_responseend = message.response_end
        n.pageloadtiming_domcontentloadedeventstart = message.dom_content_loaded_event_start
        n.pageloadtiming_domcontentloadedeventend = message.dom_content_loaded_event_end
        n.pageloadtiming_loadeventstart = message.load_event_start
        n.pageloadtiming_loadeventend = message.load_event_end
        n.pageloadtiming_firstpaint = message.first_paint
        n.pageloadtiming_firstcontentfulpaint = message.first_contentful_paint
        return n

    if isinstance(message, PageRenderTiming):
        n.pagerendertiming_speedindex = message.speed_index
        n.pagerendertiming_visuallycomplete = message.visually_complete
        n.pagerendertiming_timetointeractive = message.time_to_interactive
        return n

    if isinstance(message, ResourceTiming):
        n.resourcetiming_timestamp = message.timestamp
        n.resourcetiming_duration = message.duration
        n.resourcetiming_ttfb = message.ttfb
        n.resourcetiming_headersize = message.header_size
        n.resourcetiming_encodedbodysize = message.encoded_body_size
        n.resourcetiming_decodedbodysize = message.decoded_body_size
        n.resourcetiming_url = message.url
        n.resourcetiming_initiator = message.initiator
        return n

    if isinstance(message, JSException):
        n.jsexception_name = message.name
        n.jsexception_message = message.message
        n.jsexception_payload = message.payload
        return n

    if isinstance(message, RawErrorEvent):
        n.rawerrorevent_timestamp = message.timestamp
        n.rawerrorevent_source = message.source
        n.rawerrorevent_name = message.name
        n.rawerrorevent_message = message.message
        n.rawerrorevent_payload = message.payload
        return n

    if isinstance(message, RawCustomEvent):
        n.rawcustomevent_name = message.name
        n.rawcustomevent_payload = message.payload
        return n

    if isinstance(message, UserID):
        n.userid_id = message.id
        return n

    if isinstance(message, UserAnonymousID):
        n.useranonymousid_id = message.id
        return n

    if isinstance(message, Metadata):
        n.metadata_key = message.key
        n.metadata_value = message.value
        return n

    if isinstance(message, PerformanceTrack):
        n.performancetrack_frames = message.frames
        n.performancetrack_ticks = message.ticks
        n.performancetrack_totaljsheapsize = message.total_js_heap_size
        n.performancetrack_usedjsheapsize = message.used_js_heap_size
        return n

    if isinstance(message, PerformanceTrackAggr):
        n.performancetrackaggr_timestampstart = message.timestamp_start
        n.performancetrackaggr_timestampend = message.timestamp_end
        n.performancetrackaggr_minfps = message.min_fps
        n.performancetrackaggr_avgfps = message.avg_fps
        n.performancetrackaggr_maxfps = message.max_fps
        n.performancetrackaggr_mincpu = message.min_cpu
        n.performancetrackaggr_avgcpu = message.avg_cpu
        n.performancetrackaggr_maxcpu = message.max_cpu
        n.performancetrackaggr_mintotaljsheapsize = message.min_total_js_heap_size
        n.performancetrackaggr_avgtotaljsheapsize = message.avg_total_js_heap_size
        n.performancetrackaggr_maxtotaljsheapsize = message.max_total_js_heap_size
        n.performancetrackaggr_minusedjsheapsize = message.min_used_js_heap_size
        n.performancetrackaggr_avgusedjsheapsize = message.avg_used_js_heap_size
        n.performancetrackaggr_maxusedjsheapsize = message.max_used_js_heap_size
        return n

    if isinstance(message, ConnectionInformation):
        n.connectioninformation_downlink = message.downlink
        n.connectioninformation_type = message.type
        return n

    if isinstance(message, PageEvent):
        n.pageevent_messageid = message.message_id
        n.pageevent_timestamp = message.timestamp
        n.pageevent_url = message.url
        n.pageevent_referrer = message.referrer
        n.pageevent_loaded = message.loaded
        n.pageevent_requeststart = message.request_start
        n.pageevent_responsestart = message.response_start
        n.pageevent_responseend = message.response_end
        n.pageevent_domcontentloadedeventstart = message.dom_content_loaded_event_start
        n.pageevent_domcontentloadedeventend = message.dom_content_loaded_event_end
        n.pageevent_loadeventstart = message.load_event_start
        n.pageevent_loadeventend = message.load_event_end
        n.pageevent_firstpaint = message.first_paint
        n.pageevent_firstcontentfulpaint = message.first_contentful_paint
        n.pageevent_speedindex = message.speed_index
        return n

    if isinstance(message, InputEvent):
        n.inputevent_messageid = message.message_id
        n.inputevent_timestamp = message.timestamp
        n.inputevent_value = message.value
        n.inputevent_valuemasked = message.value_masked
        n.inputevent_label = message.label
        return n

    if isinstance(message, ClickEvent):
        n.clickevent_messageid = message.message_id
        n.clickevent_timestamp = message.timestamp
        n.clickevent_hesitationtime = message.hesitation_time
        n.clickevent_label = message.label
        return n

    if isinstance(message, ErrorEvent):
        n.errorevent_messageid = message.message_id
        n.errorevent_timestamp = message.timestamp
        n.errorevent_source = message.source
        n.errorevent_name = message.name
        n.errorevent_message = message.message
        n.errorevent_payload = message.payload
        return n

    if isinstance(message, ResourceEvent):
        n.resourceevent_messageid = message.message_id
        n.resourceevent_timestamp = message.timestamp
        n.resourceevent_duration = message.duration
        n.resourceevent_ttfb = message.ttfb
        n.resourceevent_headersize = message.header_size
        n.resourceevent_encodedbodysize = message.encoded_body_size
        n.resourceevent_decodedbodysize = message.decoded_body_size
        n.resourceevent_url = message.url
        n.resourceevent_type = message.type
        n.resourceevent_success = message.success
        n.resourceevent_method = message.method
        n.resourceevent_status = message.status
        return n

    if isinstance(message, CustomEvent):
        n.customevent_messageid = message.message_id
        n.customevent_timestamp = message.timestamp
        n.customevent_name = message.name
        n.customevent_payload = message.payload
        return n

    if isinstance(message, Fetch):
        n.fetch_method = message.method
        n.fetch_url = message.url
        n.fetch_request = message.request
        n.fetch_status = message.status
        n.fetch_timestamp = message.timestamp
        n.fetch_duration = message.duration
        return n

    if isinstance(message, Profiler):
        n.profiler_name = message.name
        n.profiler_duration = message.duration
        n.profiler_args = message.args
        n.profiler_result = message.result
        return n

    if isinstance(message, GraphQL):
        n.graphql_operationkind = message.operation_kind
        n.graphql_operationname = message.operation_name
        n.graphql_variables = message.variables
        n.graphql_response = message.response
        return n

    if isinstance(message, GraphQLEvent):
        n.graphqlevent_messageid = message.message_id
        n.graphqlevent_timestamp = message.timestamp
        n.graphqlevent_name = message.name
        return n

    if isinstance(message, DomDrop):
        n.domdrop_timestamp = message.timestamp
        return n

    if isinstance(message, MouseClick):
        n.mouseclick_id = message.id
        n.mouseclick_hesitationtime = message.hesitation_time
        n.mouseclick_label = message.label
        return n

    if isinstance(message, SetPageLocation):
        n.setpagelocation_url = message.url
        n.setpagelocation_referrer = message.referrer
        n.setpagelocation_navigationstart = message.navigation_start
        return n

    if isinstance(message, MouseMove):
        n.mousemove_x = message.x
        n.mousemove_y = message.y
        return n

    if isinstance(message, LongTask):
        # NOTE(review): mixed longtasks_/longtask_ column prefixes below
        # mirror the DetailedEvent model as-is — confirm before renaming.
        n.longtasks_timestamp = message.timestamp
        n.longtasks_duration = message.duration
        n.longtask_context = message.context
        n.longtask_containertype = message.container_type
        n.longtasks_containersrc = message.container_src
        n.longtasks_containerid = message.container_id
        n.longtasks_containername = message.container_name
        return n

    if isinstance(message, SetNodeURLBasedAttribute):
        n.setnodeurlbasedattribute_id = message.id
        n.setnodeurlbasedattribute_name = message.name
        n.setnodeurlbasedattribute_value = message.value
        n.setnodeurlbasedattribute_baseurl = message.base_url
        return n

    if isinstance(message, SetStyleData):
        n.setstyledata_id = message.id
        n.setstyledata_data = message.data
        n.setstyledata_baseurl = message.base_url
        return n

    if isinstance(message, IssueEvent):
        n.issueevent_messageid = message.message_id
        n.issueevent_timestamp = message.timestamp
        n.issueevent_type = message.type
        n.issueevent_contextstring = message.context_string
        n.issueevent_context = message.context
        n.issueevent_payload = message.payload
        return n

    if isinstance(message, TechnicalInfo):
        n.technicalinfo_type = message.type
        n.technicalinfo_value = message.value
        return n

    if isinstance(message, CustomIssue):
        n.customissue_name = message.name
        n.customissue_payload = message.payload
        return n

    if isinstance(message, PageClose):
        n.pageclose = True
        return n

    if isinstance(message, IOSSessionStart):
        n.iossessionstart_timestamp = message.timestamp
        n.iossessionstart_projectid = message.project_id
        n.iossessionstart_trackerversion = message.tracker_version
        n.iossessionstart_revid = message.rev_id
        n.iossessionstart_useruuid = message.user_uuid
        n.iossessionstart_useros = message.user_os
        n.iossessionstart_userosversion = message.user_os_version
        n.iossessionstart_userdevice = message.user_device
        n.iossessionstart_userdevicetype = message.user_device_type
        n.iossessionstart_usercountry = message.user_country
        return n

    if isinstance(message, IOSSessionEnd):
        n.iossessionend_timestamp = message.timestamp
        return n

    if isinstance(message, IOSMetadata):
        n.iosmetadata_timestamp = message.timestamp
        n.iosmetadata_length = message.length
        n.iosmetadata_key = message.key
        n.iosmetadata_value = message.value
        return n

    if isinstance(message, IOSUserID):
        n.iosuserid_timestamp = message.timestamp
        n.iosuserid_length = message.length
        n.iosuserid_value = message.value
        return n

    if isinstance(message, IOSUserAnonymousID):
        n.iosuseranonymousid_timestamp = message.timestamp
        n.iosuseranonymousid_length = message.length
        n.iosuseranonymousid_value = message.value
        return n

    if isinstance(message, IOSScreenLeave):
        n.iosscreenleave_timestamp = message.timestamp
        n.iosscreenleave_length = message.length
        n.iosscreenleave_title = message.title
        n.iosscreenleave_viewname = message.view_name
        return n

    if isinstance(message, IOSLog):
        n.ioslog_timestamp = message.timestamp
        n.ioslog_length = message.length
        n.ioslog_severity = message.severity
        n.ioslog_content = message.content
        return n

    if isinstance(message, IOSInternalError):
        n.iosinternalerror_timestamp = message.timestamp
        n.iosinternalerror_length = message.length
        n.iosinternalerror_content = message.content
        return n

    if isinstance(message, IOSPerformanceAggregated):
        n.iosperformanceaggregated_timestampstart = message.timestamp_start
        n.iosperformanceaggregated_timestampend = message.timestamp_end
        n.iosperformanceaggregated_minfps = message.min_fps
        n.iosperformanceaggregated_avgfps = message.avg_fps
        n.iosperformanceaggregated_maxfps = message.max_fps
        n.iosperformanceaggregated_mincpu = message.min_cpu
        n.iosperformanceaggregated_avgcpu = message.avg_cpu
        n.iosperformanceaggregated_maxcpu = message.max_cpu
        n.iosperformanceaggregated_minmemory = message.min_memory
        n.iosperformanceaggregated_avgmemory = message.avg_memory
        n.iosperformanceaggregated_maxmemory = message.max_memory
        n.iosperformanceaggregated_minbattery = message.min_battery
        n.iosperformanceaggregated_avgbattery = message.avg_battery
        n.iosperformanceaggregated_maxbattery = message.max_battery
        return n
    return None
diff --git a/ee/connectors/main.py b/ee/connectors/main.py
new file mode 100644
index 000000000..57349f6e9
--- /dev/null
+++ b/ee/connectors/main.py
@@ -0,0 +1,121 @@
+import os
+from kafka import KafkaConsumer
+from datetime import datetime
+from collections import defaultdict
+
+from msgcodec.codec import MessageCodec
+from msgcodec.messages import SessionEnd
+from db.api import DBConnection
+from db.models import events_detailed_table_name, events_table_name, sessions_table_name, conf
+from db.writer import insert_batch
+from handler import handle_message, handle_normal_message, handle_session
+
DATABASE = os.environ['DATABASE_NAME']
LEVEL = conf[DATABASE]['level']

db = DBConnection(DATABASE)

# Pick the destination events table from the configured detail level.
if LEVEL == 'detailed':
    table_name = events_detailed_table_name
elif LEVEL == 'normal':
    table_name = events_table_name
else:
    # bugfix: previously an unknown level left table_name undefined and the
    # consumer crashed later with a NameError; fail fast at startup instead.
    raise ValueError(f"unsupported connector level {LEVEL!r} for {DATABASE!r}")
+
+
def main():
    """Consume tracker messages from Kafka and batch-insert them.

    Maintains two buffers: `batch` (per-message event rows) and
    `sessions_batch` (aggregated sessions, flushed when their SessionEnd
    arrives). Offsets are committed only after an event batch is inserted
    (auto-commit is disabled); NOTE(review): session inserts alone do not
    trigger a commit — confirm this is intended.
    """
    batch_size = 4000
    sessions_batch_size = 400
    batch = []
    # Missing keys yield None so handle_session creates a fresh aggregate.
    sessions = defaultdict(lambda: None)
    sessions_batch = []

    codec = MessageCodec()
    consumer = KafkaConsumer(security_protocol="SSL",
                             bootstrap_servers=[os.environ['KAFKA_SERVER_1'],
                                                os.environ['KAFKA_SERVER_2']],
                             group_id=f"connector_{DATABASE}",
                             auto_offset_reset="earliest",
                             enable_auto_commit=False)

    consumer.subscribe(topics=["events", "messages"])
    print("Kafka consumer subscribed")
    for msg in consumer:
        message = codec.decode(msg.value)
        if message is None:
            # Undecodable payload; skip it.
            print('-')
            continue

        if LEVEL == 'detailed':
            n = handle_message(message)
        elif LEVEL == 'normal':
            n = handle_normal_message(message)

        # The Kafka message key carries the session id.
        session_id = codec.decode_key(msg.key)
        sessions[session_id] = handle_session(sessions[session_id], message)
        if sessions[session_id]:
            sessions[session_id].sessionid = session_id

        # put in a batch for insertion if received a SessionEnd
        if isinstance(message, SessionEnd):
            if sessions[session_id]:
                sessions_batch.append(sessions[session_id])

            # try to insert sessions
            if len(sessions_batch) >= sessions_batch_size:
                attempt_session_insert(sessions_batch)
                # Drop flushed sessions from the in-memory cache.
                for s in sessions_batch:
                    try:
                        del sessions[s.sessionid]
                    except KeyError as e:
                        print(repr(e))
                sessions_batch = []

        if n:
            n.sessionid = session_id
            n.received_at = int(datetime.now().timestamp() * 1000)
            n.batch_order_number = len(batch)
            batch.append(n)
        else:
            continue

        # insert a batch of events
        if len(batch) >= batch_size:
            attempt_batch_insert(batch)
            batch = []
            consumer.commit()
            print("sessions in cache:", len(sessions))
+
+
def attempt_session_insert(sess_batch):
    """Best-effort insert of a batch of aggregated sessions.

    Errors are logged and swallowed so one bad batch does not stop the
    consumer loop; the broad Exception catch is the loop's safety net.
    """
    if sess_batch:
        try:
            print("inserting sessions...")
            insert_batch(db, sess_batch, table=sessions_table_name, level='sessions')
            # typo fix: "succesfully" -> "successfully"
            print("inserted sessions successfully")
        except TypeError as e:
            print("Type conversion error")
            print(repr(e))
        except ValueError as e:
            print("Message value could not be processed or inserted correctly")
            print(repr(e))
        except Exception as e:
            print(repr(e))
+
+
def attempt_batch_insert(batch):
    """Best-effort insert of a batch of event rows.

    Mirrors attempt_session_insert: errors are logged and swallowed so the
    consumer loop keeps running.
    """
    try:
        print("inserting...")
        insert_batch(db=db, batch=batch, table=table_name, level=LEVEL)
        # typo fix: "succesfully" -> "successfully"
        print("inserted successfully")
    except TypeError as e:
        print("Type conversion error")
        print(repr(e))
    except ValueError as e:
        print("Message value could not be processed or inserted correctly")
        print(repr(e))
    except Exception as e:
        print(repr(e))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ee/connectors/msgcodec/codec.py b/ee/connectors/msgcodec/codec.py
new file mode 100644
index 000000000..18f074a33
--- /dev/null
+++ b/ee/connectors/msgcodec/codec.py
@@ -0,0 +1,670 @@
+import io
+
+from msgcodec.messages import *
+
+
class Codec:
    """
    Implements encode/decode primitives for the tracker's binary protocol
    (varint-based, Go-style encoding).
    """

    @staticmethod
    def read_boolean(reader: io.BytesIO):
        """Read one byte; any non-zero value is True.

        bugfix: the raw ``bytes`` object was previously compared to the
        int ``1``, which is always False in Python 3, so every decoded
        boolean came out False.
        """
        b = reader.read(1)
        return len(b) == 1 and b[0] != 0

    @staticmethod
    def read_uint(reader: io.BytesIO):
        """Decode an unsigned varint (LEB128, 7 bits per byte,
        little-endian groups), as in Go's binary.ReadUvarint."""
        x = 0  # the result
        s = 0  # current shift
        i = 0  # byte index (max 9 for uint64)
        while True:
            b = reader.read(1)
            num = int.from_bytes(b, "big", signed=False)

            if num < 0x80:
                # bugfix: the overflow guard used bitwise |/& with chained
                # comparisons and could never fire; this is the intended
                # uint64 bound check.
                if i > 9 or (i == 9 and num > 1):
                    raise OverflowError()
                return int(x | num << s)
            x |= (num & 0x7f) << s
            s += 7
            i += 1

    @staticmethod
    def read_int(reader: io.BytesIO) -> int:
        """Decode a zigzag-encoded signed varint
        (0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2, ...)."""
        ux = Codec.read_uint(reader)
        x = int(ux >> 1)

        if ux & 1 != 0:
            x = - x - 1
        return x

    @staticmethod
    def read_string(reader: io.BytesIO) -> str:
        """Read a uint length prefix followed by that many UTF-8 bytes.

        Invalid sequences are replaced rather than raised; NULs are mapped
        to U+FFFD as well (downstream storage rejects NUL bytes —
        presumably Postgres; TODO confirm).
        """
        length = Codec.read_uint(reader)
        s = reader.read(length)
        try:
            return s.decode("utf-8", errors="replace").replace("\x00", "\uFFFD")
        except UnicodeDecodeError:
            return None
+
+
+class MessageCodec(Codec):
+
+ def encode(self, m: Message) -> bytes:
+ ...
+
+ def decode(self, b: bytes) -> Message:
+ reader = io.BytesIO(b)
+ message_id = self.read_message_id(reader)
+
+ if message_id == 0:
+ return Timestamp(
+ timestamp=self.read_uint(reader)
+ )
+ if message_id == 1:
+ return SessionStart(
+ timestamp=self.read_uint(reader),
+ project_id=self.read_uint(reader),
+ tracker_version=self.read_string(reader),
+ rev_id=self.read_string(reader),
+ user_uuid=self.read_string(reader),
+ user_agent=self.read_string(reader),
+ user_os=self.read_string(reader),
+ user_os_version=self.read_string(reader),
+ user_browser=self.read_string(reader),
+ user_browser_version=self.read_string(reader),
+ user_device=self.read_string(reader),
+ user_device_type=self.read_string(reader),
+ user_device_memory_size=self.read_uint(reader),
+ user_device_heap_size=self.read_uint(reader),
+ user_country=self.read_string(reader)
+ )
+
+ if message_id == 2:
+ return SessionDisconnect(
+ timestamp=self.read_uint(reader)
+ )
+
+ if message_id == 3:
+ return SessionEnd(
+ timestamp=self.read_uint(reader)
+ )
+
+ if message_id == 4:
+ return SetPageLocation(
+ url=self.read_string(reader),
+ referrer=self.read_string(reader),
+ navigation_start=self.read_uint(reader)
+ )
+
+ if message_id == 5:
+ return SetViewportSize(
+ width=self.read_uint(reader),
+ height=self.read_uint(reader)
+ )
+
+ if message_id == 6:
+ return SetViewportScroll(
+ x=self.read_int(reader),
+ y=self.read_int(reader)
+ )
+
+ if message_id == 7:
+ return CreateDocument()
+
+ if message_id == 8:
+ return CreateElementNode(
+ id=self.read_uint(reader),
+ parent_id=self.read_uint(reader),
+ index=self.read_uint(reader),
+ tag=self.read_string(reader),
+ svg=self.read_boolean(reader),
+ )
+
+ if message_id == 9:
+ return CreateTextNode(
+ id=self.read_uint(reader),
+ parent_id=self.read_uint(reader),
+ index=self.read_uint(reader)
+ )
+
+ if message_id == 10:
+ return MoveNode(
+ id=self.read_uint(reader),
+ parent_id=self.read_uint(reader),
+ index=self.read_uint(reader)
+ )
+
+ if message_id == 11:
+ return RemoveNode(
+ id=self.read_uint(reader)
+ )
+
+ if message_id == 12:
+ return SetNodeAttribute(
+ id=self.read_uint(reader),
+ name=self.read_string(reader),
+ value=self.read_string(reader)
+ )
+
+ if message_id == 13:
+ return RemoveNodeAttribute(
+ id=self.read_uint(reader),
+ name=self.read_string(reader)
+ )
+
+ if message_id == 14:
+ return SetNodeData(
+ id=self.read_uint(reader),
+ data=self.read_string(reader)
+ )
+
+ if message_id == 15:
+ return SetCSSData(
+ id=self.read_uint(reader),
+ data=self.read_string(reader)
+ )
+
+ if message_id == 16:
+ return SetNodeScroll(
+ id=self.read_uint(reader),
+ x=self.read_int(reader),
+ y=self.read_int(reader),
+ )
+
+ if message_id == 17:
+ return SetInputTarget(
+ id=self.read_uint(reader),
+ label=self.read_string(reader)
+ )
+
+ if message_id == 18:
+ return SetInputValue(
+ id=self.read_uint(reader),
+ value=self.read_string(reader),
+ mask=self.read_int(reader),
+ )
+
+ if message_id == 19:
+ return SetInputChecked(
+ id=self.read_uint(reader),
+ checked=self.read_boolean(reader)
+ )
+
+ if message_id == 20:
+ return MouseMove(
+ x=self.read_uint(reader),
+ y=self.read_uint(reader)
+ )
+
+ if message_id == 21:
+ return MouseClick(
+ id=self.read_uint(reader),
+ hesitation_time=self.read_uint(reader),
+ label=self.read_string(reader)
+ )
+
+ if message_id == 22:
+ return ConsoleLog(
+ level=self.read_string(reader),
+ value=self.read_string(reader)
+ )
+
+ if message_id == 23:
+ return PageLoadTiming(
+ request_start=self.read_uint(reader),
+ response_start=self.read_uint(reader),
+ response_end=self.read_uint(reader),
+ dom_content_loaded_event_start=self.read_uint(reader),
+ dom_content_loaded_event_end=self.read_uint(reader),
+ load_event_start=self.read_uint(reader),
+ load_event_end=self.read_uint(reader),
+ first_paint=self.read_uint(reader),
+ first_contentful_paint=self.read_uint(reader)
+ )
+
+ if message_id == 24:
+ return PageRenderTiming(
+ speed_index=self.read_uint(reader),
+ visually_complete=self.read_uint(reader),
+ time_to_interactive=self.read_uint(reader),
+ )
+
+ if message_id == 25:
+ return JSException(
+ name=self.read_string(reader),
+ message=self.read_string(reader),
+ payload=self.read_string(reader)
+ )
+
+ if message_id == 26:
+ return RawErrorEvent(
+ timestamp=self.read_uint(reader),
+ source=self.read_string(reader),
+ name=self.read_string(reader),
+ message=self.read_string(reader),
+ payload=self.read_string(reader)
+ )
+
+ if message_id == 27:
+ return RawCustomEvent(
+ name=self.read_string(reader),
+ payload=self.read_string(reader)
+ )
+
+ if message_id == 28:
+ return UserID(
+ id=self.read_string(reader)
+ )
+
+ if message_id == 29:
+ return UserAnonymousID(
+ id=self.read_string(reader)
+ )
+
+ if message_id == 30:
+ return Metadata(
+ key=self.read_string(reader),
+ value=self.read_string(reader)
+ )
+
+ if message_id == 31:
+ return PageEvent(
+ message_id=self.read_uint(reader),
+ timestamp=self.read_uint(reader),
+ url=self.read_string(reader),
+ referrer=self.read_string(reader),
+ loaded=self.read_boolean(reader),
+ request_start=self.read_uint(reader),
+ response_start=self.read_uint(reader),
+ response_end=self.read_uint(reader),
+ dom_content_loaded_event_start=self.read_uint(reader),
+ dom_content_loaded_event_end=self.read_uint(reader),
+ load_event_start=self.read_uint(reader),
+ load_event_end=self.read_uint(reader),
+ first_paint=self.read_uint(reader),
+ first_contentful_paint=self.read_uint(reader),
+ speed_index=self.read_uint(reader),
+ visually_complete=self.read_uint(reader),
+ time_to_interactive=self.read_uint(reader)
+ )
+
+ if message_id == 32:
+ return InputEvent(
+ message_id=self.read_uint(reader),
+ timestamp=self.read_uint(reader),
+ value=self.read_string(reader),
+ value_masked=self.read_boolean(reader),
+ label=self.read_string(reader),
+ )
+
+ if message_id == 33:
+ return ClickEvent(
+ message_id=self.read_uint(reader),
+ timestamp=self.read_uint(reader),
+ hesitation_time=self.read_uint(reader),
+ label=self.read_string(reader)
+ )
+
+ if message_id == 34:
+ return ErrorEvent(
+ message_id=self.read_uint(reader),
+ timestamp=self.read_uint(reader),
+ source=self.read_string(reader),
+ name=self.read_string(reader),
+ message=self.read_string(reader),
+ payload=self.read_string(reader)
+ )
+
+ if message_id == 35:
+
+ message_id = self.read_uint(reader)
+ ts = self.read_uint(reader)
+ if ts > 9999999999999:
+ ts = None
+ return ResourceEvent(
+ message_id=message_id,
+ timestamp=ts,
+ duration=self.read_uint(reader),
+ ttfb=self.read_uint(reader),
+ header_size=self.read_uint(reader),
+ encoded_body_size=self.read_uint(reader),
+ decoded_body_size=self.read_uint(reader),
+ url=self.read_string(reader),
+ type=self.read_string(reader),
+ success=self.read_boolean(reader),
+ method=self.read_string(reader),
+ status=self.read_uint(reader)
+ )
+
+ if message_id == 36:
+ return CustomEvent(
+ message_id=self.read_uint(reader),
+ timestamp=self.read_uint(reader),
+ name=self.read_string(reader),
+ payload=self.read_string(reader)
+ )
+
+ if message_id == 37:
+ return CSSInsertRule(
+ id=self.read_uint(reader),
+ rule=self.read_string(reader),
+ index=self.read_uint(reader)
+ )
+
+ if message_id == 38:
+ return CSSDeleteRule(
+ id=self.read_uint(reader),
+ index=self.read_uint(reader)
+ )
+
+ if message_id == 39:
+ return Fetch(
+ method=self.read_string(reader),
+ url=self.read_string(reader),
+ request=self.read_string(reader),
+ response=self.read_string(reader),
+ status=self.read_uint(reader),
+ timestamp=self.read_uint(reader),
+ duration=self.read_uint(reader)
+ )
+
+ if message_id == 40:
+ return Profiler(
+ name=self.read_string(reader),
+ duration=self.read_uint(reader),
+ args=self.read_string(reader),
+ result=self.read_string(reader)
+ )
+
+ if message_id == 41:
+ return OTable(
+ key=self.read_string(reader),
+ value=self.read_string(reader)
+ )
+
+ if message_id == 42:
+ return StateAction(
+ type=self.read_string(reader)
+ )
+
+ if message_id == 43:
+ return StateActionEvent(
+ message_id=self.read_uint(reader),
+ timestamp=self.read_uint(reader),
+ type=self.read_string(reader)
+ )
+
+ if message_id == 44:
+ return Redux(
+ action=self.read_string(reader),
+ state=self.read_string(reader),
+ duration=self.read_uint(reader)
+ )
+
+ if message_id == 45:
+ return Vuex(
+ mutation=self.read_string(reader),
+ state=self.read_string(reader),
+ )
+
+ if message_id == 46:
+ return MobX(
+ type=self.read_string(reader),
+ payload=self.read_string(reader),
+ )
+
+ if message_id == 47:
+ return NgRx(
+ action=self.read_string(reader),
+ state=self.read_string(reader),
+ duration=self.read_uint(reader)
+ )
+
+ if message_id == 48:
+ return GraphQL(
+ operation_kind=self.read_string(reader),
+ operation_name=self.read_string(reader),
+ variables=self.read_string(reader),
+ response=self.read_string(reader)
+ )
+
+ if message_id == 49:
+ return PerformanceTrack(
+ frames=self.read_int(reader),
+ ticks=self.read_int(reader),
+ total_js_heap_size=self.read_uint(reader),
+ used_js_heap_size=self.read_uint(reader)
+ )
+
+ if message_id == 50:
+ return GraphQLEvent(
+ message_id=self.read_uint(reader),
+ timestamp=self.read_uint(reader),
+ name=self.read_string(reader)
+ )
+
+ if message_id == 52:
+ return DomDrop(
+ timestamp=self.read_uint(reader)
+ )
+
+ if message_id == 53:
+ return ResourceTiming(
+ timestamp=self.read_uint(reader),
+ duration=self.read_uint(reader),
+ ttfb=self.read_uint(reader),
+ header_size=self.read_uint(reader),
+ encoded_body_size=self.read_uint(reader),
+ decoded_body_size=self.read_uint(reader),
+ url=self.read_string(reader),
+ initiator=self.read_string(reader)
+ )
+
+ if message_id == 54:
+ return ConnectionInformation(
+ downlink=self.read_uint(reader),
+ type=self.read_string(reader)
+ )
+
+ if message_id == 55:
+ return SetPageVisibility(
+ hidden=self.read_boolean(reader)
+ )
+
+ if message_id == 56:
+ return PerformanceTrackAggr(
+ timestamp_start=self.read_uint(reader),
+ timestamp_end=self.read_uint(reader),
+ min_fps=self.read_uint(reader),
+ avg_fps=self.read_uint(reader),
+ max_fps=self.read_uint(reader),
+ min_cpu=self.read_uint(reader),
+ avg_cpu=self.read_uint(reader),
+ max_cpu=self.read_uint(reader),
+ min_total_js_heap_size=self.read_uint(reader),
+ avg_total_js_heap_size=self.read_uint(reader),
+ max_total_js_heap_size=self.read_uint(reader),
+ min_used_js_heap_size=self.read_uint(reader),
+ avg_used_js_heap_size=self.read_uint(reader),
+ max_used_js_heap_size=self.read_uint(reader)
+ )
+
+ if message_id == 59:
+ return LongTask(
+ timestamp=self.read_uint(reader),
+ duration=self.read_uint(reader),
+ context=self.read_uint(reader),
+ container_type=self.read_uint(reader),
+ container_src=self.read_string(reader),
+ container_id=self.read_string(reader),
+ container_name=self.read_string(reader)
+ )
+
+ if message_id == 60:
+ return SetNodeURLBasedAttribute(
+ id=self.read_uint(reader),
+ name=self.read_string(reader),
+ value=self.read_string(reader),
+ base_url=self.read_string(reader)
+ )
+
+ if message_id == 61:
+ return SetStyleData(
+ id=self.read_uint(reader),
+ data=self.read_string(reader),
+ base_url=self.read_string(reader)
+ )
+
+ if message_id == 62:
+ return IssueEvent(
+ message_id=self.read_uint(reader),
+ timestamp=self.read_uint(reader),
+ type=self.read_string(reader),
+ context_string=self.read_string(reader),
+ context=self.read_string(reader),
+ payload=self.read_string(reader)
+ )
+
+ if message_id == 63:
+ return TechnicalInfo(
+ type=self.read_string(reader),
+ value=self.read_string(reader)
+ )
+
+ if message_id == 64:
+ return CustomIssue(
+ name=self.read_string(reader),
+ payload=self.read_string(reader)
+ )
+
+ if message_id == 65:
+ return PageClose()
+
+ if message_id == 90:
+ return IOSSessionStart(
+ timestamp=self.read_uint(reader),
+ project_id=self.read_uint(reader),
+ tracker_version=self.read_string(reader),
+ rev_id=self.read_string(reader),
+ user_uuid=self.read_string(reader),
+ user_os=self.read_string(reader),
+ user_os_version=self.read_string(reader),
+ user_device=self.read_string(reader),
+ user_device_type=self.read_string(reader),
+ user_country=self.read_string(reader)
+ )
+
+ if message_id == 91:
+ return IOSSessionEnd(
+ timestamp=self.read_uint(reader)
+ )
+
+ if message_id == 92:
+ return IOSMetadata(
+ timestamp=self.read_uint(reader),
+ length=self.read_uint(reader),
+ key=self.read_string(reader),
+ value=self.read_string(reader)
+ )
+
+ if message_id == 94:
+ return IOSUserID(
+ timestamp=self.read_uint(reader),
+ length=self.read_uint(reader),
+ value=self.read_string(reader)
+ )
+
+ if message_id == 95:
+ return IOSUserAnonymousID(
+ timestamp=self.read_uint(reader),
+ length=self.read_uint(reader),
+ value=self.read_string(reader)
+ )
+
+ if message_id == 99:
+ return IOSScreenLeave(
+ timestamp=self.read_uint(reader),
+ length=self.read_uint(reader),
+ title=self.read_string(reader),
+ view_name=self.read_string(reader)
+ )
+
+ if message_id == 103:
+ return IOSLog(
+ timestamp=self.read_uint(reader),
+ length=self.read_uint(reader),
+ severity=self.read_string(reader),
+ content=self.read_string(reader)
+ )
+
+ if message_id == 104:
+ return IOSInternalError(
+ timestamp=self.read_uint(reader),
+ length=self.read_uint(reader),
+ content=self.read_string(reader)
+ )
+
+ if message_id == 110:
+ return IOSPerformanceAggregated(
+ timestamp_start=self.read_uint(reader),
+ timestamp_end=self.read_uint(reader),
+ min_fps=self.read_uint(reader),
+ avg_fps=self.read_uint(reader),
+ max_fps=self.read_uint(reader),
+ min_cpu=self.read_uint(reader),
+ avg_cpu=self.read_uint(reader),
+ max_cpu=self.read_uint(reader),
+ min_memory=self.read_uint(reader),
+ avg_memory=self.read_uint(reader),
+ max_memory=self.read_uint(reader),
+ min_battery=self.read_uint(reader),
+ avg_battery=self.read_uint(reader),
+ max_battery=self.read_uint(reader)
+ )
+
def read_message_id(self, reader: io.BytesIO) -> int:
    """Return the message id: the uint encoded at the reader's current position."""
    return self.read_uint(reader)
+
@staticmethod
def check_message_id(b: bytes) -> int:
    """
    Peek at the message id of a raw message without consuming any
    caller-held reader: wraps the bytes in a fresh BytesIO and decodes
    the leading uint (the id is encoded in the first byte(s)).
    """
    reader = io.BytesIO(b)
    id_ = Codec.read_uint(reader)

    return id_
+
+ @staticmethod
+ def decode_key(b) -> int:
+ """
+ Decode the message key (encoded with little endian)
+ """
+ try:
+ decoded = int.from_bytes(b, "little", signed=False)
+ except Exception as e:
+ raise UnicodeDecodeError(f"Error while decoding message key (SessionID) from {b}\n{e}")
+ return decoded
diff --git a/ee/connectors/msgcodec/messages.py b/ee/connectors/msgcodec/messages.py
new file mode 100644
index 000000000..c6e53b445
--- /dev/null
+++ b/ee/connectors/msgcodec/messages.py
@@ -0,0 +1,752 @@
+"""
+Representations of Kafka messages
+"""
+from abc import ABC
+
+
class Message(ABC):
    """Abstract base class for all decoded tracker messages.

    Each concrete subclass declares its wire message id in ``__id__`` and
    stores the decoded payload as plain instance attributes.
    """
    pass
+
+
class Timestamp(Message):
    """Message 0: a standalone timestamp marker."""

    __id__ = 0

    def __init__(self, timestamp):
        self.timestamp = timestamp
+
+
class SessionStart(Message):
    """Message 1: start of a web session.

    Carries the full client context: project, tracker/revision versions,
    user agent, OS, browser, device characteristics, and country.
    """

    __id__ = 1

    def __init__(self, timestamp, project_id, tracker_version, rev_id, user_uuid,
                 user_agent, user_os, user_os_version, user_browser, user_browser_version,
                 user_device, user_device_type, user_device_memory_size, user_device_heap_size,
                 user_country):
        self.timestamp = timestamp
        self.project_id = project_id
        self.tracker_version = tracker_version
        self.rev_id = rev_id
        self.user_uuid = user_uuid
        self.user_agent = user_agent
        self.user_os = user_os
        self.user_os_version = user_os_version
        self.user_browser = user_browser
        self.user_browser_version = user_browser_version
        self.user_device = user_device
        self.user_device_type = user_device_type
        self.user_device_memory_size = user_device_memory_size
        self.user_device_heap_size = user_device_heap_size
        self.user_country = user_country
+
+
+class SessionDisconnect(Message):
+ __id__ = 2
+
+ def __init__(self, timestamp):
+ self.timestamp = timestamp
+
+
class SessionEnd(Message):
    """Message 3: end of a web session."""

    __id__ = 3
    # NOTE(review): this is not redundant — a class-body __name__ makes the
    # name reachable as an *instance* attribute (msg.__name__); ordinary
    # classes expose __name__ only on the type itself. Only this message
    # type does this — presumably a consumer relies on it; confirm before
    # removing.
    __name__ = 'SessionEnd'

    def __init__(self, timestamp):
        self.timestamp = timestamp
+
+
+class SetPageLocation(Message):
+ __id__ = 4
+
+ def __init__(self, url, referrer, navigation_start):
+ self.url = url
+ self.referrer = referrer
+ self.navigation_start = navigation_start
+
+
+class SetViewportSize(Message):
+ __id__ = 5
+
+ def __init__(self, width, height):
+ self.width = width
+ self.height = height
+
+
+class SetViewportScroll(Message):
+ __id__ = 6
+
+ def __init__(self, x, y):
+ self.x = x
+ self.y = y
+
+
+class CreateDocument(Message):
+ __id__ = 7
+
+
class CreateElementNode(Message):
    """Message 8: creation of a DOM element node.

    Fields: node id, parent node id, position among the parent's children,
    tag name, and an SVG flag.
    """

    __id__ = 8

    def __init__(self, id, parent_id, index, tag, svg):
        self.id = id
        # Bug fix: the original line ended with a stray trailing comma
        # (`self.parent_id = parent_id,`), which stored a 1-tuple instead
        # of the scalar parent id — inconsistent with CreateTextNode and
        # MoveNode, which store the plain value.
        self.parent_id = parent_id
        self.index = index
        self.tag = tag
        self.svg = svg
+
+
+class CreateTextNode(Message):
+ __id__ = 9
+
+ def __init__(self, id, parent_id, index):
+ self.id = id
+ self.parent_id = parent_id
+ self.index = index
+
+
+class MoveNode(Message):
+ __id__ = 10
+
+ def __init__(self, id, parent_id, index):
+ self.id = id
+ self.parent_id = parent_id
+ self.index = index
+
+
+class RemoveNode(Message):
+ __id__ = 11
+
+ def __init__(self, id):
+ self.id = id
+
+
+class SetNodeAttribute(Message):
+ __id__ = 12
+
+ def __init__(self, id, name: str, value: str):
+ self.id = id
+ self.name = name
+ self.value = value
+
+
+class RemoveNodeAttribute(Message):
+ __id__ = 13
+
+ def __init__(self, id, name: str):
+ self.id = id
+ self.name = name
+
+
+class SetNodeData(Message):
+ __id__ = 14
+
+ def __init__(self, id, data: str):
+ self.id = id
+ self.data = data
+
+
+class SetCSSData(Message):
+ __id__ = 15
+
+ def __init__(self, id, data: str):
+ self.id = id
+ self.data = data
+
+
+class SetNodeScroll(Message):
+ __id__ = 16
+
+ def __init__(self, id, x: int, y: int):
+ self.id = id
+ self.x = x
+ self.y = y
+
+
+class SetInputTarget(Message):
+ __id__ = 17
+
+ def __init__(self, id, label: str):
+ self.id = id
+ self.label = label
+
+
+class SetInputValue(Message):
+ __id__ = 18
+
+ def __init__(self, id, value: str, mask: int):
+ self.id = id
+ self.value = value
+ self.mask = mask
+
+
+class SetInputChecked(Message):
+ __id__ = 19
+
+ def __init__(self, id, checked: bool):
+ self.id = id
+ self.checked = checked
+
+
+class MouseMove(Message):
+ __id__ = 20
+
+ def __init__(self, x, y):
+ self.x = x
+ self.y = y
+
+
+class MouseClick(Message):
+ __id__ = 21
+
+ def __init__(self, id, hesitation_time, label: str):
+ self.id = id
+ self.hesitation_time = hesitation_time
+ self.label = label
+
+
+class ConsoleLog(Message):
+ __id__ = 22
+
+ def __init__(self, level: str, value: str):
+ self.level = level
+ self.value = value
+
+
+class PageLoadTiming(Message):
+ __id__ = 23
+
+ def __init__(self, request_start, response_start, response_end, dom_content_loaded_event_start,
+ dom_content_loaded_event_end, load_event_start, load_event_end,
+ first_paint, first_contentful_paint):
+ self.request_start = request_start
+ self.response_start = response_start
+ self.response_end = response_end
+ self.dom_content_loaded_event_start = dom_content_loaded_event_start
+ self.dom_content_loaded_event_end = dom_content_loaded_event_end
+ self.load_event_start = load_event_start
+ self.load_event_end = load_event_end
+ self.first_paint = first_paint
+ self.first_contentful_paint = first_contentful_paint
+
+
+class PageRenderTiming(Message):
+ __id__ = 24
+
+ def __init__(self, speed_index, visually_complete, time_to_interactive):
+ self.speed_index = speed_index
+ self.visually_complete = visually_complete
+ self.time_to_interactive = time_to_interactive
+
+class JSException(Message):
+ __id__ = 25
+
+ def __init__(self, name: str, message: str, payload: str):
+ self.name = name
+ self.message = message
+ self.payload = payload
+
+
+class RawErrorEvent(Message):
+ __id__ = 26
+
+ def __init__(self, timestamp, source: str, name: str, message: str,
+ payload: str):
+ self.timestamp = timestamp
+ self.source = source
+ self.name = name
+ self.message = message
+ self.payload = payload
+
+
+class RawCustomEvent(Message):
+ __id__ = 27
+
+ def __init__(self, name: str, payload: str):
+ self.name = name
+ self.payload = payload
+
+
+class UserID(Message):
+ __id__ = 28
+
+ def __init__(self, id: str):
+ self.id = id
+
+
+class UserAnonymousID(Message):
+ __id__ = 29
+
+ def __init__(self, id: str):
+ self.id = id
+
+
+class Metadata(Message):
+ __id__ = 30
+
+ def __init__(self, key: str, value: str):
+ self.key = key
+ self.value = value
+
+
class PerformanceTrack(Message):
    # NOTE(review): this class is defined twice in this module — an
    # identical definition with __id__ = 49 appears again further down
    # (after GraphQL). The later definition wins at import time, so this
    # one is dead; it is also placed out of id order (49 between 30 and
    # 31). One of the two should be removed.
    __id__ = 49

    def __init__(self, frames: int, ticks: int, total_js_heap_size,
                 used_js_heap_size):
        self.frames = frames
        self.ticks = ticks
        self.total_js_heap_size = total_js_heap_size
        self.used_js_heap_size = used_js_heap_size
+
+
+class PageEvent(Message):
+ __id__ = 31
+
+ def __init__(self, message_id, timestamp, url: str, referrer: str,
+ loaded: bool, request_start, response_start, response_end,
+ dom_content_loaded_event_start, dom_content_loaded_event_end,
+ load_event_start, load_event_end, first_paint, first_contentful_paint,
+ speed_index, visually_complete, time_to_interactive):
+ self.message_id = message_id
+ self.timestamp = timestamp
+ self.url = url
+ self.referrer = referrer
+ self.loaded = loaded
+ self.request_start = request_start
+ self.response_start = response_start
+ self.response_end = response_end
+ self.dom_content_loaded_event_start = dom_content_loaded_event_start
+ self.dom_content_loaded_event_end = dom_content_loaded_event_end
+ self.load_event_start = load_event_start
+ self.load_event_end = load_event_end
+ self.first_paint = first_paint
+ self.first_contentful_paint = first_contentful_paint
+ self.speed_index = speed_index
+ self.visually_complete = visually_complete
+ self.time_to_interactive = time_to_interactive
+
+
+class InputEvent(Message):
+ __id__ = 32
+
+ def __init__(self, message_id, timestamp, value: str, value_masked: bool, label: str):
+ self.message_id = message_id
+ self.timestamp = timestamp
+ self.value = value
+ self.value_masked = value_masked
+ self.label = label
+
+
+class ClickEvent(Message):
+ __id__ = 33
+
+ def __init__(self, message_id, timestamp, hesitation_time, label: str):
+ self.message_id = message_id
+ self.timestamp = timestamp
+ self.hesitation_time = hesitation_time
+ self.label = label
+
+
+class ErrorEvent(Message):
+ __id__ = 34
+
+ def __init__(self, message_id, timestamp, source: str, name: str, message: str,
+ payload: str):
+ self.message_id = message_id
+ self.timestamp = timestamp
+ self.source = source
+ self.name = name
+ self.message = message
+ self.payload = payload
+
+
+class ResourceEvent(Message):
+ __id__ = 35
+
+ def __init__(self, message_id, timestamp, duration, ttfb, header_size, encoded_body_size,
+ decoded_body_size, url: str, type: str, success: bool, method: str, status):
+ self.message_id = message_id
+ self.timestamp = timestamp
+ self.duration = duration
+ self.ttfb = ttfb
+ self.header_size = header_size
+ self.encoded_body_size = encoded_body_size
+ self.decoded_body_size = decoded_body_size
+ self.url = url
+ self.type = type
+ self.success = success
+ self.method = method
+ self.status = status
+
+
+class CustomEvent(Message):
+ __id__ = 36
+
+ def __init__(self, message_id, timestamp, name: str, payload: str):
+ self.message_id = message_id
+ self.timestamp = timestamp
+ self.name = name
+ self.payload = payload
+
+
+class CSSInsertRule(Message):
+ __id__ = 37
+
+ def __init__(self, id, rule: str, index):
+ self.id = id
+ self.rule = rule
+ self.index = index
+
+
+class CSSDeleteRule(Message):
+ __id__ = 38
+
+ def __init__(self, id, index):
+ self.id = id
+ self.index = index
+
+
+class Fetch(Message):
+ __id__ = 39
+
+ def __init__(self, method: str, url: str, request: str, response: str, status,
+ timestamp, duration):
+ self.method = method
+ self.url = url
+ self.request = request
+ self.response = response
+ self.status = status
+ self.timestamp = timestamp
+ self.duration = duration
+
+
+class Profiler(Message):
+ __id__ = 40
+
+ def __init__(self, name: str, duration, args: str, result: str):
+ self.name = name
+ self.duration = duration
+ self.args = args
+ self.result = result
+
+
+class OTable(Message):
+ __id__ = 41
+
+ def __init__(self, key: str, value: str):
+ self.key = key
+ self.value = value
+
+
+class StateAction(Message):
+ __id__ = 42
+
+ def __init__(self, type: str):
+ self.type = type
+
+
+class StateActionEvent(Message):
+ __id__ = 43
+
+ def __init__(self, message_id, timestamp, type: str):
+ self.message_id = message_id
+ self.timestamp = timestamp
+ self.type = type
+
+
+class Redux(Message):
+ __id__ = 44
+
+ def __init__(self, action: str, state: str, duration):
+ self.action = action
+ self.state = state
+ self.duration = duration
+
+
+class Vuex(Message):
+ __id__ = 45
+
+ def __init__(self, mutation: str, state: str):
+ self.mutation = mutation
+ self.state = state
+
+
+class MobX(Message):
+ __id__ = 46
+
+ def __init__(self, type: str, payload: str):
+ self.type = type
+ self.payload = payload
+
+
+class NgRx(Message):
+ __id__ = 47
+
+ def __init__(self, action: str, state: str, duration):
+ self.action = action
+ self.state = state
+ self.duration = duration
+
+
+class GraphQL(Message):
+ __id__ = 48
+
+ def __init__(self, operation_kind: str, operation_name: str,
+ variables: str, response: str):
+ self.operation_kind = operation_kind
+ self.operation_name = operation_name
+ self.variables = variables
+ self.response = response
+
+
class PerformanceTrack(Message):
    """Message 49: per-interval performance sample (frames, ticks, JS heap)."""

    # NOTE(review): duplicate definition — an identical PerformanceTrack
    # (__id__ = 49) is already declared earlier in this module (after
    # Metadata). This later one is the definition that takes effect;
    # the earlier copy should be removed.
    __id__ = 49

    def __init__(self, frames: int, ticks: int,
                 total_js_heap_size, used_js_heap_size):
        self.frames = frames
        self.ticks = ticks
        self.total_js_heap_size = total_js_heap_size
        self.used_js_heap_size = used_js_heap_size
+
+
+class GraphQLEvent(Message):
+ __id__ = 50
+
+ def __init__(self, message_id, timestamp, name: str):
+ self.message_id = message_id
+ self.timestamp = timestamp
+ self.name = name
+
+
+class DomDrop(Message):
+ __id__ = 52
+
+ def __init__(self, timestamp):
+ self.timestamp = timestamp
+
+
+class ResourceTiming(Message):
+ __id__ = 53
+
+ def __init__(self, timestamp, duration, ttfb, header_size, encoded_body_size,
+ decoded_body_size, url, initiator):
+ self.timestamp = timestamp
+ self.duration = duration
+ self.ttfb = ttfb
+ self.header_size = header_size
+ self.encoded_body_size = encoded_body_size
+ self.decoded_body_size = decoded_body_size
+ self.url = url
+ self.initiator = initiator
+
+
+class ConnectionInformation(Message):
+ __id__ = 54
+
+ def __init__(self, downlink, type: str):
+ self.downlink = downlink
+ self.type = type
+
+
+class SetPageVisibility(Message):
+ __id__ = 55
+
+ def __init__(self, hidden: bool):
+ self.hidden = hidden
+
+
+class PerformanceTrackAggr(Message):
+ __id__ = 56
+
+ def __init__(self, timestamp_start, timestamp_end, min_fps, avg_fps,
+ max_fps, min_cpu, avg_cpu, max_cpu,
+ min_total_js_heap_size, avg_total_js_heap_size,
+ max_total_js_heap_size, min_used_js_heap_size,
+ avg_used_js_heap_size, max_used_js_heap_size
+ ):
+ self.timestamp_start = timestamp_start
+ self.timestamp_end = timestamp_end
+ self.min_fps = min_fps
+ self.avg_fps = avg_fps
+ self.max_fps = max_fps
+ self.min_cpu = min_cpu
+ self.avg_cpu = avg_cpu
+ self.max_cpu = max_cpu
+ self.min_total_js_heap_size = min_total_js_heap_size
+ self.avg_total_js_heap_size = avg_total_js_heap_size
+ self.max_total_js_heap_size = max_total_js_heap_size
+ self.min_used_js_heap_size = min_used_js_heap_size
+ self.avg_used_js_heap_size = avg_used_js_heap_size
+ self.max_used_js_heap_size = max_used_js_heap_size
+
+
+class LongTask(Message):
+ __id__ = 59
+
+ def __init__(self, timestamp, duration, context, container_type, container_src: str,
+ container_id: str, container_name: str):
+ self.timestamp = timestamp
+ self.duration = duration
+ self.context = context
+ self.container_type = container_type
+ self.container_src = container_src
+ self.container_id = container_id
+ self.container_name = container_name
+
+
+class SetNodeURLBasedAttribute(Message):
+ __id__ = 60
+
+ def __init__(self, id, name: str, value: str, base_url: str):
+ self.id = id
+ self.name = name
+ self.value = value
+ self.base_url = base_url
+
+
+class SetStyleData(Message):
+ __id__ = 61
+
+ def __init__(self, id, data: str, base_url: str):
+ self.id = id
+ self.data = data
+ self.base_url = base_url
+
+
+class IssueEvent(Message):
+ __id__ = 62
+
+ def __init__(self, message_id, timestamp, type: str, context_string: str,
+ context: str, payload: str):
+ self.message_id = message_id
+ self.timestamp = timestamp
+ self.type = type
+ self.context_string = context_string
+ self.context = context
+ self.payload = payload
+
+
+class TechnicalInfo(Message):
+ __id__ = 63
+
+ def __init__(self, type: str, value: str):
+ self.type = type
+ self.value = value
+
+
+class CustomIssue(Message):
+ __id__ = 64
+
+ def __init__(self, name: str, payload: str):
+ self.name = name
+ self.payload = payload
+
+
class PageClose(Message):
    """Message 65: the page was closed. Carries no payload fields."""

    __id__ = 65
+
+
+class IOSSessionStart(Message):
+ __id__ = 90
+
+ def __init__(self, timestamp, project_id, tracker_version: str,
+ rev_id: str, user_uuid: str, user_os: str, user_os_version: str,
+ user_device: str, user_device_type: str, user_country: str):
+ self.timestamp = timestamp
+ self.project_id = project_id
+ self.tracker_version = tracker_version
+ self.rev_id = rev_id
+ self.user_uuid = user_uuid
+ self.user_os = user_os
+ self.user_os_version = user_os_version
+ self.user_device = user_device
+ self.user_device_type = user_device_type
+ self.user_country = user_country
+
+
+class IOSSessionEnd(Message):
+ __id__ = 91
+
+ def __init__(self, timestamp):
+ self.timestamp = timestamp
+
+
+class IOSMetadata(Message):
+ __id__ = 92
+
+ def __init__(self, timestamp, length, key: str, value: str):
+ self.timestamp = timestamp
+ self.length = length
+ self.key = key
+ self.value = value
+
+
+class IOSUserID(Message):
+ __id__ = 94
+
+ def __init__(self, timestamp, length, value: str):
+ self.timestamp = timestamp
+ self.length = length
+ self.value = value
+
+
+class IOSUserAnonymousID(Message):
+ __id__ = 95
+
+ def __init__(self, timestamp, length, value: str):
+ self.timestamp = timestamp
+ self.length = length
+ self.value = value
+
+
+class IOSScreenLeave(Message):
+ __id__ = 99
+
+ def __init__(self, timestamp, length, title: str, view_name: str):
+ self.timestamp = timestamp
+ self.length = length
+ self.title = title
+ self.view_name = view_name
+
+
+class IOSLog(Message):
+ __id__ = 103
+
+ def __init__(self, timestamp, length, severity: str, content: str):
+ self.timestamp = timestamp
+ self.length = length
+ self.severity = severity
+ self.content = content
+
+
+class IOSInternalError(Message):
+ __id__ = 104
+
+ def __init__(self, timestamp, length, content: str):
+ self.timestamp = timestamp
+ self.length = length
+ self.content = content
+
+
+class IOSPerformanceAggregated(Message):
+ __id__ = 110
+
+ def __init__(self, timestamp_start, timestamp_end, min_fps, avg_fps,
+ max_fps, min_cpu, avg_cpu, max_cpu,
+ min_memory, avg_memory, max_memory,
+ min_battery, avg_battery, max_battery
+ ):
+ self.timestamp_start = timestamp_start
+ self.timestamp_end = timestamp_end
+ self.min_fps = min_fps
+ self.avg_fps = avg_fps
+ self.max_fps = max_fps
+ self.min_cpu = min_cpu
+ self.avg_cpu = avg_cpu
+ self.max_cpu = max_cpu
+ self.min_memory = min_memory
+ self.avg_memory = avg_memory
+ self.max_memory = max_memory
+ self.min_battery = min_battery
+ self.avg_battery = avg_battery
+ self.max_battery = max_battery
diff --git a/ee/connectors/requirements.txt b/ee/connectors/requirements.txt
new file mode 100644
index 000000000..a6b6a0720
--- /dev/null
+++ b/ee/connectors/requirements.txt
@@ -0,0 +1,43 @@
+certifi==2020.12.5
+chardet==4.0.0
+clickhouse-driver==0.2.0
+clickhouse-sqlalchemy==0.1.5
+idna==2.10
+kafka-python==2.0.2
+pandas==1.2.3
+psycopg2-binary==2.8.6
+pytz==2021.1
+requests==2.25.1
+SQLAlchemy==1.3.23
+tzlocal==2.1
+urllib3==1.26.3
+PyYAML==5.4.1
+pandas-redshift
+awswrangler
+google-auth-httplib2
+google-auth-oauthlib
+google-cloud-bigquery
+pandas-gbq
+snowflake-connector-python==2.4.1
+snowflake-sqlalchemy==1.2.4
+asn1crypto==1.4.0
+azure-common==1.1.25
+azure-core==1.8.2
+azure-storage-blob==12.5.0
+boto3==1.15.18
+botocore==1.18.18
+cffi==1.14.3
+cryptography==2.9.2
+isodate==0.6.0
+jmespath==0.10.0
+msrest==0.6.19
+oauthlib==3.1.0
+oscrypto==1.2.1
+pycparser==2.20
+pycryptodomex==3.9.8
+PyJWT==1.7.1
+pyOpenSSL==19.1.0
+python-dateutil==2.8.1
+requests-oauthlib==1.3.0
+s3transfer==0.3.3
+six==1.15.0
diff --git a/ee/connectors/sql/clickhouse_events.sql b/ee/connectors/sql/clickhouse_events.sql
new file mode 100644
index 000000000..b5eb8b440
--- /dev/null
+++ b/ee/connectors/sql/clickhouse_events.sql
@@ -0,0 +1,56 @@
+CREATE TABLE IF NOT EXISTS connector_events
+(
+ sessionid UInt64,
+ connectioninformation_downlink Nullable(UInt64),
+ connectioninformation_type Nullable(String),
+ consolelog_level Nullable(String),
+ consolelog_value Nullable(String),
+ customevent_messageid Nullable(UInt64),
+ customevent_name Nullable(String),
+ customevent_payload Nullable(String),
+ customevent_timestamp Nullable(UInt64),
+ errorevent_message Nullable(String),
+ errorevent_messageid Nullable(UInt64),
+ errorevent_name Nullable(String),
+ errorevent_payload Nullable(String),
+ errorevent_source Nullable(String),
+ errorevent_timestamp Nullable(UInt64),
+ jsexception_message Nullable(String),
+ jsexception_name Nullable(String),
+ jsexception_payload Nullable(String),
+ metadata_key Nullable(String),
+ metadata_value Nullable(String),
+ mouseclick_id Nullable(UInt64),
+ mouseclick_hesitationtime Nullable(UInt64),
+ mouseclick_label Nullable(String),
+ pageevent_firstcontentfulpaint Nullable(UInt64),
+ pageevent_firstpaint Nullable(UInt64),
+ pageevent_messageid Nullable(UInt64),
+ pageevent_referrer Nullable(String),
+ pageevent_speedindex Nullable(UInt64),
+ pageevent_timestamp Nullable(UInt64),
+ pageevent_url Nullable(String),
+ pagerendertiming_timetointeractive Nullable(UInt64),
+ pagerendertiming_visuallycomplete Nullable(UInt64),
+ rawcustomevent_name Nullable(String),
+ rawcustomevent_payload Nullable(String),
+ setviewportsize_height Nullable(UInt64),
+ setviewportsize_width Nullable(UInt64),
+ timestamp_timestamp Nullable(UInt64),
+ user_anonymous_id Nullable(String),
+ user_id Nullable(String),
+ issueevent_messageid Nullable(UInt64),
+ issueevent_timestamp Nullable(UInt64),
+ issueevent_type Nullable(String),
+ issueevent_contextstring Nullable(String),
+ issueevent_context Nullable(String),
+ issueevent_payload Nullable(String),
+ customissue_name Nullable(String),
+ customissue_payload Nullable(String),
+ received_at UInt64,
+ batch_order_number UInt64
+) ENGINE = MergeTree()
+PARTITION BY intDiv(received_at, 100000)
+ORDER BY (received_at, batch_order_number, sessionid)
+PRIMARY KEY (received_at)
+SETTINGS use_minimalistic_part_header_in_zookeeper=1, index_granularity=1000;
\ No newline at end of file
diff --git a/ee/connectors/sql/clickhouse_events_buffer.sql b/ee/connectors/sql/clickhouse_events_buffer.sql
new file mode 100644
index 000000000..ed291c824
--- /dev/null
+++ b/ee/connectors/sql/clickhouse_events_buffer.sql
@@ -0,0 +1,52 @@
+CREATE TABLE IF NOT EXISTS connector_events_buffer
+(
+ sessionid UInt64,
+ connectioninformation_downlink Nullable(UInt64),
+ connectioninformation_type Nullable(String),
+ consolelog_level Nullable(String),
+ consolelog_value Nullable(String),
+ customevent_messageid Nullable(UInt64),
+ customevent_name Nullable(String),
+ customevent_payload Nullable(String),
+ customevent_timestamp Nullable(UInt64),
+ errorevent_message Nullable(String),
+ errorevent_messageid Nullable(UInt64),
+ errorevent_name Nullable(String),
+ errorevent_payload Nullable(String),
+ errorevent_source Nullable(String),
+ errorevent_timestamp Nullable(UInt64),
+ jsexception_message Nullable(String),
+ jsexception_name Nullable(String),
+ jsexception_payload Nullable(String),
+ metadata_key Nullable(String),
+ metadata_value Nullable(String),
+ mouseclick_id Nullable(UInt64),
+ mouseclick_hesitationtime Nullable(UInt64),
+ mouseclick_label Nullable(String),
+ pageevent_firstcontentfulpaint Nullable(UInt64),
+ pageevent_firstpaint Nullable(UInt64),
+ pageevent_messageid Nullable(UInt64),
+ pageevent_referrer Nullable(String),
+ pageevent_speedindex Nullable(UInt64),
+ pageevent_timestamp Nullable(UInt64),
+ pageevent_url Nullable(String),
+ pagerendertiming_timetointeractive Nullable(UInt64),
+ pagerendertiming_visuallycomplete Nullable(UInt64),
+ rawcustomevent_name Nullable(String),
+ rawcustomevent_payload Nullable(String),
+ setviewportsize_height Nullable(UInt64),
+ setviewportsize_width Nullable(UInt64),
+ timestamp_timestamp Nullable(UInt64),
+ user_anonymous_id Nullable(String),
+ user_id Nullable(String),
+ issueevent_messageid Nullable(UInt64),
+ issueevent_timestamp Nullable(UInt64),
+ issueevent_type Nullable(String),
+ issueevent_contextstring Nullable(String),
+ issueevent_context Nullable(String),
+ issueevent_payload Nullable(String),
+ customissue_name Nullable(String),
+ customissue_payload Nullable(String),
+ received_at UInt64,
+ batch_order_number UInt64
+) ENGINE = Buffer(default, connector_events, 16, 10, 120, 10000, 1000000, 10000, 100000000);
diff --git a/ee/connectors/sql/clickhouse_sessions.sql b/ee/connectors/sql/clickhouse_sessions.sql
new file mode 100644
index 000000000..4d648553e
--- /dev/null
+++ b/ee/connectors/sql/clickhouse_sessions.sql
@@ -0,0 +1,52 @@
+CREATE TABLE IF NOT EXISTS connector_user_sessions
+(
+-- SESSION METADATA
+ sessionid UInt64,
+ user_agent Nullable(String),
+ user_browser Nullable(String),
+ user_browser_version Nullable(String),
+ user_country Nullable(String),
+ user_device Nullable(String),
+ user_device_heap_size Nullable(UInt64),
+ user_device_memory_size Nullable(UInt64),
+ user_device_type Nullable(String),
+ user_os Nullable(String),
+ user_os_version Nullable(String),
+ user_uuid Nullable(String),
+ connection_effective_bandwidth Nullable(UInt64), -- Downlink
+ connection_type Nullable(String), --"bluetooth", "cellular", "ethernet", "none", "wifi", "wimax", "other", "unknown"
+ metadata_key Nullable(String),
+ metadata_value Nullable(String),
+ referrer Nullable(String),
+ user_anonymous_id Nullable(String),
+ user_id Nullable(String),
+-- TIME
+ session_start_timestamp Nullable(UInt64),
+ session_end_timestamp Nullable(UInt64),
+ session_duration Nullable(UInt64),
+-- SPEED INDEX RELATED
+ first_contentful_paint Nullable(UInt64),
+ speed_index Nullable(UInt64),
+ visually_complete Nullable(UInt64),
+ timing_time_to_interactive Nullable(UInt64),
+-- PERFORMANCE
+ avg_cpu Nullable(UInt64),
+ avg_fps Nullable(UInt64),
+ max_cpu Nullable(UInt64),
+ max_fps Nullable(UInt64),
+ max_total_js_heap_size Nullable(UInt64),
+ max_used_js_heap_size Nullable(UInt64),
+-- ISSUES AND EVENTS
+ js_exceptions_count Nullable(UInt64),
+ long_tasks_total_duration Nullable(UInt64),
+ long_tasks_max_duration Nullable(UInt64),
+ long_tasks_count Nullable(UInt64),
+ inputs_count Nullable(UInt64),
+ clicks_count Nullable(UInt64),
+ issues_count Nullable(UInt64),
+ issues Array(Nullable(String)),
+ urls_count Nullable(UInt64),
+ urls Array(Nullable(String))
+) ENGINE = MergeTree()
+ORDER BY (sessionid)
+PRIMARY KEY (sessionid);
\ No newline at end of file
diff --git a/ee/connectors/sql/clickhouse_sessions_buffer.sql b/ee/connectors/sql/clickhouse_sessions_buffer.sql
new file mode 100644
index 000000000..540700d45
--- /dev/null
+++ b/ee/connectors/sql/clickhouse_sessions_buffer.sql
@@ -0,0 +1,50 @@
+CREATE TABLE IF NOT EXISTS connector_user_sessions_buffer
+(
+-- SESSION METADATA
+ sessionid UInt64,
+ user_agent Nullable(String),
+ user_browser Nullable(String),
+ user_browser_version Nullable(String),
+ user_country Nullable(String),
+ user_device Nullable(String),
+ user_device_heap_size Nullable(UInt64),
+ user_device_memory_size Nullable(UInt64),
+ user_device_type Nullable(String),
+ user_os Nullable(String),
+ user_os_version Nullable(String),
+ user_uuid Nullable(String),
+ connection_effective_bandwidth Nullable(UInt64), -- Downlink
+ connection_type Nullable(String), --"bluetooth", "cellular", "ethernet", "none", "wifi", "wimax", "other", "unknown"
+ metadata_key Nullable(String),
+ metadata_value Nullable(String),
+ referrer Nullable(String),
+ user_anonymous_id Nullable(String),
+ user_id Nullable(String),
+-- TIME
+ session_start_timestamp Nullable(UInt64),
+ session_end_timestamp Nullable(UInt64),
+ session_duration Nullable(UInt64),
+-- SPEED INDEX RELATED
+ first_contentful_paint Nullable(UInt64),
+ speed_index Nullable(UInt64),
+ visually_complete Nullable(UInt64),
+ timing_time_to_interactive Nullable(UInt64),
+-- PERFORMANCE
+ avg_cpu Nullable(UInt64),
+ avg_fps Nullable(UInt64),
+ max_cpu Nullable(UInt64),
+ max_fps Nullable(UInt64),
+ max_total_js_heap_size Nullable(UInt64),
+ max_used_js_heap_size Nullable(UInt64),
+-- ISSUES AND EVENTS
+ js_exceptions_count Nullable(UInt64),
+ long_tasks_total_duration Nullable(UInt64),
+ long_tasks_max_duration Nullable(UInt64),
+ long_tasks_count Nullable(UInt64),
+ inputs_count Nullable(UInt64),
+ clicks_count Nullable(UInt64),
+ issues_count Nullable(UInt64),
+ issues Array(Nullable(String)),
+ urls_count Nullable(UInt64),
+ urls Array(Nullable(String))
+) ENGINE = Buffer(default, connector_user_sessions, 16, 10, 120, 10000, 1000000, 10000, 100000000);
diff --git a/ee/connectors/sql/postgres_events.sql b/ee/connectors/sql/postgres_events.sql
new file mode 100644
index 000000000..986de4df9
--- /dev/null
+++ b/ee/connectors/sql/postgres_events.sql
@@ -0,0 +1,52 @@
+CREATE TABLE IF NOT EXISTS connector_events
+(
+ sessionid bigint,
+ connectioninformation_downlink bigint,
+ connectioninformation_type text,
+ consolelog_level text,
+ consolelog_value text,
+ customevent_messageid bigint,
+ customevent_name text,
+ customevent_payload text,
+ customevent_timestamp bigint,
+ errorevent_message text,
+ errorevent_messageid bigint,
+ errorevent_name text,
+ errorevent_payload text,
+ errorevent_source text,
+ errorevent_timestamp bigint,
+ jsexception_message text,
+ jsexception_name text,
+ jsexception_payload text,
+ metadata_key text,
+ metadata_value text,
+ mouseclick_id bigint,
+ mouseclick_hesitationtime bigint,
+ mouseclick_label text,
+ pageevent_firstcontentfulpaint bigint,
+ pageevent_firstpaint bigint,
+ pageevent_messageid bigint,
+ pageevent_referrer text,
+ pageevent_speedindex bigint,
+ pageevent_timestamp bigint,
+ pageevent_url text,
+ pagerendertiming_timetointeractive bigint,
+ pagerendertiming_visuallycomplete bigint,
+ rawcustomevent_name text,
+ rawcustomevent_payload text,
+ setviewportsize_height bigint,
+ setviewportsize_width bigint,
+ timestamp_timestamp bigint,
+ user_anonymous_id text,
+ user_id text,
+ issueevent_messageid bigint,
+ issueevent_timestamp bigint,
+ issueevent_type text,
+ issueevent_contextstring text,
+ issueevent_context text,
+ issueevent_payload text,
+ customissue_name text,
+ customissue_payload text,
+ received_at bigint,
+ batch_order_number bigint
+);
\ No newline at end of file
diff --git a/ee/connectors/sql/postgres_sessions.sql b/ee/connectors/sql/postgres_sessions.sql
new file mode 100644
index 000000000..1f68309c2
--- /dev/null
+++ b/ee/connectors/sql/postgres_sessions.sql
@@ -0,0 +1,50 @@
+CREATE TABLE IF NOT EXISTS connector_user_sessions
+(
+-- SESSION METADATA
+ sessionid bigint,
+ user_agent text,
+ user_browser text,
+ user_browser_version text,
+ user_country text,
+ user_device text,
+ user_device_heap_size bigint,
+ user_device_memory_size bigint,
+ user_device_type text,
+ user_os text,
+ user_os_version text,
+ user_uuid text,
+ connection_effective_bandwidth bigint, -- Downlink
+ connection_type text, --"bluetooth", "cellular", "ethernet", "none", "wifi", "wimax", "other", "unknown"
+ metadata_key text,
+ metadata_value text,
+ referrer text,
+ user_anonymous_id text,
+ user_id text,
+-- TIME
+ session_start_timestamp bigint,
+ session_end_timestamp bigint,
+ session_duration bigint,
+-- SPEED INDEX RELATED
+ first_contentful_paint bigint,
+ speed_index bigint,
+ visually_complete bigint,
+ timing_time_to_interactive bigint,
+-- PERFORMANCE
+ avg_cpu bigint,
+ avg_fps bigint,
+ max_cpu bigint,
+ max_fps bigint,
+ max_total_js_heap_size bigint,
+ max_used_js_heap_size bigint,
+-- ISSUES AND EVENTS
+ js_exceptions_count bigint,
+ long_tasks_total_duration bigint,
+ long_tasks_max_duration bigint,
+ long_tasks_count bigint,
+ inputs_count bigint,
+ clicks_count bigint,
+ issues_count bigint,
+ issues text[],
+ urls_count bigint,
+ urls text[]
+);
\ No newline at end of file
diff --git a/ee/connectors/sql/redshift_events.sql b/ee/connectors/sql/redshift_events.sql
new file mode 100644
index 000000000..c310e3202
--- /dev/null
+++ b/ee/connectors/sql/redshift_events.sql
@@ -0,0 +1,52 @@
+CREATE TABLE connector_events
+(
+ sessionid BIGINT,
+ connectioninformation_downlink BIGINT,
+ connectioninformation_type VARCHAR(300),
+ consolelog_level VARCHAR(300),
+ consolelog_value VARCHAR(300),
+ customevent_messageid BIGINT,
+ customevent_name VARCHAR(300),
+ customevent_payload VARCHAR(300),
+ customevent_timestamp BIGINT,
+ errorevent_message VARCHAR(300),
+ errorevent_messageid BIGINT,
+ errorevent_name VARCHAR(300),
+ errorevent_payload VARCHAR(300),
+ errorevent_source VARCHAR(300),
+ errorevent_timestamp BIGINT,
+ jsexception_message VARCHAR(300),
+ jsexception_name VARCHAR(300),
+ jsexception_payload VARCHAR(300),
+ metadata_key VARCHAR(300),
+ metadata_value VARCHAR(300),
+ mouseclick_id BIGINT,
+ mouseclick_hesitationtime BIGINT,
+ mouseclick_label VARCHAR(300),
+ pageevent_firstcontentfulpaint BIGINT,
+ pageevent_firstpaint BIGINT,
+ pageevent_messageid BIGINT,
+ pageevent_referrer VARCHAR(300),
+ pageevent_speedindex BIGINT,
+ pageevent_timestamp BIGINT,
+ pageevent_url VARCHAR(300),
+ pagerendertiming_timetointeractive BIGINT,
+ pagerendertiming_visuallycomplete BIGINT,
+ rawcustomevent_name VARCHAR(300),
+ rawcustomevent_payload VARCHAR(300),
+ setviewportsize_height BIGINT,
+ setviewportsize_width BIGINT,
+ timestamp_timestamp BIGINT,
+ user_anonymous_id VARCHAR(300),
+ user_id VARCHAR(300),
+ issueevent_messageid BIGINT,
+ issueevent_timestamp BIGINT,
+ issueevent_type VARCHAR(300),
+ issueevent_contextstring VARCHAR(300),
+ issueevent_context VARCHAR(300),
+ issueevent_payload VARCHAR(300),
+ customissue_name VARCHAR(300),
+ customissue_payload VARCHAR(300),
+ received_at BIGINT,
+ batch_order_number BIGINT
+);
\ No newline at end of file
diff --git a/ee/connectors/sql/redshift_sessions.sql b/ee/connectors/sql/redshift_sessions.sql
new file mode 100644
index 000000000..f1750dcc2
--- /dev/null
+++ b/ee/connectors/sql/redshift_sessions.sql
@@ -0,0 +1,50 @@
+CREATE TABLE connector_user_sessions
+(
+-- SESSION METADATA
+ sessionid bigint,
+ user_agent VARCHAR,
+ user_browser VARCHAR,
+ user_browser_version VARCHAR,
+ user_country VARCHAR,
+ user_device VARCHAR,
+ user_device_heap_size bigint,
+ user_device_memory_size bigint,
+ user_device_type VARCHAR,
+ user_os VARCHAR,
+ user_os_version VARCHAR,
+ user_uuid VARCHAR,
+ connection_effective_bandwidth bigint, -- Downlink
+ connection_type VARCHAR, --"bluetooth", "cellular", "ethernet", "none", "wifi", "wimax", "other", "unknown"
+ metadata_key VARCHAR,
+ metadata_value VARCHAR,
+ referrer VARCHAR,
+ user_anonymous_id VARCHAR,
+ user_id VARCHAR,
+-- TIME
+ session_start_timestamp bigint,
+ session_end_timestamp bigint,
+ session_duration bigint,
+-- SPEED INDEX RELATED
+ first_contentful_paint bigint,
+ speed_index bigint,
+ visually_complete bigint,
+ timing_time_to_interactive bigint,
+-- PERFORMANCE
+ avg_cpu bigint,
+ avg_fps bigint,
+ max_cpu bigint,
+ max_fps bigint,
+ max_total_js_heap_size bigint,
+ max_used_js_heap_size bigint,
+-- ISSUES AND EVENTS
+ js_exceptions_count bigint,
+ long_tasks_total_duration bigint,
+ long_tasks_max_duration bigint,
+ long_tasks_count bigint,
+ inputs_count bigint,
+ clicks_count bigint,
+ issues_count bigint,
+ issues VARCHAR,
+ urls_count bigint,
+ urls VARCHAR
+);
\ No newline at end of file
diff --git a/ee/connectors/sql/snowflake_events.sql b/ee/connectors/sql/snowflake_events.sql
new file mode 100644
index 000000000..986de4df9
--- /dev/null
+++ b/ee/connectors/sql/snowflake_events.sql
@@ -0,0 +1,52 @@
+CREATE TABLE IF NOT EXISTS connector_events
+(
+ sessionid bigint,
+ connectioninformation_downlink bigint,
+ connectioninformation_type text,
+ consolelog_level text,
+ consolelog_value text,
+ customevent_messageid bigint,
+ customevent_name text,
+ customevent_payload text,
+ customevent_timestamp bigint,
+ errorevent_message text,
+ errorevent_messageid bigint,
+ errorevent_name text,
+ errorevent_payload text,
+ errorevent_source text,
+ errorevent_timestamp bigint,
+ jsexception_message text,
+ jsexception_name text,
+ jsexception_payload text,
+ metadata_key text,
+ metadata_value text,
+ mouseclick_id bigint,
+ mouseclick_hesitationtime bigint,
+ mouseclick_label text,
+ pageevent_firstcontentfulpaint bigint,
+ pageevent_firstpaint bigint,
+ pageevent_messageid bigint,
+ pageevent_referrer text,
+ pageevent_speedindex bigint,
+ pageevent_timestamp bigint,
+ pageevent_url text,
+ pagerendertiming_timetointeractive bigint,
+ pagerendertiming_visuallycomplete bigint,
+ rawcustomevent_name text,
+ rawcustomevent_payload text,
+ setviewportsize_height bigint,
+ setviewportsize_width bigint,
+ timestamp_timestamp bigint,
+ user_anonymous_id text,
+ user_id text,
+ issueevent_messageid bigint,
+ issueevent_timestamp bigint,
+ issueevent_type text,
+ issueevent_contextstring text,
+ issueevent_context text,
+ issueevent_payload text,
+ customissue_name text,
+ customissue_payload text,
+ received_at bigint,
+ batch_order_number bigint
+);
\ No newline at end of file
diff --git a/ee/connectors/sql/snowflake_sessions.sql b/ee/connectors/sql/snowflake_sessions.sql
new file mode 100644
index 000000000..c66bac2e6
--- /dev/null
+++ b/ee/connectors/sql/snowflake_sessions.sql
@@ -0,0 +1,50 @@
+CREATE TABLE IF NOT EXISTS connector_user_sessions
+(
+-- SESSION METADATA
+ sessionid bigint,
+ user_agent text,
+ user_browser text,
+ user_browser_version text,
+ user_country text,
+ user_device text,
+ user_device_heap_size bigint,
+ user_device_memory_size bigint,
+ user_device_type text,
+ user_os text,
+ user_os_version text,
+ user_uuid text,
+ connection_effective_bandwidth bigint, -- Downlink
+ connection_type text, --"bluetooth", "cellular", "ethernet", "none", "wifi", "wimax", "other", "unknown"
+ metadata_key text,
+ metadata_value text,
+ referrer text,
+ user_anonymous_id text,
+ user_id text,
+-- TIME
+ session_start_timestamp bigint,
+ session_end_timestamp bigint,
+ session_duration bigint,
+-- SPEED INDEX RELATED
+ first_contentful_paint bigint,
+ speed_index bigint,
+ visually_complete bigint,
+ timing_time_to_interactive bigint,
+-- PERFORMANCE
+ avg_cpu bigint,
+ avg_fps bigint,
+ max_cpu bigint,
+ max_fps bigint,
+ max_total_js_heap_size bigint,
+ max_used_js_heap_size bigint,
+-- ISSUES AND EVENTS
+ js_exceptions_count bigint,
+ long_tasks_total_duration bigint,
+ long_tasks_max_duration bigint,
+ long_tasks_count bigint,
+ inputs_count bigint,
+ clicks_count bigint,
+ issues_count bigint,
+ issues array,
+ urls_count bigint,
+ urls array
+);
\ No newline at end of file
diff --git a/ee/connectors/utils/bigquery.env.example b/ee/connectors/utils/bigquery.env.example
new file mode 100644
index 000000000..16d970501
--- /dev/null
+++ b/ee/connectors/utils/bigquery.env.example
@@ -0,0 +1,7 @@
+table_id='{project_id}.{dataset}.{table}'
+project_id=name-123456
+dataset=datasetname
+sessions_table=connector_user_sessions
+events_table_name=connector_events
+events_detailed_table_name=connector_events_detailed
+level=normal
diff --git a/ee/connectors/utils/bigquery_service_account.json.example b/ee/connectors/utils/bigquery_service_account.json.example
new file mode 100644
index 000000000..e6473eed7
--- /dev/null
+++ b/ee/connectors/utils/bigquery_service_account.json.example
@@ -0,0 +1,12 @@
+{
+ "type": "service_account",
+ "project_id": "aaaaaa-123456",
+ "private_key_id": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
+  "private_key": "-----BEGIN PRIVATE KEY-----\nsome_letters_and_numbers\n-----END PRIVATE KEY-----\n",
+ "client_email": "abc-aws@aaaaa-123456.iam.gserviceaccount.com",
+ "client_id": "12345678910111213",
+ "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+ "token_uri": "https://oauth2.googleapis.com/token",
+ "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+  "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/abc-aws%40aaaaaa-123456.iam.gserviceaccount.com"
+}
diff --git a/ee/connectors/utils/clickhouse.env.example b/ee/connectors/utils/clickhouse.env.example
new file mode 100644
index 000000000..038fa2a87
--- /dev/null
+++ b/ee/connectors/utils/clickhouse.env.example
@@ -0,0 +1,7 @@
+connect_str='clickhouse+native://{address}/{database}'
+address=1.1.1.1:9000
+database=default
+sessions_table=connector_user_sessions_buffer
+events_table_name=connector_events_buffer
+events_detailed_table_name=connector_events_detailed_buffer
+level=normal
diff --git a/ee/connectors/utils/pg.env.example b/ee/connectors/utils/pg.env.example
new file mode 100644
index 000000000..e50b041f8
--- /dev/null
+++ b/ee/connectors/utils/pg.env.example
@@ -0,0 +1,10 @@
+connect_str='postgresql://{user}:{password}@{address}:{port}/{database}'
+address=1.1.1.1
+port=8080
+database=dev
+user=qwerty
+password=qwertyQWERTY12345
+sessions_table=connector_user_sessions
+events_table_name=connector_events
+events_detailed_table_name=connector_events_detailed
+level=normal
diff --git a/ee/connectors/utils/redshift.env.example b/ee/connectors/utils/redshift.env.example
new file mode 100644
index 000000000..d78b9a8a2
--- /dev/null
+++ b/ee/connectors/utils/redshift.env.example
@@ -0,0 +1,15 @@
+aws_access_key_id=QWERTYQWERTYQWERTY
+aws_secret_access_key=abcdefgh12345678
+region_name=eu-central-3
+bucket=name_of_the_bucket
+subdirectory=name_of_the_bucket_subdirectory
+connect_str='postgresql://{user}:{password}@{address}:{port}/{schema}'
+address=redshift-cluster-1.aaaaaaaaa.eu-central-3.redshift.amazonaws.com
+port=5439
+schema=dev
+user=admin
+password=admin
+sessions_table=connector_user_sessions
+events_table_name=connector_events
+events_detailed_table_name=connector_events_detailed
+level=normal
diff --git a/ee/connectors/utils/snowflake.env.example b/ee/connectors/utils/snowflake.env.example
new file mode 100644
index 000000000..deed20462
--- /dev/null
+++ b/ee/connectors/utils/snowflake.env.example
@@ -0,0 +1,11 @@
+connect_str='snowflake://{user}:{password}@{account}/{database}/{schema}?warehouse={warehouse}'
+user=admin
+password=12345678
+account=aaaaaaa.eu-central-3
+database=dev
+schema=public
+warehouse=SOME_WH
+sessions_table=connector_user_sessions
+events_table_name=connector_events
+events_detailed_table_name=connector_events_detailed
+level=normal
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
index 9010cb07a..e880024d3 100644
--- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
+++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql
@@ -47,77 +47,78 @@ CREATE TABLE tenants
CREATE TYPE user_role AS ENUM ('owner', 'admin', 'member');
CREATE TABLE users
(
- user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
- tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE,
- email text NOT NULL UNIQUE,
- role user_role NOT NULL DEFAULT 'member',
- name text NOT NULL,
- created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
- deleted_at timestamp without time zone NULL DEFAULT NULL,
- appearance jsonb NOT NULL default '{
+ user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY,
+ tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE,
+ email text NOT NULL UNIQUE,
+ role user_role NOT NULL DEFAULT 'member',
+ name text NOT NULL,
+ created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'),
+ deleted_at timestamp without time zone NULL DEFAULT NULL,
+ appearance jsonb NOT NULL default '{
+ "role": "dev",
"dashboard": {
- "applicationActivity": true,
- "avgCpu": true,
- "avgDomContentLoadStart": true,
- "avgFirstContentfulPixel": false,
- "avgFirstPaint": false,
- "avgFps": false,
- "avgImageLoadTime": true,
- "avgPageLoadTime": true,
- "avgPagesDomBuildtime": true,
- "avgPagesResponseTime": false,
- "avgRequestLoadTime": true,
- "avgSessionDuration": false,
- "avgTillFirstBit": false,
- "avgTimeToInteractive": true,
- "avgTimeToRender": true,
- "avgUsedJsHeapSize": true,
- "avgVisitedPages": false,
- "busiestTimeOfDay": true,
- "callsErrors_4xx": true,
- "callsErrors_5xx": true,
- "countSessions": true,
- "cpu": true,
- "crashes": true,
- "errors": true,
- "errorsPerDomains": true,
- "errorsPerType": true,
- "errorsTrend": true,
+ "cpu": false,
"fps": false,
- "impactedSessionsByJsErrors": true,
- "impactedSessionsBySlowPages": true,
- "memoryConsumption": true,
- "missingResources": true,
+ "avgCpu": false,
+ "avgFps": false,
+ "errors": true,
+ "crashes": false,
"overview": true,
- "pageMetrics": true,
- "pagesResponseTime": true,
- "pagesResponseTimeDistribution": true,
- "performance": true,
- "resourceTypeVsResponseEnd": true,
- "resourcesByParty": false,
- "resourcesCountByType": true,
- "resourcesLoadingTime": true,
- "resourcesVsVisuallyComplete": true,
"sessions": true,
- "sessionsFeedback": false,
- "sessionsFrustration": false,
- "sessionsPerBrowser": false,
- "slowestDomains": true,
- "slowestImages": true,
- "slowestResources": true,
- "speedLocation": true,
- "timeToRender": false,
"topMetrics": true,
- "userActivity": false
+ "callsErrors": false,
+ "pageMetrics": true,
+ "performance": true,
+ "timeToRender": false,
+ "userActivity": false,
+ "avgFirstPaint": false,
+ "countSessions": false,
+ "errorsPerType": false,
+ "slowestImages": true,
+ "speedLocation": false,
+ "slowestDomains": false,
+ "avgPageLoadTime": false,
+ "avgTillFirstBit": false,
+ "avgTimeToRender": false,
+ "avgVisitedPages": false,
+ "avgImageLoadTime": false,
+ "busiestTimeOfDay": true,
+ "errorsPerDomains": false,
+ "missingResources": false,
+ "resourcesByParty": false,
+ "sessionsFeedback": false,
+ "slowestResources": false,
+ "avgUsedJsHeapSize": false,
+ "domainsErrors_4xx": false,
+ "domainsErrors_5xx": false,
+ "memoryConsumption": false,
+ "pagesDomBuildtime": false,
+ "pagesResponseTime": false,
+ "avgRequestLoadTime": false,
+ "avgSessionDuration": false,
+ "sessionsPerBrowser": false,
+ "applicationActivity": true,
+ "sessionsFrustration": false,
+ "avgPagesDomBuildtime": false,
+ "avgPagesResponseTime": false,
+ "avgTimeToInteractive": false,
+ "resourcesCountByType": false,
+ "resourcesLoadingTime": false,
+ "avgDomContentLoadStart": false,
+ "avgFirstContentfulPixel": false,
+ "resourceTypeVsResponseEnd": false,
+ "impactedSessionsByJsErrors": false,
+ "impactedSessionsBySlowPages": false,
+ "resourcesVsVisuallyComplete": false,
+ "pagesResponseTimeDistribution": false
},
- "runs": false,
- "tests": false,
- "pagesDomBuildtime": false
+ "sessionsLive": false,
+ "sessionsDevtools": true
}'::jsonb,
- api_key text UNIQUE default generate_api_key(20) not null,
- jwt_iat timestamp without time zone NULL DEFAULT NULL,
- data jsonb NOT NULL DEFAULT '{}'::jsonb,
- weekly_report boolean NOT NULL DEFAULT TRUE
+ api_key text UNIQUE default generate_api_key(20) not null,
+ jwt_iat timestamp without time zone NULL DEFAULT NULL,
+ data jsonb NOT NULL DEFAULT '{}'::jsonb,
+ weekly_report boolean NOT NULL DEFAULT TRUE
);
@@ -140,7 +141,7 @@ CREATE TABLE oauth_authentication
provider oauth_provider NOT NULL,
provider_user_id text NOT NULL,
token text NOT NULL,
- UNIQUE (provider, provider_user_id)
+ UNIQUE (user_id, provider)
);
@@ -445,7 +446,6 @@ CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error
-- --- sessions.sql ---
-
CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other');
CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS');
CREATE TYPE platform AS ENUM ('web','ios','android');
@@ -456,7 +456,7 @@ CREATE TABLE sessions
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE,
tracker_version text NOT NULL,
start_ts bigint NOT NULL,
- duration integer NOT NULL,
+ duration integer NULL,
rev_id text DEFAULT NULL,
platform platform NOT NULL DEFAULT 'web',
is_snippet boolean NOT NULL DEFAULT FALSE,
@@ -508,6 +508,7 @@ CREATE INDEX ON sessions (project_id, metadata_7);
CREATE INDEX ON sessions (project_id, metadata_8);
CREATE INDEX ON sessions (project_id, metadata_9);
CREATE INDEX ON sessions (project_id, metadata_10);
+-- CREATE INDEX ON sessions (rehydration_id);
CREATE INDEX ON sessions (project_id, watchdogs_score DESC);
CREATE INDEX platform_idx ON public.sessions (platform);
@@ -558,6 +559,18 @@ CREATE TABLE user_favorite_sessions
);
+-- --- assignments.sql ---
+
+create table assigned_sessions
+(
+ session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+ issue_id text NOT NULL,
+ provider oauth_provider NOT NULL,
+ created_by integer NOT NULL,
+ created_at timestamp default timezone('utc'::text, now()) NOT NULL,
+ provider_data jsonb default '{}'::jsonb NOT NULL
+);
+
-- --- events_common.sql ---
CREATE SCHEMA events_common;
@@ -613,7 +626,6 @@ CREATE INDEX requests_url_gin_idx2 ON events_common.requests USING GIN (RIGHT(ur
gin_trgm_ops);
-- --- events.sql ---
-
CREATE SCHEMA events;
CREATE TABLE events.pages
@@ -636,6 +648,7 @@ CREATE TABLE events.pages
time_to_interactive integer DEFAULT NULL,
response_time bigint DEFAULT NULL,
response_end bigint DEFAULT NULL,
+ ttfb integer DEFAULT NULL,
PRIMARY KEY (session_id, message_id)
);
CREATE INDEX ON events.pages (session_id);
@@ -655,6 +668,11 @@ CREATE INDEX pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_
gin_trgm_ops);
CREATE INDEX ON events.pages (response_time);
CREATE INDEX ON events.pages (response_end);
+CREATE INDEX pages_path_gin_idx ON events.pages USING GIN (path gin_trgm_ops);
+CREATE INDEX pages_path_idx ON events.pages (path);
+CREATE INDEX pages_visually_complete_idx ON events.pages (visually_complete) WHERE visually_complete > 0;
+CREATE INDEX pages_dom_building_time_idx ON events.pages (dom_building_time) WHERE dom_building_time > 0;
+CREATE INDEX pages_load_time_idx ON events.pages (load_time) WHERE load_time > 0;
CREATE TABLE events.clicks
@@ -721,6 +739,61 @@ CREATE INDEX ON events.state_actions (name);
CREATE INDEX state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops);
CREATE INDEX ON events.state_actions (timestamp);
+CREATE TYPE events.resource_type AS ENUM ('other', 'script', 'stylesheet', 'fetch', 'img', 'media');
+CREATE TYPE events.resource_method AS ENUM ('GET' , 'HEAD' , 'POST' , 'PUT' , 'DELETE' , 'CONNECT' , 'OPTIONS' , 'TRACE' , 'PATCH' );
+CREATE TABLE events.resources
+(
+ session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+ message_id bigint NOT NULL,
+ timestamp bigint NOT NULL,
+ duration bigint NULL,
+ type events.resource_type NOT NULL,
+ url text NOT NULL,
+ url_host text NOT NULL,
+ url_hostpath text NOT NULL,
+ success boolean NOT NULL,
+ status smallint NULL,
+ method events.resource_method NULL,
+ ttfb bigint NULL,
+ header_size bigint NULL,
+ encoded_body_size integer NULL,
+ decoded_body_size integer NULL,
+ PRIMARY KEY (session_id, message_id)
+);
+CREATE INDEX ON events.resources (session_id);
+CREATE INDEX ON events.resources (timestamp);
+CREATE INDEX ON events.resources (success);
+CREATE INDEX ON events.resources (status);
+CREATE INDEX ON events.resources (type);
+CREATE INDEX ON events.resources (duration) WHERE duration > 0;
+CREATE INDEX ON events.resources (url_host);
+
+CREATE INDEX resources_url_gin_idx ON events.resources USING GIN (url gin_trgm_ops);
+CREATE INDEX resources_url_idx ON events.resources (url);
+CREATE INDEX resources_url_hostpath_gin_idx ON events.resources USING GIN (url_hostpath gin_trgm_ops);
+CREATE INDEX resources_url_hostpath_idx ON events.resources (url_hostpath);
+
+
+
+CREATE TABLE events.performance
+(
+ session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE,
+ timestamp bigint NOT NULL,
+ message_id bigint NOT NULL,
+ min_fps smallint NOT NULL,
+ avg_fps smallint NOT NULL,
+ max_fps smallint NOT NULL,
+ min_cpu smallint NOT NULL,
+ avg_cpu smallint NOT NULL,
+ max_cpu smallint NOT NULL,
+ min_total_js_heap_size bigint NOT NULL,
+ avg_total_js_heap_size bigint NOT NULL,
+ max_total_js_heap_size bigint NOT NULL,
+ min_used_js_heap_size bigint NOT NULL,
+ avg_used_js_heap_size bigint NOT NULL,
+ max_used_js_heap_size bigint NOT NULL,
+ PRIMARY KEY (session_id, message_id)
+);
CREATE OR REPLACE FUNCTION events.funnel(steps integer[], m integer) RETURNS boolean AS
@@ -762,4 +835,4 @@ CREATE INDEX autocomplete_type_idx ON public.autocomplete (type);
CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops);
-COMMIT;
\ No newline at end of file
+COMMIT;
diff --git a/frontend/app/Router.js b/frontend/app/Router.js
index b2024c7d4..2706ca2e4 100644
--- a/frontend/app/Router.js
+++ b/frontend/app/Router.js
@@ -21,6 +21,7 @@ import FunnelIssueDetails from 'Components/Funnels/FunnelIssueDetails';
import APIClient from './api_client';
import * as routes from './routes';
+import { OB_DEFAULT_TAB } from 'App/routes';
import Signup from './components/Signup/Signup';
import { fetchTenants } from 'Duck/user';
@@ -48,6 +49,7 @@ const SIGNUP_PATH = routes.signup();
const FORGOT_PASSWORD = routes.forgotPassword();
const CLIENT_PATH = routes.client();
const ONBOARDING_PATH = routes.onboarding();
+const ONBOARDING_REDIRECT_PATH = routes.onboarding(OB_DEFAULT_TAB);
@withRouter
@connect((state) => {
@@ -67,6 +69,7 @@ const ONBOARDING_PATH = routes.onboarding();
organisation: state.getIn([ 'user', 'client', 'name' ]),
tenantId: state.getIn([ 'user', 'client', 'tenantId' ]),
tenants: state.getIn(['user', 'tenants']),
+ onboarding: state.getIn([ 'user', 'onboarding' ])
};
}, {
fetchUserInfo, fetchTenants
@@ -92,7 +95,7 @@ class Router extends React.Component {
}
render() {
- const { isLoggedIn, jwt, siteId, sites, loading, changePassword, location, tenants } = this.props;
+ const { isLoggedIn, jwt, siteId, sites, loading, changePassword, location, tenants, onboarding } = this.props;
const siteIdList = sites.map(({ id }) => id).toJS();
const hideHeader = location.pathname && location.pathname.includes('/session/');
@@ -121,6 +124,9 @@ class Router extends React.Component {
}
}
/>
+ { onboarding &&
+