diff --git a/.github/workflows/api.yaml b/.github/workflows/api.yaml index 435d07126..c247b2a68 100644 --- a/.github/workflows/api.yaml +++ b/.github/workflows/api.yaml @@ -39,13 +39,11 @@ jobs: ENVIRONMENT: staging run: | cd api - bash build.sh - [[ -z "${DOCKER_REPO}" ]] || { - docker push ${DOCKER_REPO}/chalice:"${IMAGE_TAG}" - } + PUSH_IMAGE=1 bash build.sh - name: Deploy to kubernetes run: | cd scripts/helm/ + sed -i "s#domain_name.*#domain_name: \"foss.openreplay.com\" #g" vars.yaml sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml sed -i "s/tag:.*/tag: \"$IMAGE_TAG\"/g" app/chalice.yaml bash kube-install.sh --app chalice diff --git a/api/.chalice/config.json b/api/.chalice/config.json index 8f2874beb..8385a17e7 100644 --- a/api/.chalice/config.json +++ b/api/.chalice/config.json @@ -28,14 +28,12 @@ "assign_link": "http://127.0.0.1:8000/async/email_assignment", "captcha_server": "", "captcha_key": "", - "sessions_bucket": "asayer-mobs", + "sessions_bucket": "mobs", "sessions_region": "us-east-1", "put_S3_TTL": "20", - "sourcemaps_bucket": "asayer-sourcemaps", - "sourcemaps_bucket_key": "", - "sourcemaps_bucket_secret": "", - "sourcemaps_bucket_region": "us-east-1", - "js_cache_bucket": "asayer-sessions-assets", + "sourcemaps_reader": "http://127.0.0.1:3000/", + "sourcemaps_bucket": "sourcemaps", + "js_cache_bucket": "sessions-assets", "async_Token": "", "EMAIL_HOST": "", "EMAIL_PORT": "587", diff --git a/api/.gitignore b/api/.gitignore index d9688e343..dd32b5d3f 100644 --- a/api/.gitignore +++ b/api/.gitignore @@ -170,7 +170,7 @@ logs*.txt *.csv *.p -*.js SUBNETS.json ./chalicelib/.configs +README/* \ No newline at end of file diff --git a/api/Dockerfile b/api/Dockerfile index 0ca8c1edf..84d1b88f5 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -4,6 +4,14 @@ WORKDIR /work COPY . . 
RUN pip install -r requirements.txt -t ./vendor --upgrade RUN pip install chalice==1.22.2 +# Installing Nodejs +RUN apt update && apt install -y curl && \ + curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \ + apt install -y nodejs && \ + apt remove --purge -y curl && \ + rm -rf /var/lib/apt/lists/* && \ + cd sourcemaps_reader && \ + npm install # Add Tini # Startup daemon @@ -13,4 +21,4 @@ ENV ENTERPRISE_BUILD ${envarg} ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini RUN chmod +x /tini ENTRYPOINT ["/tini", "--"] -CMD python env_handler.py && chalice local --no-autoreload --host 0.0.0.0 --stage ${ENTERPRISE_BUILD} \ No newline at end of file +CMD ./entrypoint.sh diff --git a/api/app.py b/api/app.py index 469d8a42f..2c4465189 100644 --- a/api/app.py +++ b/api/app.py @@ -23,13 +23,13 @@ import traceback old_tb = traceback.print_exception old_f = sys.stdout old_e = sys.stderr -ASAYER_SESSION_ID = None +OR_SESSION_TOKEN = None class F: def write(self, x): - if ASAYER_SESSION_ID is not None and x != '\n' and not helper.is_local(): - old_f.write(f"[asayer_session_id={ASAYER_SESSION_ID}] {x}") + if OR_SESSION_TOKEN is not None and x != '\n' and not helper.is_local(): + old_f.write(f"[or_session_token={OR_SESSION_TOKEN}] {x}") else: old_f.write(x) @@ -38,9 +38,8 @@ class F: def tb_print_exception(etype, value, tb, limit=None, file=None, chain=True): - if ASAYER_SESSION_ID is not None and not helper.is_local(): - # bugsnag.notify(Exception(str(value)), meta_data={"special_info": {"asayerSessionId": ASAYER_SESSION_ID}}) - value = type(value)(f"[asayer_session_id={ASAYER_SESSION_ID}] " + str(value)) + if OR_SESSION_TOKEN is not None and not helper.is_local(): + value = type(value)(f"[or_session_token={OR_SESSION_TOKEN}] " + str(value)) old_tb(etype, value, tb, limit, file, chain) @@ -55,11 +54,11 @@ sys.stderr = F() _overrides.chalice_app(app) -# v0905 + @app.middleware('http') -def asayer_middleware(event, get_response): - global ASAYER_SESSION_ID - ASAYER_SESSION_ID = app.current_request.headers.get('vnd.openreplay.com.sid', +def or_middleware(event, get_response): + global OR_SESSION_TOKEN + OR_SESSION_TOKEN = app.current_request.headers.get('vnd.openreplay.com.sid', app.current_request.headers.get('vnd.asayer.io.sid')) if "authorizer" in event.context and event.context["authorizer"] is None: print("Deleted user!!") @@ -71,19 +70,24 @@ def asayer_middleware(event, get_response): import time now = int(time.time() * 1000) response = get_response(event) + if response.status_code == 500 and helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local(): + with configure_scope() as scope: + scope.set_tag('stage', environ["stage"]) + scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN) + scope.set_extra("context", event.context) + sentry_sdk.capture_exception(Exception(response.body)) if helper.TRACK_TIME: print(f"Execution time: {int(time.time() * 1000) - now} ms") except Exception as e: - print("middleware exception handling") - print(e) - pg_client.close() - if helper.allow_sentry() and ASAYER_SESSION_ID is not None and not helper.is_local(): + if helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local(): with configure_scope() as scope: scope.set_tag('stage', environ["stage"]) - scope.set_tag('openReplaySessionToken', ASAYER_SESSION_ID) + scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN) scope.set_extra("context", event.context) sentry_sdk.capture_exception(e) - raise e + response = 
Response(body={"Code": "InternalServerError", + "Message": "An internal server error occurred [level=Fatal]."}, + status_code=500) pg_client.close() return response diff --git a/api/chalicelib/blueprints/bp_core.py b/api/chalicelib/blueprints/bp_core.py index 3b2910606..bd42b2254 100644 --- a/api/chalicelib/blueprints/bp_core.py +++ b/api/chalicelib/blueprints/bp_core.py @@ -881,5 +881,5 @@ def all_issue_types(context): @app.route('/flows', methods=['GET', 'PUT', 'POST', 'DELETE']) @app.route('/{projectId}/flows', methods=['GET', 'PUT', 'POST', 'DELETE']) -def removed_endpoints(context): +def removed_endpoints(projectId=None, context=None): return Response(body={"errors": ["Endpoint no longer available"]}, status_code=410) diff --git a/api/chalicelib/blueprints/bp_core_dynamic.py b/api/chalicelib/blueprints/bp_core_dynamic.py index 4ec5278d7..1768896f9 100644 --- a/api/chalicelib/blueprints/bp_core_dynamic.py +++ b/api/chalicelib/blueprints/bp_core_dynamic.py @@ -35,7 +35,7 @@ def login(): if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]): return {"errors": ["Invalid captcha."]} r = users.authenticate(data['email'], data['password'], - for_plugin= False + for_plugin=False ) if r is None: return { @@ -73,10 +73,12 @@ def get_account(context): "projects": -1, "metadata": metadata.get_remaining_metadata_with_count(context['tenantId']) }, - **license.get_status(context["tenantId"]) + **license.get_status(context["tenantId"]), + "smtp": environ["EMAIL_HOST"] is not None and len(environ["EMAIL_HOST"]) > 0 } } + @app.route('/projects', methods=['GET']) def get_projects(context): return {"data": projects.get_projects(tenant_id=context["tenantId"], recording_state=True, gdpr=True, recorded=True, @@ -156,12 +158,28 @@ def add_slack_client(context): data = app.current_request.json_body if "url" not in data or "name" not in data: return {"errors": ["please provide a url and a name"]} - if Slack.add_integration(tenant_id=context["tenantId"], url=data["url"], name=data["name"]): - return {"data": {"status": "success"}} - else: + n = Slack.add_channel(tenant_id=context["tenantId"], url=data["url"], name=data["name"]) + if n is None: return { - "errors": ["failed URL verification, if you received a message on slack, please notify our dev-team"] + "errors": ["We couldn't send you a test message on your Slack channel. Please verify your webhook url."] } + return {"data": n} + + +@app.route('/integrations/slack/{integrationId}', methods=['POST', 'PUT']) +def edit_slack_integration(integrationId, context): + data = app.current_request.json_body + if data.get("url") and len(data["url"]) > 0: + old = webhook.get(tenant_id=context["tenantId"], webhook_id=integrationId) + if old["endpoint"] != data["url"]: + if not Slack.say_hello(data["url"]): + return { + "errors": [ + "We couldn't send you a test message on your Slack channel. 
Please verify your webhook url."] + } + return {"data": webhook.update(tenant_id=context["tenantId"], webhook_id=integrationId, + changes={"name": data.get("name", ""), "endpoint": data["url"]})} + @app.route('/{projectId}/errors/search', methods=['POST']) def errors_search(projectId, context): @@ -386,6 +404,7 @@ def search_sessions_by_metadata(context): m_key=key, project_id=project_id)} + @app.route('/plans', methods=['GET']) def get_current_plan(context): return { diff --git a/api/chalicelib/core/collaboration_slack.py b/api/chalicelib/core/collaboration_slack.py index 5fc80511c..b3da03a37 100644 --- a/api/chalicelib/core/collaboration_slack.py +++ b/api/chalicelib/core/collaboration_slack.py @@ -6,19 +6,18 @@ from chalicelib.core import webhook class Slack: @classmethod - def add_integration(cls, tenant_id, **args): + def add_channel(cls, tenant_id, **args): url = args["url"] name = args["name"] - if cls.__say_hello(url): - webhook.add(tenant_id=tenant_id, - endpoint=url, - webhook_type="slack", - name=name) - return True - return False + if cls.say_hello(url): + return webhook.add(tenant_id=tenant_id, + endpoint=url, + webhook_type="slack", + name=name) + return None @classmethod - def __say_hello(cls, url): + def say_hello(cls, url): r = requests.post( url=url, json={ diff --git a/api/chalicelib/core/dashboard.py b/api/chalicelib/core/dashboard.py index a778dcdfc..f306a51b4 100644 --- a/api/chalicelib/core/dashboard.py +++ b/api/chalicelib/core/dashboard.py @@ -146,7 +146,6 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1) ORDER BY generated_timestamp;""" params = {"step_size": step_size, "project_id": project_id, "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - print(cur.mogrify(pg_query, params)) cur.execute(cur.mogrify(pg_query, params)) rows = cur.fetchall() results = { @@ -640,9 +639,6 @@ def search(text, resource_type, project_id, performance=False, pages_only=False, FROM events.pages INNER JOIN public.sessions USING (session_id) WHERE {" AND ".join(pg_sub_query)} LIMIT 10;""" - print(cur.mogrify(pg_query, {"project_id": project_id, - "value": helper.string_to_sql_like(text), - "platform_0": platform})) cur.execute(cur.mogrify(pg_query, {"project_id": project_id, "value": helper.string_to_sql_like(text), "platform_0": platform})) diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index 65ade49ed..69213a079 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -365,7 +365,7 @@ def __get_merged_queries(queries, value, project_id): def __get_autocomplete_table(value, project_id): with pg_client.PostgresClient() as cur: cur.execute(cur.mogrify("""SELECT DISTINCT ON(value,type) project_id, value, type - FROM (SELECT * + FROM (SELECT project_id, type, value FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY type ORDER BY value) AS Row_ID FROM public.autocomplete diff --git a/api/chalicelib/core/integration_jira_cloud_issue.py b/api/chalicelib/core/integration_jira_cloud_issue.py index 00fac2fcb..bb847007a 100644 --- a/api/chalicelib/core/integration_jira_cloud_issue.py +++ b/api/chalicelib/core/integration_jira_cloud_issue.py @@ -34,7 +34,7 @@ class JIRACloudIntegrationIssue(BaseIntegrationIssue): if len(projects_map[integration_project_id]) > 0: jql += f" AND ID IN ({','.join(projects_map[integration_project_id])})" issues = self._client.get_issues(jql, offset=0) - results += [issues] + results += issues return {"issues": results} def get(self, 
integration_project_id, assignment_id): diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 439bca0fd..fa127b04a 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -1,6 +1,6 @@ from chalicelib.utils import pg_client, helper from chalicelib.core import events, sessions_metas, socket_ios, metadata, events_ios, \ - sessions_mobs + sessions_mobs, issues from chalicelib.utils import dev from chalicelib.core import projects, errors @@ -25,7 +25,7 @@ SESSION_PROJECTION_COLS = """s.project_id, s.user_anonymous_id, s.platform, s.issue_score, - s.issue_types::text[] AS issue_types, + to_jsonb(s.issue_types) AS issue_types, favorite_sessions.session_id NOTNULL AS favorite, COALESCE((SELECT TRUE FROM public.user_viewed_sessions AS fs @@ -84,7 +84,6 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, session_id=session_id) data['mobsUrl'] = sessions_mobs.get_ios(sessionId=session_id) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data["socket"] = socket_ios.start_replay(project_id=project_id, session_id=session_id, device=data["userDevice"], os_version=data["userOsVersion"], @@ -101,9 +100,11 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id, session_id=session_id) data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['resources'] = resources.get_by_session_id(session_id=session_id) + data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['issues'] = issues.get_by_session_id(session_id=session_id) + return data return None diff --git a/api/chalicelib/core/sessions_assignments.py b/api/chalicelib/core/sessions_assignments.py index 2b9c28d8f..3e0929dad 100644 --- a/api/chalicelib/core/sessions_assignments.py +++ b/api/chalicelib/core/sessions_assignments.py @@ -119,7 +119,6 @@ def get_by_session(tenant_id, user_id, project_id, session_id): continue r = integration.issue_handler.get_by_ids(saved_issues=issues[tool]) - print(r) for i in r["issues"]: i["provider"] = tool results += r["issues"] diff --git a/api/chalicelib/core/sessions_mobs.py b/api/chalicelib/core/sessions_mobs.py index 75ac59307..ea020d412 100644 --- a/api/chalicelib/core/sessions_mobs.py +++ b/api/chalicelib/core/sessions_mobs.py @@ -1,14 +1,10 @@ from chalicelib.utils.helper import environ -import boto3 +from chalicelib.utils.s3 import client def get_web(sessionId): - return boto3.client('s3', - endpoint_url=environ["S3_HOST"], - aws_access_key_id=environ["S3_KEY"], - aws_secret_access_key=environ["S3_SECRET"], - region_name=environ["sessions_region"]).generate_presigned_url( + return client.generate_presigned_url( 'get_object', Params={ 'Bucket': environ["sessions_bucket"], @@ -19,7 +15,7 @@ def get_web(sessionId): def get_ios(sessionId): - return boto3.client('s3', region_name=environ["ios_region"]).generate_presigned_url( + return client.generate_presigned_url( 'get_object', Params={ 'Bucket': environ["ios_bucket"], diff --git a/api/chalicelib/core/sourcemaps.py b/api/chalicelib/core/sourcemaps.py index c198b859b..01204847c 100644 --- a/api/chalicelib/core/sourcemaps.py +++ b/api/chalicelib/core/sourcemaps.py @@ -80,7 +80,12 @@ def 
get_traces_group(project_id, payload): payloads = {} all_exists = True for i, u in enumerate(frames): + print("===============================") + print(u["absPath"]) + print("converted to:") key = __get_key(project_id, u["absPath"]) # use filename instead? + print(key) + print("===============================") if key not in payloads: file_exists = s3.exists(environ['sourcemaps_bucket'], key) all_exists = all_exists and file_exists diff --git a/api/chalicelib/core/sourcemaps_parser.py b/api/chalicelib/core/sourcemaps_parser.py index cb0463d55..b7c17f3d3 100644 --- a/api/chalicelib/core/sourcemaps_parser.py +++ b/api/chalicelib/core/sourcemaps_parser.py @@ -8,14 +8,9 @@ def get_original_trace(key, positions): "key": key, "positions": positions, "padding": 5, - "bucket": environ['sourcemaps_bucket'], - "bucket_config": { - "aws_access_key_id": environ["sourcemaps_bucket_key"], - "aws_secret_access_key": environ["sourcemaps_bucket_secret"], - "aws_region": environ["sourcemaps_bucket_region"] - } + "bucket": environ['sourcemaps_bucket'] } - r = requests.post(environ["sourcemaps"], json=payload) + r = requests.post(environ["sourcemaps_reader"], json=payload) if r.status_code != 200: return {} diff --git a/api/chalicelib/core/telemetry.py b/api/chalicelib/core/telemetry.py index 362550553..48f403f57 100644 --- a/api/chalicelib/core/telemetry.py +++ b/api/chalicelib/core/telemetry.py @@ -30,7 +30,7 @@ def compute(): RETURNING *,(SELECT email FROM public.users WHERE role='owner' LIMIT 1);""" ) data = cur.fetchone() - requests.post('https://parrot.asayer.io/os/telemetry', json=process_data(data)) + requests.post('https://parrot.asayer.io/os/telemetry', json={"stats": [process_data(data)]}) def new_client(): diff --git a/api/chalicelib/core/tenants.py b/api/chalicelib/core/tenants.py index f047dcffa..4b439cfef 100644 --- a/api/chalicelib/core/tenants.py +++ b/api/chalicelib/core/tenants.py @@ -10,7 +10,7 @@ def get_by_tenant_id(tenant_id): f"""SELECT tenant_id, name, - api_key + api_key, created_at, edition, version_number, diff --git a/api/chalicelib/core/webhook.py b/api/chalicelib/core/webhook.py index 99a3b0569..fff2d4e7e 100644 --- a/api/chalicelib/core/webhook.py +++ b/api/chalicelib/core/webhook.py @@ -24,7 +24,7 @@ def get(tenant_id, webhook_id): cur.execute( cur.mogrify("""\ SELECT - w.* + webhook_id AS integration_id, webhook_id AS id, w.* FROM public.webhooks AS w where w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""", {"webhook_id": webhook_id}) @@ -40,7 +40,7 @@ def get_by_type(tenant_id, webhook_type): cur.execute( cur.mogrify("""\ SELECT - w.webhook_id AS id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at + w.webhook_id AS integration_id, w.webhook_id AS id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at FROM public.webhooks AS w WHERE w.type =%(type)s AND deleted_at ISNULL;""", {"type": webhook_type}) @@ -55,7 +55,7 @@ def get_by_tenant(tenant_id, replace_none=False): with pg_client.PostgresClient() as cur: cur.execute("""\ SELECT - w.* + webhook_id AS integration_id, webhook_id AS id, w.* FROM public.webhooks AS w WHERE deleted_at ISNULL;""" ) @@ -81,7 +81,7 @@ def update(tenant_id, webhook_id, changes, replace_none=False): UPDATE public.webhooks SET {','.join(sub_query)} WHERE webhook_id =%(id)s AND deleted_at ISNULL - RETURNING *;""", + RETURNING webhook_id AS integration_id, webhook_id AS id,*;""", {"id": webhook_id, **changes}) ) w = helper.dict_to_camel_case(cur.fetchone()) @@ -98,7 +98,7 @@ def add(tenant_id, endpoint, 
auth_header=None, webhook_type='webhook', name="", query = cur.mogrify("""\ INSERT INTO public.webhooks(endpoint,auth_header,type,name) VALUES (%(endpoint)s, %(auth_header)s, %(type)s,%(name)s) - RETURNING *;""", + RETURNING webhook_id AS integration_id, webhook_id AS id,*;""", {"endpoint": endpoint, "auth_header": auth_header, "type": webhook_type, "name": name}) cur.execute( diff --git a/api/chalicelib/utils/jira_client.py b/api/chalicelib/utils/jira_client.py index 6da501bbe..a7ab92932 100644 --- a/api/chalicelib/utils/jira_client.py +++ b/api/chalicelib/utils/jira_client.py @@ -68,7 +68,8 @@ class JiraManager: # print(issue.raw) issue_dict_list.append(self.__parser_issue_info(issue, include_comments=False)) - return {"total": issues.total, "issues": issue_dict_list} + # return {"total": issues.total, "issues": issue_dict_list} + return issue_dict_list def get_issue(self, issue_id: str): try: diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index 8d1e37d40..89a9dc8fa 100644 --- a/api/chalicelib/utils/pg_client.py +++ b/api/chalicelib/utils/pg_client.py @@ -9,9 +9,25 @@ PG_CONFIG = {"host": environ["pg_host"], "port": int(environ["pg_port"])} from psycopg2 import pool +from threading import Semaphore + + +class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool): + def __init__(self, minconn, maxconn, *args, **kwargs): + self._semaphore = Semaphore(maxconn) + super().__init__(minconn, maxconn, *args, **kwargs) + + def getconn(self, *args, **kwargs): + self._semaphore.acquire() + return super().getconn(*args, **kwargs) + + def putconn(self, *args, **kwargs): + super().putconn(*args, **kwargs) + self._semaphore.release() + try: - postgreSQL_pool = psycopg2.pool.ThreadedConnectionPool(6, 20, **PG_CONFIG) + postgreSQL_pool = ORThreadedConnectionPool(20, 100, **PG_CONFIG) if (postgreSQL_pool): print("Connection pool created successfully") except (Exception, psycopg2.DatabaseError) as error: @@ -19,13 +35,6 @@ except (Exception, psycopg2.DatabaseError) as error: raise error -# finally: -# # closing database connection. -# # use closeall method to close all the active connection if you want to turn of the application -# if (postgreSQL_pool): -# postgreSQL_pool.closeall -# print("PostgreSQL connection pool is closed") - class PostgresClient: connection = None cursor = None diff --git a/api/chalicelib/utils/s3.py b/api/chalicelib/utils/s3.py index 29a8d28bc..49b6cfc85 100644 --- a/api/chalicelib/utils/s3.py +++ b/api/chalicelib/utils/s3.py @@ -2,7 +2,7 @@ from botocore.exceptions import ClientError from chalicelib.utils.helper import environ import boto3 - +import botocore from botocore.client import Config client = boto3.client('s3', endpoint_url=environ["S3_HOST"], @@ -13,14 +13,20 @@ client = boto3.client('s3', endpoint_url=environ["S3_HOST"], def exists(bucket, key): - response = client.list_objects_v2( - Bucket=bucket, - Prefix=key, - ) - for obj in response.get('Contents', []): - if obj['Key'] == key: - return True - return False + try: + boto3.resource('s3', endpoint_url=environ["S3_HOST"], + aws_access_key_id=environ["S3_KEY"], + aws_secret_access_key=environ["S3_SECRET"], + config=Config(signature_version='s3v4'), + region_name='us-east-1') \ + .Object(bucket, key).load() + except botocore.exceptions.ClientError as e: + if e.response['Error']['Code'] == "404": + return False + else: + # Something else has gone wrong. 
+            raise
+    return True
 
 
 def get_presigned_url_for_sharing(bucket, expires_in, key, check_exists=False):
@@ -49,6 +55,9 @@ def get_presigned_url_for_upload(bucket, expires_in, key):
 
 
 def get_file(source_bucket, source_key):
+    print("******************************")
+    print(f"looking for: {source_key} in {source_bucket}")
+    print("******************************")
     try:
         result = client.get_object(
             Bucket=source_bucket,
diff --git a/api/entrypoint.sh b/api/entrypoint.sh
new file mode 100755
index 000000000..3c3d12fd5
--- /dev/null
+++ b/api/entrypoint.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+cd sourcemaps_reader
+nohup node server.js &> /tmp/sourcemaps_reader.log &
+cd ..
+python env_handler.py
+chalice local --no-autoreload --host 0.0.0.0 --stage ${ENTERPRISE_BUILD}
diff --git a/api/requirements.txt b/api/requirements.txt
index 094d32758..671aa5da5 100644
--- a/api/requirements.txt
+++ b/api/requirements.txt
@@ -5,9 +5,6 @@ pyjwt==1.7.1
 psycopg2-binary==2.8.6
 pytz==2020.1
 sentry-sdk==0.19.1
-rollbar==0.15.1
-bugsnag==4.0.1
-kubernetes==12.0.0
 elasticsearch==7.9.1
 jira==2.0.0
 schedule==1.1.0
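Note: the exists() rewrite above (api/chalicelib/utils/s3.py) probes a key with a HEAD-style Object(...).load(), but it constructs a fresh boto3 resource on every call. A minimal sketch of an equivalent check that reuses the module-level client already defined in that file; this variant is an assumption for illustration, not part of the commit:

    import botocore

    from chalicelib.utils.s3 import client  # module-level client from the file above


    def exists(bucket, key):
        # head_object sends the same HEAD request as s3.Object(bucket, key).load()
        try:
            client.head_object(Bucket=bucket, Key=key)
        except botocore.exceptions.ClientError as e:
            if e.response["Error"]["Code"] == "404":
                return False
            raise  # permission or transport errors should surface, not read as "missing"
        return True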
diff --git a/api/sourcemaps_reader/handler.js b/api/sourcemaps_reader/handler.js
new file mode 100644
index 000000000..117808cae
--- /dev/null
+++ b/api/sourcemaps_reader/handler.js
@@ -0,0 +1,111 @@
+'use strict';
+const sourceMap = require('source-map');
+const AWS = require('aws-sdk');
+const sourceMapVersion = require('./package.json').dependencies["source-map"];
+const URL = require('url');
+const getVersion = version => version.replace(/[\^\$\=\~]/, "");
+
+module.exports.sourcemapReader = async event => {
+    sourceMap.SourceMapConsumer.initialize({
+        "lib/mappings.wasm": `https://unpkg.com/source-map@${getVersion(sourceMapVersion)}/lib/mappings.wasm`
+    });
+    let s3;
+    if (process.env.S3_HOST) {
+        s3 = new AWS.S3({
+            endpoint: process.env.S3_HOST,
+            accessKeyId: process.env.S3_KEY,
+            secretAccessKey: process.env.S3_SECRET,
+            s3ForcePathStyle: true, // needed with minio?
+            signatureVersion: 'v4'
+        });
+    } else {
+        s3 = new AWS.S3({
+            accessKeyId: process.env.aws_access_key_id,
+            secretAccessKey: process.env.aws_secret_access_key,
+            region: process.env.aws_region
+        });
+    }
+
+    var options = {
+        Bucket: event.bucket,
+        Key: event.key
+    };
+    return new Promise(function (resolve, reject) {
+        s3.getObject(options, (err, data) => {
+            if (err) {
+                console.log("Get S3 object failed");
+                console.log(err);
+                return reject(err);
+            }
+            const sourcemap = data.Body.toString();
+
+            return new sourceMap.SourceMapConsumer(sourcemap)
+                .then(consumer => {
+                    let results = [];
+                    for (let i = 0; i < event.positions.length; i++) {
+                        let original = consumer.originalPositionFor({
+                            line: event.positions[i].line,
+                            column: event.positions[i].column
+                        });
+                        let url = URL.parse("");
+                        let preview = [];
+                        if (original.source) {
+                            preview = consumer.sourceContentFor(original.source, true);
+                            if (preview !== null) {
+                                preview = preview.split("\n")
+                                    .map((line, i) => [i + 1, line]);
+                                if (event.padding) {
+                                    let start = original.line < event.padding ? 0 : original.line - event.padding;
+                                    preview = preview.slice(start, original.line + event.padding);
+                                }
+                            } else {
+                                console.log("source not found, null preview for:");
+                                console.log(original.source);
+                                preview = []
+                            }
+                            url = URL.parse(original.source);
+                        } else {
+                            console.log("couldn't find original position of:");
+                            console.log({
+                                line: event.positions[i].line,
+                                column: event.positions[i].column
+                            });
+                        }
+                        let result = {
+                            "absPath": url.href,
+                            "filename": url.pathname,
+                            "lineNo": original.line,
+                            "colNo": original.column,
+                            "function": original.name,
+                            "context": preview
+                        };
+                        // console.log(result);
+                        results.push(result);
+                    }
+
+                    // Use this code if you don't use the http event with the LAMBDA-PROXY integration
+                    return resolve(results);
+                });
+        });
+    });
+};
+
+
+// let v = {
+//     'key': '1725/99f96f044fa7e941dbb15d7d68b20549',
+//     'positions': [{'line': 1, 'column': 943}],
+//     'padding': 5,
+//     'bucket': 'asayer-sourcemaps'
+// };
+// let v = {
+//     'key': '1/65d8d3866bb8c92f3db612cb330f270c',
+//     'positions': [{'line': 1, 'column': 0}],
+//     'padding': 5,
+//     'bucket': 'asayer-sourcemaps-staging'
+// };
+// module.exports.sourcemapReader(v).then((r) => {
+//     // console.log(r);
+//     const fs = require('fs');
+//     let data = JSON.stringify(r);
+//     fs.writeFileSync('results.json', data);
+// });
\ No newline at end of file
diff --git a/api/sourcemaps_reader/server.js b/api/sourcemaps_reader/server.js
new file mode 100644
index 000000000..2a1c4dcf6
--- /dev/null
+++ b/api/sourcemaps_reader/server.js
@@ -0,0 +1,38 @@
+const http = require('http');
+const handler = require('./handler');
+const hostname = '127.0.0.1';
+const port = 3000;
+
+const server = http.createServer((req, res) => {
+    if (req.method === 'POST') {
+        let data = '';
+        req.on('data', chunk => {
+            data += chunk;
+        });
+        req.on('end', function () {
+            data = JSON.parse(data);
+            console.log("Starting parser for: " + data.key);
+            // process.env = {...process.env, ...data.bucket_config};
+            handler.sourcemapReader(data)
+                .then((results) => {
+                    res.statusCode = 200;
+                    res.setHeader('Content-Type', 'application/json');
+                    res.end(JSON.stringify(results));
+                })
+                .catch((e) => {
+                    console.error("Something went wrong");
+                    console.error(e);
+                    res.statusCode = 500;
+                    res.end(String(e));
+                });
+        })
+    } else {
+        res.statusCode = 405;
+        res.setHeader('Content-Type', 'text/plain');
+        res.end('Method Not Allowed');
+    }
+});
+
+server.listen(port, hostname, () => {
+    console.log(`Server running at http://${hostname}:${port}/`);
+});
\ No newline at end of file
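Note: chalicelib/core/sourcemaps_parser.py (earlier in this diff) is the caller of this sidecar; it POSTs JSON and expects a JSON array of resolved frames back. A minimal sketch of the contract, with the key borrowed from the commented fixture above for illustration:

    import requests

    payload = {
        "key": "1/65d8d3866bb8c92f3db612cb330f270c",  # illustrative S3 object key
        "positions": [{"line": 1, "column": 0}],
        "padding": 5,  # lines of source context kept around each position
        "bucket": "sourcemaps",
    }
    # "sourcemaps_reader" defaults to http://127.0.0.1:3000/ in .chalice/config.json
    r = requests.post("http://127.0.0.1:3000/", json=payload, timeout=10)
    frames = r.json() if r.status_code == 200 else {}
    # each frame: {"absPath", "filename", "lineNo", "colNo", "function", "context"}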
diff --git a/backend/pkg/db/postgres/messages_web.go b/backend/pkg/db/postgres/messages_web.go
index 9156ab78e..25e044e68 100644
--- a/backend/pkg/db/postgres/messages_web.go
+++ b/backend/pkg/db/postgres/messages_web.go
@@ -92,8 +92,8 @@ func (conn *Conn) InsertWebPageEvent(sessionID uint64, e *PageEvent) error {
 	if err = tx.commit(); err != nil {
 		return err
 	}
-	conn.insertAutocompleteValue(sessionID, url.DiscardURLQuery(path), "LOCATION")
-	conn.insertAutocompleteValue(sessionID, url.DiscardURLQuery(e.Referrer), "REFERRER")
+	conn.insertAutocompleteValue(sessionID, "LOCATION", url.DiscardURLQuery(path))
+	conn.insertAutocompleteValue(sessionID, "REFERRER", url.DiscardURLQuery(e.Referrer))
 	return nil
 }
 
@@ -123,7 +123,7 @@ func (conn *Conn) InsertWebClickEvent(sessionID uint64, e *ClickEvent) error {
 	if err = tx.commit(); err != nil {
 		return err
 	}
-	conn.insertAutocompleteValue(sessionID, e.Label, "CLICK")
+	conn.insertAutocompleteValue(sessionID, "CLICK", e.Label)
 	return nil
 }
 
@@ -158,7 +158,7 @@ func (conn *Conn) InsertWebInputEvent(sessionID uint64, e *InputEvent) error {
 	if err = tx.commit(); err != nil {
 		return err
 	}
-	conn.insertAutocompleteValue(sessionID, e.Label, "INPUT")
+	conn.insertAutocompleteValue(sessionID, "INPUT", e.Label)
 	return nil
 }
diff --git a/backend/services/db/messages.go b/backend/services/db/messages.go
index 6aa4ac076..511165c5f 100644
--- a/backend/services/db/messages.go
+++ b/backend/services/db/messages.go
@@ -16,6 +16,7 @@ func insertMessage(sessionID uint64, msg Message) error {
 	// Web
 	case *SessionStart:
+		log.Printf("Session Start: %v", sessionID)
 		return pg.InsertWebSessionStart(sessionID, m)
 	case *SessionEnd:
 		return pg.InsertWebSessionEnd(sessionID, m)
diff --git a/backend/services/ender/builder/builder.go b/backend/services/ender/builder/builder.go
index cccf96bcf..246b2f7e0 100644
--- a/backend/services/ender/builder/builder.go
+++ b/backend/services/ender/builder/builder.go
@@ -82,6 +82,9 @@ func (b *builder) iterateReadyMessage(iter func(msg Message)) {
 }
 
 func (b *builder) buildSessionEnd() {
+	if b.timestamp == 0 {
+		return
+	}
 	sessionEnd := &SessionEnd{
 		Timestamp: b.timestamp, // + delay?
 	}
@@ -106,16 +109,25 @@ func (b *builder) buildInputEvent() {
 
 func (b *builder) handleMessage(message Message, messageID uint64) {
 	timestamp := uint64(message.Meta().Timestamp)
-	if b.timestamp <= timestamp {
+	if b.timestamp <= timestamp { // unnecessary. TODO: test and remove
 		b.timestamp = timestamp
 	}
-	// Start from the first timestamp.
+	// Before the first timestamp.
 	switch msg := message.(type) {
 	case *SessionStart, *Metadata, *UserID, *UserAnonymousID:
 		b.appendReadyMessage(msg)
+	case *RawErrorEvent:
+		b.appendReadyMessage(&ErrorEvent{
+			MessageID: messageID,
+			Timestamp: msg.Timestamp,
+			Source:    msg.Source,
+			Name:      msg.Name,
+			Message:   msg.Message,
+			Payload:   msg.Payload,
+		})
 	}
 	if b.timestamp == 0 {
 		return
@@ -177,15 +189,6 @@ func (b *builder) handleMessage(message Message, messageID uint64) {
 				Timestamp: b.timestamp,
 			})
 		}
-	case *RawErrorEvent:
-		b.appendReadyMessage(&ErrorEvent{
-			MessageID: messageID,
-			Timestamp: msg.Timestamp,
-			Source:    msg.Source,
-			Name:      msg.Name,
-			Message:   msg.Message,
-			Payload:   msg.Payload,
-		})
 	case *JSException:
 		b.appendReadyMessage(&ErrorEvent{
 			MessageID: messageID,
diff --git a/backend/services/ender/builder/inputEventBuilder.go b/backend/services/ender/builder/inputEventBuilder.go
index 4938e47a9..98c7ebaf6 100644
--- a/backend/services/ender/builder/inputEventBuilder.go
+++ b/backend/services/ender/builder/inputEventBuilder.go
@@ -69,10 +69,10 @@ func (b *inputEventBuilder) Build() *InputEvent {
 		return nil
 	}
 	inputEvent := b.inputEvent
-	label := b.inputLabels[b.inputID]
-	// if !ok {
-	// 	return nil
-	// }
+	label, exists := b.inputLabels[b.inputID]
+	if !exists {
+		return nil
+	}
 	inputEvent.Label = label
 
 	b.inputEvent = nil
diff --git a/backend/services/integrations/integration/sentry.go b/backend/services/integrations/integration/sentry.go
index 39443f51a..0330430c3 100644
--- a/backend/services/integrations/integration/sentry.go
+++ b/backend/services/integrations/integration/sentry.go
@@ -111,7 +111,7 @@ PageLoop:
 			c.errChan <- err
 			continue
 		}
-		if sessionID == 0 { // We can't felter them on request
+		if token == "" && sessionID == 0 { // We can't filter them on request
 			continue
 		}
diff --git a/backend/services/integrations/main.go b/backend/services/integrations/main.go
index 68b4ec5aa..e1ea58ebd 100644
--- a/backend/services/integrations/main.go
+++ b/backend/services/integrations/main.go
@@ -19,7 +19,7 @@ import (
 func main() {
 	log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
 
-	TOPIC_TRIGGER := env.String("TOPIC_TRIGGER")
+	TOPIC_RAW := env.String("TOPIC_RAW")
 	POSTGRES_STRING := env.String("POSTGRES_STRING")
 
 	pg := postgres.NewConn(POSTGRES_STRING)
@@ -43,6 +43,7 @@ func main() {
 	})
 
 	producer := queue.NewProducer()
+	defer producer.Close(15000)
 
 	listener, err := postgres.NewIntegrationsListener(POSTGRES_STRING)
 	if err != nil {
@@ -72,13 +73,14 @@ func main() {
 			sessionID := event.SessionID
 			if sessionID == 0 {
 				sessData, err := tokenizer.Parse(event.Token)
-				if err != nil {
+				if err != nil && err != token.EXPIRED {
 					log.Printf("Error on token parsing: %v; Token: %v", err, event.Token)
 					continue
 				}
 				sessionID = sessData.ID
 			}
-			producer.Produce(TOPIC_TRIGGER, sessionID, messages.Encode(event.RawErrorEvent))
+			// TODO: send to ready-events topic. Otherwise it has to go through the events worker.
+			producer.Produce(TOPIC_RAW, sessionID, messages.Encode(event.RawErrorEvent))
 		case err := <-manager.Errors:
 			log.Printf("Integration error: %v\n", err)
 		case i := <-manager.RequestDataUpdates:
@@ -86,10 +88,10 @@ func main() {
 			if err := pg.UpdateIntegrationRequestData(&i); err != nil {
 				log.Printf("Postgres Update request_data error: %v\n", err)
 			}
-		//case err := <-listener.Errors:
-		//log.Printf("Postgres listen error: %v\n", err)
+		case err := <-listener.Errors:
+			log.Printf("Postgres listen error: %v\n", err)
 		case iPointer := <-listener.Integrations:
-			// log.Printf("Integration update: %v\n", *iPointer)
+			log.Printf("Integration update: %v\n", *iPointer)
 			err := manager.Update(iPointer)
 			if err != nil {
 				log.Printf("Integration parse error: %v | Integration: %v\n", err, *iPointer)
diff --git a/ee/api/.chalice/config.json b/ee/api/.chalice/config.json
index 605e5b7c1..5cda73bd3 100644
--- a/ee/api/.chalice/config.json
+++ b/ee/api/.chalice/config.json
@@ -31,14 +31,12 @@
       "assign_link": "http://127.0.0.1:8000/async/email_assignment",
       "captcha_server": "",
       "captcha_key": "",
-      "sessions_bucket": "asayer-mobs",
+      "sessions_bucket": "mobs",
       "sessions_region": "us-east-1",
       "put_S3_TTL": "20",
-      "sourcemaps_bucket": "asayer-sourcemaps",
-      "sourcemaps_bucket_key": "",
-      "sourcemaps_bucket_secret": "",
-      "sourcemaps_bucket_region": "us-east-1",
-      "js_cache_bucket": "asayer-sessions-assets",
+      "sourcemaps_reader": "http://127.0.0.1:3000/",
+      "sourcemaps_bucket": "sourcemaps",
+      "js_cache_bucket": "sessions-assets",
       "async_Token": "",
       "EMAIL_HOST": "",
       "EMAIL_PORT": "587",
diff --git a/ee/api/.gitignore b/ee/api/.gitignore
index 812abce9c..7e2873ee0 100644
--- a/ee/api/.gitignore
+++ b/ee/api/.gitignore
@@ -170,8 +170,8 @@ logs*.txt
 *.csv
 *.p
-*.js
 SUBNETS.json
 chalicelib/.config
-chalicelib/saas
\ No newline at end of file
+chalicelib/saas
+README/*
\ No newline at end of file
diff --git a/ee/api/app.py b/ee/api/app.py
index da75c1ac5..d604992a1 100644
--- a/ee/api/app.py
+++ b/ee/api/app.py
@@ -25,13 +25,13 @@ import traceback
 old_tb = traceback.print_exception
 old_f = sys.stdout
 old_e = sys.stderr
-ASAYER_SESSION_ID = None
+OR_SESSION_TOKEN = None
 
 
 class F:
     def write(self, x):
-        if ASAYER_SESSION_ID is not None and x != '\n' and not helper.is_local():
-            old_f.write(f"[asayer_session_id={ASAYER_SESSION_ID}] {x}")
+        if OR_SESSION_TOKEN is not None and x != '\n' and not helper.is_local():
+            old_f.write(f"[or_session_token={OR_SESSION_TOKEN}] {x}")
         else:
             old_f.write(x)
 
@@ -40,9 +40,8 @@ class F:
 
 def tb_print_exception(etype, value, tb, limit=None, file=None, chain=True):
-    if ASAYER_SESSION_ID is not None and 
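Note on the pg_client.py change (api copy above, ee copy further below): the stock ThreadedConnectionPool raises PoolError as soon as maxconn connections are checked out, while ORThreadedConnectionPool makes the extra caller block on a semaphore until putconn() runs. A minimal sketch of that behavior, using hypothetical connection parameters:

    import threading

    from chalicelib.utils.pg_client import ORThreadedConnectionPool

    # hypothetical local DSN, for illustration only
    pool = ORThreadedConnectionPool(1, 2, host="127.0.0.1", port=5432,
                                    dbname="postgres", user="postgres", password="")


    def worker():
        conn = pool.getconn()  # a third concurrent caller blocks here instead of raising PoolError
        try:
            with conn.cursor() as cur:
                cur.execute("SELECT pg_sleep(1)")
        finally:
            pool.putconn(conn)  # releases the semaphore and wakes one waiter


    threads = [threading.Thread(target=worker) for _ in range(3)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()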
not helper.is_local(): - # bugsnag.notify(Exception(str(value)), meta_data={"special_info": {"asayerSessionId": ASAYER_SESSION_ID}}) - value = type(value)(f"[asayer_session_id={ASAYER_SESSION_ID}] " + str(value)) + if OR_SESSION_TOKEN is not None and not helper.is_local(): + value = type(value)(f"[or_session_token={OR_SESSION_TOKEN}] " + str(value)) old_tb(etype, value, tb, limit, file, chain) @@ -59,7 +58,7 @@ _overrides.chalice_app(app) @app.middleware('http') -def asayer_middleware(event, get_response): +def or_middleware(event, get_response): from chalicelib.ee import unlock if not unlock.is_valid(): return Response(body={"errors": ["expired license"]}, status_code=403) @@ -68,12 +67,11 @@ def asayer_middleware(event, get_response): if not projects.is_authorized(project_id=event.uri_params["projectId"], tenant_id=event.context["authorizer"]["tenantId"]): print("unauthorized project") - # return {"errors": ["unauthorized project"]} pg_client.close() return Response(body={"errors": ["unauthorized project"]}, status_code=401) - global ASAYER_SESSION_ID - ASAYER_SESSION_ID = app.current_request.headers.get('vnd.openreplay.com.sid', - app.current_request.headers.get('vnd.asayer.io.sid')) + global OR_SESSION_TOKEN + OR_SESSION_TOKEN = app.current_request.headers.get('vnd.openreplay.com.sid', + app.current_request.headers.get('vnd.asayer.io.sid')) if "authorizer" in event.context and event.context["authorizer"] is None: print("Deleted user!!") pg_client.close() @@ -84,19 +82,24 @@ def asayer_middleware(event, get_response): import time now = int(time.time() * 1000) response = get_response(event) + if response.status_code == 500 and helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local(): + with configure_scope() as scope: + scope.set_tag('stage', environ["stage"]) + scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN) + scope.set_extra("context", event.context) + sentry_sdk.capture_exception(Exception(response.body)) if helper.TRACK_TIME: print(f"Execution time: {int(time.time() * 1000) - now} ms") except Exception as e: - print("middleware exception handling") - print(e) - pg_client.close() - if helper.allow_sentry() and ASAYER_SESSION_ID is not None and not helper.is_local(): + if helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local(): with configure_scope() as scope: scope.set_tag('stage', environ["stage"]) - scope.set_tag('openReplaySessionToken', ASAYER_SESSION_ID) + scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN) scope.set_extra("context", event.context) sentry_sdk.capture_exception(e) - raise e + response = Response(body={"Code": "InternalServerError", + "Message": "An internal server error occurred [level=Fatal]."}, + status_code=500) pg_client.close() return response diff --git a/ee/api/chalicelib/blueprints/bp_core.py b/ee/api/chalicelib/blueprints/bp_core.py index 3b2910606..bd42b2254 100644 --- a/ee/api/chalicelib/blueprints/bp_core.py +++ b/ee/api/chalicelib/blueprints/bp_core.py @@ -881,5 +881,5 @@ def all_issue_types(context): @app.route('/flows', methods=['GET', 'PUT', 'POST', 'DELETE']) @app.route('/{projectId}/flows', methods=['GET', 'PUT', 'POST', 'DELETE']) -def removed_endpoints(context): +def removed_endpoints(projectId=None, context=None): return Response(body={"errors": ["Endpoint no longer available"]}, status_code=410) diff --git a/ee/api/chalicelib/blueprints/bp_core_dynamic.py b/ee/api/chalicelib/blueprints/bp_core_dynamic.py index 505f10cb9..6e45627df 100644 --- 
a/ee/api/chalicelib/blueprints/bp_core_dynamic.py +++ b/ee/api/chalicelib/blueprints/bp_core_dynamic.py @@ -73,10 +73,12 @@ def get_account(context): "projects": -1, "metadata": metadata.get_remaining_metadata_with_count(context['tenantId']) }, - **license.get_status(context["tenantId"]) + **license.get_status(context["tenantId"]), + "smtp": environ["EMAIL_HOST"] is not None and len(environ["EMAIL_HOST"]) > 0 } } + @app.route('/projects', methods=['GET']) def get_projects(context): return {"data": projects.get_projects(tenant_id=context["tenantId"], recording_state=True, gdpr=True, recorded=True, @@ -157,12 +159,27 @@ def add_slack_client(context): data = app.current_request.json_body if "url" not in data or "name" not in data: return {"errors": ["please provide a url and a name"]} - if Slack.add_integration(tenant_id=context["tenantId"], url=data["url"], name=data["name"]): - return {"data": {"status": "success"}} - else: + n = Slack.add_channel(tenant_id=context["tenantId"], url=data["url"], name=data["name"]) + if n is None: return { - "errors": ["failed URL verification, if you received a message on slack, please notify our dev-team"] + "errors": ["We couldn't send you a test message on your Slack channel. Please verify your webhook url."] } + return {"data": n} + + +@app.route('/integrations/slack/{integrationId}', methods=['POST', 'PUT']) +def edit_slack_integration(integrationId, context): + data = app.current_request.json_body + if data.get("url") and len(data["url"]) > 0: + old = webhook.get(tenant_id=context["tenantId"], webhook_id=integrationId) + if old["endpoint"] != data["url"]: + if not Slack.say_hello(data["url"]): + return { + "errors": [ + "We couldn't send you a test message on your Slack channel. Please verify your webhook url."] + } + return {"data": webhook.update(tenant_id=context["tenantId"], webhook_id=integrationId, + changes={"name": data.get("name", ""), "endpoint": data["url"]})} @app.route('/{projectId}/errors/search', methods=['POST']) @@ -391,6 +408,7 @@ def search_sessions_by_metadata(context): m_key=key, project_id=project_id)} + @app.route('/plans', methods=['GET']) def get_current_plan(context): return { diff --git a/ee/api/chalicelib/core/collaboration_slack.py b/ee/api/chalicelib/core/collaboration_slack.py index 5fc80511c..b3da03a37 100644 --- a/ee/api/chalicelib/core/collaboration_slack.py +++ b/ee/api/chalicelib/core/collaboration_slack.py @@ -6,19 +6,18 @@ from chalicelib.core import webhook class Slack: @classmethod - def add_integration(cls, tenant_id, **args): + def add_channel(cls, tenant_id, **args): url = args["url"] name = args["name"] - if cls.__say_hello(url): - webhook.add(tenant_id=tenant_id, - endpoint=url, - webhook_type="slack", - name=name) - return True - return False + if cls.say_hello(url): + return webhook.add(tenant_id=tenant_id, + endpoint=url, + webhook_type="slack", + name=name) + return None @classmethod - def __say_hello(cls, url): + def say_hello(cls, url): r = requests.post( url=url, json={ diff --git a/ee/api/chalicelib/core/events.py b/ee/api/chalicelib/core/events.py index 65ade49ed..69213a079 100644 --- a/ee/api/chalicelib/core/events.py +++ b/ee/api/chalicelib/core/events.py @@ -365,7 +365,7 @@ def __get_merged_queries(queries, value, project_id): def __get_autocomplete_table(value, project_id): with pg_client.PostgresClient() as cur: cur.execute(cur.mogrify("""SELECT DISTINCT ON(value,type) project_id, value, type - FROM (SELECT * + FROM (SELECT project_id, type, value FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY 
type ORDER BY value) AS Row_ID FROM public.autocomplete diff --git a/ee/api/chalicelib/core/integration_jira_cloud_issue.py b/ee/api/chalicelib/core/integration_jira_cloud_issue.py index 00fac2fcb..bb847007a 100644 --- a/ee/api/chalicelib/core/integration_jira_cloud_issue.py +++ b/ee/api/chalicelib/core/integration_jira_cloud_issue.py @@ -34,7 +34,7 @@ class JIRACloudIntegrationIssue(BaseIntegrationIssue): if len(projects_map[integration_project_id]) > 0: jql += f" AND ID IN ({','.join(projects_map[integration_project_id])})" issues = self._client.get_issues(jql, offset=0) - results += [issues] + results += issues return {"issues": results} def get(self, integration_project_id, assignment_id): diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py index 9d9ff204a..56ba7c463 100644 --- a/ee/api/chalicelib/core/sessions.py +++ b/ee/api/chalicelib/core/sessions.py @@ -1,6 +1,6 @@ from chalicelib.utils import pg_client, helper from chalicelib.utils import dev -from chalicelib.core import events, sessions_metas, socket_ios, metadata, events_ios, sessions_mobs +from chalicelib.core import events, sessions_metas, socket_ios, metadata, events_ios, sessions_mobs, issues from chalicelib.ee import projects, errors @@ -24,7 +24,7 @@ SESSION_PROJECTION_COLS = """s.project_id, s.user_anonymous_id, s.platform, s.issue_score, - s.issue_types::text[] AS issue_types, + to_jsonb(s.issue_types) AS issue_types, favorite_sessions.session_id NOTNULL AS favorite, COALESCE((SELECT TRUE FROM public.user_viewed_sessions AS fs @@ -83,7 +83,6 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, session_id=session_id) data['mobsUrl'] = sessions_mobs.get_ios(sessionId=session_id) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data["socket"] = socket_ios.start_replay(project_id=project_id, session_id=session_id, device=data["userDevice"], os_version=data["userOsVersion"], @@ -100,9 +99,11 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_ data['userEvents'] = events.get_customs_by_sessionId2_pg(project_id=project_id, session_id=session_id) data['mobsUrl'] = sessions_mobs.get_web(sessionId=session_id) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['resources'] = resources.get_by_session_id(session_id=session_id) + data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['issues'] = issues.get_by_session_id(session_id=session_id) + return data return None diff --git a/ee/api/chalicelib/core/sessions_assignments.py b/ee/api/chalicelib/core/sessions_assignments.py index 2b9c28d8f..3e0929dad 100644 --- a/ee/api/chalicelib/core/sessions_assignments.py +++ b/ee/api/chalicelib/core/sessions_assignments.py @@ -119,7 +119,6 @@ def get_by_session(tenant_id, user_id, project_id, session_id): continue r = integration.issue_handler.get_by_ids(saved_issues=issues[tool]) - print(r) for i in r["issues"]: i["provider"] = tool results += r["issues"] diff --git a/ee/api/chalicelib/core/sessions_mobs.py b/ee/api/chalicelib/core/sessions_mobs.py index b96662c67..80fe59b28 100644 --- a/ee/api/chalicelib/core/sessions_mobs.py +++ b/ee/api/chalicelib/core/sessions_mobs.py @@ -1,11 +1,11 @@ from chalicelib.utils import helper from chalicelib.utils.helper import environ -import boto3 +from chalicelib.utils.s3 import client def 
get_web(sessionId): - return boto3.client('s3', region_name=environ["sessions_region"]).generate_presigned_url( + return client.generate_presigned_url( 'get_object', Params={ 'Bucket': environ["sessions_bucket"], @@ -16,7 +16,7 @@ def get_web(sessionId): def get_ios(sessionId): - return boto3.client('s3', region_name=environ["ios_region"]).generate_presigned_url( + return client.generate_presigned_url( 'get_object', Params={ 'Bucket': environ["ios_bucket"], diff --git a/ee/api/chalicelib/core/sourcemaps.py b/ee/api/chalicelib/core/sourcemaps.py index 5f82a31e2..dbd7213ea 100644 --- a/ee/api/chalicelib/core/sourcemaps.py +++ b/ee/api/chalicelib/core/sourcemaps.py @@ -79,7 +79,12 @@ def get_traces_group(project_id, payload): payloads = {} all_exists = True for i, u in enumerate(frames): + print("===============================") + print(u["absPath"]) + print("converted to:") key = __get_key(project_id, u["absPath"]) # use filename instead? + print(key) + print("===============================") if key not in payloads: file_exists = s3.exists(environ['sourcemaps_bucket'], key) all_exists = all_exists and file_exists diff --git a/ee/api/chalicelib/core/sourcemaps_parser.py b/ee/api/chalicelib/core/sourcemaps_parser.py index cb0463d55..b7c17f3d3 100644 --- a/ee/api/chalicelib/core/sourcemaps_parser.py +++ b/ee/api/chalicelib/core/sourcemaps_parser.py @@ -8,14 +8,9 @@ def get_original_trace(key, positions): "key": key, "positions": positions, "padding": 5, - "bucket": environ['sourcemaps_bucket'], - "bucket_config": { - "aws_access_key_id": environ["sourcemaps_bucket_key"], - "aws_secret_access_key": environ["sourcemaps_bucket_secret"], - "aws_region": environ["sourcemaps_bucket_region"] - } + "bucket": environ['sourcemaps_bucket'] } - r = requests.post(environ["sourcemaps"], json=payload) + r = requests.post(environ["sourcemaps_reader"], json=payload) if r.status_code != 200: return {} diff --git a/ee/api/chalicelib/ee/webhook.py b/ee/api/chalicelib/ee/webhook.py index 0a2406ab9..20e873f5c 100644 --- a/ee/api/chalicelib/ee/webhook.py +++ b/ee/api/chalicelib/ee/webhook.py @@ -8,7 +8,7 @@ def get_by_id(webhook_id): cur.execute( cur.mogrify("""\ SELECT - w.* + webhook_id AS integration_id, webhook_id AS id, w.* FROM public.webhooks AS w where w.webhook_id =%(webhook_id)s AND deleted_at ISNULL;""", {"webhook_id": webhook_id}) @@ -24,7 +24,7 @@ def get(tenant_id, webhook_id): cur.execute( cur.mogrify("""\ SELECT - w.* + webhook_id AS integration_id, webhook_id AS id, w.* FROM public.webhooks AS w where w.webhook_id =%(webhook_id)s AND w.tenant_id =%(tenant_id)s AND deleted_at ISNULL;""", {"webhook_id": webhook_id, "tenant_id": tenant_id}) @@ -40,7 +40,7 @@ def get_by_type(tenant_id, webhook_type): cur.execute( cur.mogrify("""\ SELECT - w.webhook_id AS id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at + w.webhook_id AS integration_id, w.webhook_id AS id,w.webhook_id,w.endpoint,w.auth_header,w.type,w.index,w.name,w.created_at FROM public.webhooks AS w where w.tenant_id =%(tenant_id)s @@ -59,7 +59,7 @@ def get_by_tenant(tenant_id, replace_none=False): cur.execute( cur.mogrify("""\ SELECT - w.* + webhook_id AS integration_id, webhook_id AS id,w.* FROM public.webhooks AS w where w.tenant_id =%(tenant_id)s @@ -88,7 +88,7 @@ def update(tenant_id, webhook_id, changes, replace_none=False): UPDATE public.webhooks SET {','.join(sub_query)} WHERE tenant_id =%(tenant_id)s AND webhook_id =%(id)s AND deleted_at ISNULL - RETURNING *;""", + RETURNING webhook_id AS integration_id, webhook_id 
AS id,*;""", {"tenant_id": tenant_id, "id": webhook_id, **changes}) ) w = helper.dict_to_camel_case(cur.fetchone()) @@ -105,7 +105,7 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="", query = cur.mogrify("""\ INSERT INTO public.webhooks(tenant_id, endpoint,auth_header,type,name) VALUES (%(tenant_id)s, %(endpoint)s, %(auth_header)s, %(type)s,%(name)s) - RETURNING *;""", + RETURNING webhook_id AS integration_id, webhook_id AS id,*;""", {"tenant_id": tenant_id, "endpoint": endpoint, "auth_header": auth_header, "type": webhook_type, "name": name}) cur.execute( diff --git a/ee/api/chalicelib/utils/jira_client.py b/ee/api/chalicelib/utils/jira_client.py index 6da501bbe..a7ab92932 100644 --- a/ee/api/chalicelib/utils/jira_client.py +++ b/ee/api/chalicelib/utils/jira_client.py @@ -68,7 +68,8 @@ class JiraManager: # print(issue.raw) issue_dict_list.append(self.__parser_issue_info(issue, include_comments=False)) - return {"total": issues.total, "issues": issue_dict_list} + # return {"total": issues.total, "issues": issue_dict_list} + return issue_dict_list def get_issue(self, issue_id: str): try: diff --git a/ee/api/chalicelib/utils/pg_client.py b/ee/api/chalicelib/utils/pg_client.py index e95527d64..4df29be39 100644 --- a/ee/api/chalicelib/utils/pg_client.py +++ b/ee/api/chalicelib/utils/pg_client.py @@ -9,11 +9,26 @@ PG_CONFIG = {"host": environ["pg_host"], "port": int(environ["pg_port"])} # connexion pool for FOS & EE - from psycopg2 import pool +from threading import Semaphore + + +class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool): + def __init__(self, minconn, maxconn, *args, **kwargs): + self._semaphore = Semaphore(maxconn) + super().__init__(minconn, maxconn, *args, **kwargs) + + def getconn(self, *args, **kwargs): + self._semaphore.acquire() + return super().getconn(*args, **kwargs) + + def putconn(self, *args, **kwargs): + super().putconn(*args, **kwargs) + self._semaphore.release() + try: - postgreSQL_pool = psycopg2.pool.ThreadedConnectionPool(6, 20, **PG_CONFIG) + postgreSQL_pool = ORThreadedConnectionPool(20, 100, **PG_CONFIG) if (postgreSQL_pool): print("Connection pool created successfully") except (Exception, psycopg2.DatabaseError) as error: @@ -21,13 +36,6 @@ except (Exception, psycopg2.DatabaseError) as error: raise error -# finally: -# # closing database connection. 
-# # use closeall method to close all the active connection if you want to turn of the application -# if (postgreSQL_pool): -# postgreSQL_pool.closeall -# print("PostgreSQL connection pool is closed") - class PostgresClient: connection = None cursor = None diff --git a/ee/api/chalicelib/utils/s3.py b/ee/api/chalicelib/utils/s3.py index 29a8d28bc..c9516982f 100644 --- a/ee/api/chalicelib/utils/s3.py +++ b/ee/api/chalicelib/utils/s3.py @@ -3,6 +3,7 @@ from chalicelib.utils.helper import environ import boto3 +import botocore from botocore.client import Config client = boto3.client('s3', endpoint_url=environ["S3_HOST"], @@ -13,51 +14,17 @@ client = boto3.client('s3', endpoint_url=environ["S3_HOST"], def exists(bucket, key): - response = client.list_objects_v2( - Bucket=bucket, - Prefix=key, - ) - for obj in response.get('Contents', []): - if obj['Key'] == key: - return True - return False - - -def get_presigned_url_for_sharing(bucket, expires_in, key, check_exists=False): - if check_exists and not exists(bucket, key): - return None - - return client.generate_presigned_url( - 'get_object', - Params={ - 'Bucket': bucket, - 'Key': key - }, - ExpiresIn=expires_in - ) - - -def get_presigned_url_for_upload(bucket, expires_in, key): - return client.generate_presigned_url( - 'put_object', - Params={ - 'Bucket': bucket, - 'Key': key - }, - ExpiresIn=expires_in - ) - - -def get_file(source_bucket, source_key): try: - result = client.get_object( - Bucket=source_bucket, - Key=source_key - ) - except ClientError as ex: - if ex.response['Error']['Code'] == 'NoSuchKey': - print(f'======> No object found - returning None for {source_bucket}/{source_key}') - return None + boto3.resource('s3', endpoint_url=environ["S3_HOST"], + aws_access_key_id=environ["S3_KEY"], + aws_secret_access_key=environ["S3_SECRET"], + config=Config(signature_version='s3v4'), + region_name='us-east-1') \ + .Object(bucket, key).load() + except botocore.exceptions.ClientError as e: + if e.response['Error']['Code'] == "404": + return False else: - raise ex - return result["Body"].read().decode() + # Something else has gone wrong. + raise + return True diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index 3944c0923..4fa698105 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -5,9 +5,6 @@ pyjwt==1.7.1 psycopg2-binary==2.8.6 pytz==2020.1 sentry-sdk==0.19.1 -rollbar==0.15.1 -bugsnag==4.0.1 -kubernetes==12.0.0 elasticsearch==7.9.1 jira==2.0.0 schedule==1.1.0 diff --git a/ee/api/sourcemaps_reader/handler.js b/ee/api/sourcemaps_reader/handler.js new file mode 100644 index 000000000..117808cae --- /dev/null +++ b/ee/api/sourcemaps_reader/handler.js @@ -0,0 +1,111 @@ +'use strict'; +const sourceMap = require('source-map'); +const AWS = require('aws-sdk'); +const sourceMapVersion = require('./package.json').dependencies["source-map"]; +const URL = require('url'); +const getVersion = version => version.replace(/[\^\$\=\~]/, ""); + +module.exports.sourcemapReader = async event => { + sourceMap.SourceMapConsumer.initialize({ + "lib/mappings.wasm": `https://unpkg.com/source-map@${getVersion(sourceMapVersion)}/lib/mappings.wasm` + }); + let s3; + if (process.env.S3_HOST) { + s3 = new AWS.S3({ + endpoint: process.env.S3_HOST, + accessKeyId: process.env.S3_KEY, + secretAccessKey: process.env.S3_SECRET, + s3ForcePathStyle: true, // needed with minio? 
+            signatureVersion: 'v4'
+        });
+    } else {
+        s3 = new AWS.S3({
+            accessKeyId: process.env.aws_access_key_id,
+            secretAccessKey: process.env.aws_secret_access_key,
+            region: process.env.aws_region
+        });
+    }
+
+    var options = {
+        Bucket: event.bucket,
+        Key: event.key
+    };
+    return new Promise(function (resolve, reject) {
+        s3.getObject(options, (err, data) => {
+            if (err) {
+                console.log("Get S3 object failed");
+                console.log(err);
+                return reject(err);
+            }
+            const sourcemap = data.Body.toString();
+
+            return new sourceMap.SourceMapConsumer(sourcemap)
+                .then(consumer => {
+                    let results = [];
+                    for (let i = 0; i < event.positions.length; i++) {
+                        let original = consumer.originalPositionFor({
+                            line: event.positions[i].line,
+                            column: event.positions[i].column
+                        });
+                        let url = URL.parse("");
+                        let preview = [];
+                        if (original.source) {
+                            preview = consumer.sourceContentFor(original.source, true);
+                            if (preview !== null) {
+                                preview = preview.split("\n")
+                                    .map((line, i) => [i + 1, line]);
+                                if (event.padding) {
+                                    let start = original.line < event.padding ? 0 : original.line - event.padding;
+                                    preview = preview.slice(start, original.line + event.padding);
+                                }
+                            } else {
+                                console.log("source not found, null preview for:");
+                                console.log(original.source);
+                                preview = []
+                            }
+                            url = URL.parse(original.source);
+                        } else {
+                            console.log("couldn't find original position of:");
+                            console.log({
+                                line: event.positions[i].line,
+                                column: event.positions[i].column
+                            });
+                        }
+                        let result = {
+                            "absPath": url.href,
+                            "filename": url.pathname,
+                            "lineNo": original.line,
+                            "colNo": original.column,
+                            "function": original.name,
+                            "context": preview
+                        };
+                        // console.log(result);
+                        results.push(result);
+                    }
+
+                    // Use this code if you don't use the http event with the LAMBDA-PROXY integration
+                    return resolve(results);
+                });
+        });
+    });
+};
+
+
+// let v = {
+//     'key': '1725/99f96f044fa7e941dbb15d7d68b20549',
+//     'positions': [{'line': 1, 'column': 943}],
+//     'padding': 5,
+//     'bucket': 'asayer-sourcemaps'
+// };
+// let v = {
+//     'key': '1/65d8d3866bb8c92f3db612cb330f270c',
+//     'positions': [{'line': 1, 'column': 0}],
+//     'padding': 5,
+//     'bucket': 'asayer-sourcemaps-staging'
+// };
+// module.exports.sourcemapReader(v).then((r) => {
+//     // console.log(r);
+//     const fs = require('fs');
+//     let data = JSON.stringify(r);
+//     fs.writeFileSync('results.json', data);
+// });
\ No newline at end of file
diff --git a/ee/api/sourcemaps_reader/server.js b/ee/api/sourcemaps_reader/server.js
new file mode 100644
index 000000000..2a1c4dcf6
--- /dev/null
+++ b/ee/api/sourcemaps_reader/server.js
@@ -0,0 +1,38 @@
+const http = require('http');
+const handler = require('./handler');
+const hostname = '127.0.0.1';
+const port = 3000;
+
+const server = http.createServer((req, res) => {
+    if (req.method === 'POST') {
+        let data = '';
+        req.on('data', chunk => {
+            data += chunk;
+        });
+        req.on('end', function () {
+            data = JSON.parse(data);
+            console.log("Starting parser for: " + data.key);
+            // process.env = {...process.env, ...data.bucket_config};
+            handler.sourcemapReader(data)
+                .then((results) => {
+                    res.statusCode = 200;
+                    res.setHeader('Content-Type', 'application/json');
+                    res.end(JSON.stringify(results));
+                })
+                .catch((e) => {
+                    console.error("Something went wrong");
+                    console.error(e);
+                    res.statusCode = 500;
+                    res.end(String(e));
+                });
+        })
+    } else {
+        res.statusCode = 405;
+        res.setHeader('Content-Type', 'text/plain');
+        res.end('Method Not Allowed');
+    }
+});
+
+server.listen(port, hostname, () => {
+    console.log(`Server running at http://${hostname}:${port}/`);
+});
\ No newline at end of file
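Note: the new ee/connectors file below bootstraps the BigQuery sessions and events tables from environment variables. A minimal, hypothetical invocation sketch (the project, dataset, and table names are placeholders, and the import path assumes the connector directory is on PYTHONPATH):

    import os

    # placeholder values; real deployments set these in the connector environment
    os.environ.setdefault("project_id", "my-gcp-project")
    os.environ.setdefault("dataset", "openreplay")
    os.environ.setdefault("sessions_table", "sessions")
    os.environ.setdefault("events_table_name", "events")

    from bigquery_utils.create_table import create_tables_bigquery

    create_tables_bigquery()  # creates both tables via bigquery.Client.from_service_account_json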
+    console.log(`Server running at http://${hostname}:${port}/`);
+});
\ No newline at end of file
diff --git a/ee/connectors/bigquery_utils/create_table.py b/ee/connectors/bigquery_utils/create_table.py
new file mode 100644
index 000000000..4b166e4ae
--- /dev/null
+++ b/ee/connectors/bigquery_utils/create_table.py
@@ -0,0 +1,357 @@
+import os
+from google.cloud import bigquery
+
+from db.loaders.bigquery_loader import creds_file
+
+
+def create_tables_bigquery():
+    create_sessions_table(creds_file=creds_file,
+                          table_id=f"{os.environ['project_id']}.{os.environ['dataset']}.{os.environ['sessions_table']}")
+    print(f"`{os.environ['sessions_table']}` table created successfully.")
+    create_events_table(creds_file=creds_file,
+                        table_id=f"{os.environ['project_id']}.{os.environ['dataset']}.{os.environ['events_table_name']}")
+    print(f"`{os.environ['events_table_name']}` table created successfully.")
+
+
+def create_table(creds_file, table_id, schema):
+    client = bigquery.Client.from_service_account_json(creds_file)
+    table = bigquery.Table(table_id, schema=schema)
+    table = client.create_table(table)  # Make an API request.
+    print(
+        "Created table {}.{}.{}".format(table.project, table.dataset_id, table.table_id)
+    )
+
+
+def create_sessions_table(creds_file, table_id):
+    schema = [
+        bigquery.SchemaField("sessionid", "INT64", mode="REQUIRED"),
+        bigquery.SchemaField("user_agent", "STRING"),
+        bigquery.SchemaField("user_browser", "STRING"),
+        bigquery.SchemaField("user_browser_version", "STRING"),
+        bigquery.SchemaField("user_country", "STRING"),
+        bigquery.SchemaField("user_device", "STRING"),
+        bigquery.SchemaField("user_device_heap_size", "INT64"),
+        bigquery.SchemaField("user_device_memory_size", "INT64"),
+
+        bigquery.SchemaField("user_device_type", "STRING"),
+        bigquery.SchemaField("user_os", "STRING"),
+        bigquery.SchemaField("user_os_version", "STRING"),
+        bigquery.SchemaField("user_uuid", "STRING"),
+        bigquery.SchemaField("connection_effective_bandwidth", "INT64"),
+
+        bigquery.SchemaField("connection_type", "STRING"),
+        bigquery.SchemaField("metadata_key", "STRING"),
+        bigquery.SchemaField("metadata_value", "STRING"),
+        bigquery.SchemaField("referrer", "STRING"),
+        bigquery.SchemaField("user_anonymous_id", "STRING"),
+        bigquery.SchemaField("user_id", "STRING"),
+        bigquery.SchemaField("session_start_timestamp", "INT64"),
+        bigquery.SchemaField("session_end_timestamp", "INT64"),
+        bigquery.SchemaField("session_duration", "INT64"),
+
+        bigquery.SchemaField("first_contentful_paint", "INT64"),
+        bigquery.SchemaField("speed_index", "INT64"),
+        bigquery.SchemaField("visually_complete", "INT64"),
+        bigquery.SchemaField("timing_time_to_interactive", "INT64"),
+
+        bigquery.SchemaField("avg_cpu", "INT64"),
+        bigquery.SchemaField("avg_fps", "INT64"),
+        bigquery.SchemaField("max_cpu", "INT64"),
+        bigquery.SchemaField("max_fps", "INT64"),
+        bigquery.SchemaField("max_total_js_heap_size", "INT64"),
+        bigquery.SchemaField("max_used_js_heap_size", "INT64"),
+
+        bigquery.SchemaField("js_exceptions_count", "INT64"),
+        bigquery.SchemaField("long_tasks_total_duration", "INT64"),
+        bigquery.SchemaField("long_tasks_max_duration", "INT64"),
+        bigquery.SchemaField("long_tasks_count", "INT64"),
+        bigquery.SchemaField("inputs_count", "INT64"),
+        bigquery.SchemaField("clicks_count", "INT64"),
+        bigquery.SchemaField("issues_count", "INT64"),
+        bigquery.SchemaField("issues", "STRING"),
+        bigquery.SchemaField("urls_count", "INT64"),
+        bigquery.SchemaField("urls", "STRING")]
+    create_table(creds_file, table_id, schema)
+
+
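Both schema builders route through `create_table`, which authenticates with the service-account JSON referenced by `db/loaders/bigquery_loader.py` and expects a fully qualified `project.dataset.table` id. A minimal sketch of driving them end to end, with placeholder project and dataset values (not real names):

```python
import os

# Placeholder values -- point these at your own GCP project before running.
os.environ["project_id"] = "my-gcp-project"
os.environ["dataset"] = "openreplay"
os.environ["sessions_table"] = "connector_sessions"
os.environ["events_table_name"] = "connector_user_events"

# The import already loads utils/bigquery_service_account.json via
# db.loaders.bigquery_loader, so that key file has to exist first.
from bigquery_utils.create_table import create_tables_bigquery

create_tables_bigquery()  # creates the sessions table, then the events table
```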
+def create_events_table(creds_file, table_id): + + schema = [ + bigquery.SchemaField("sessionid", "INT64"), + bigquery.SchemaField("connectioninformation_downlink", "INT64"), + bigquery.SchemaField("connectioninformation_type", "STRING"), + bigquery.SchemaField("consolelog_level", "STRING"), + bigquery.SchemaField("consolelog_value", "STRING"), + bigquery.SchemaField("customevent_messageid", "INT64"), + bigquery.SchemaField("customevent_name", "STRING"), + bigquery.SchemaField("customevent_payload", "STRING"), + bigquery.SchemaField("customevent_timestamp", "INT64"), + bigquery.SchemaField("errorevent_message", "STRING"), + bigquery.SchemaField("errorevent_messageid", "INT64"), + bigquery.SchemaField("errorevent_name", "STRING"), + bigquery.SchemaField("errorevent_payload", "STRING"), + bigquery.SchemaField("errorevent_source", "STRING"), + bigquery.SchemaField("errorevent_timestamp", "INT64"), + bigquery.SchemaField("jsexception_message", "STRING"), + bigquery.SchemaField("jsexception_name", "STRING"), + bigquery.SchemaField("jsexception_payload", "STRING"), + bigquery.SchemaField("metadata_key", "STRING"), + bigquery.SchemaField("metadata_value", "STRING"), + bigquery.SchemaField("mouseclick_id", "INT64"), + bigquery.SchemaField("mouseclick_hesitationtime", "INT64"), + bigquery.SchemaField("mouseclick_label", "STRING"), + bigquery.SchemaField("pageevent_firstcontentfulpaint", "INT64"), + bigquery.SchemaField("pageevent_firstpaint", "INT64"), + bigquery.SchemaField("pageevent_messageid", "INT64"), + bigquery.SchemaField("pageevent_referrer", "STRING"), + bigquery.SchemaField("pageevent_speedindex", "INT64"), + bigquery.SchemaField("pageevent_timestamp", "INT64"), + bigquery.SchemaField("pageevent_url", "STRING"), + bigquery.SchemaField("pagerendertiming_timetointeractive", "INT64"), + bigquery.SchemaField("pagerendertiming_visuallycomplete", "INT64"), + bigquery.SchemaField("rawcustomevent_name", "STRING"), + bigquery.SchemaField("rawcustomevent_payload", "STRING"), + bigquery.SchemaField("setviewportsize_height", "INT64"), + bigquery.SchemaField("setviewportsize_width", "INT64"), + bigquery.SchemaField("timestamp_timestamp", "INT64"), + bigquery.SchemaField("user_anonymous_id", "STRING"), + bigquery.SchemaField("user_id", "STRING"), + bigquery.SchemaField("issueevent_messageid", "INT64"), + bigquery.SchemaField("issueevent_timestamp", "INT64"), + bigquery.SchemaField("issueevent_type", "STRING"), + bigquery.SchemaField("issueevent_contextstring", "STRING"), + bigquery.SchemaField("issueevent_context", "STRING"), + bigquery.SchemaField("issueevent_payload", "STRING"), + bigquery.SchemaField("customissue_name", "STRING"), + bigquery.SchemaField("customissue_payload", "STRING"), + bigquery.SchemaField("received_at", "INT64"), + bigquery.SchemaField("batch_order_number", "INT64")] + create_table(creds_file, table_id, schema) + + +def create_table_negatives(creds_file, table_id): + client = bigquery.Client.from_service_account_json(creds_file) + + schema = [ + bigquery.SchemaField("sessionid", "INT64", mode="REQUIRED"), + bigquery.SchemaField("clickevent_hesitationtime", "INT64"), + bigquery.SchemaField("clickevent_label", "STRING"), + bigquery.SchemaField("clickevent_messageid", "INT64"), + bigquery.SchemaField("clickevent_timestamp", "INT64"), + bigquery.SchemaField("connectioninformation_downlink", "INT64"), + bigquery.SchemaField("connectioninformation_type", "STRING"), + bigquery.SchemaField("consolelog_level", "STRING"), + bigquery.SchemaField("consolelog_value", "STRING"), + 
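+        # the remaining columns mirror the tracker's raw message types, one
+        # field group per message (cpu/memory issues, DOM mutations, fetch,
+        # graphql, inputs, performance tracking, resources, ...)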
bigquery.SchemaField("cpuissue_duration", "INT64"), + bigquery.SchemaField("cpuissue_rate", "INT64"), + bigquery.SchemaField("cpuissue_timestamp", "INT64"), + bigquery.SchemaField("createdocument", "BOOL"), + bigquery.SchemaField("createelementnode_id", "INT64"), + bigquery.SchemaField("createelementnode_parentid", "INT64"), + bigquery.SchemaField("cssdeleterule_index", "INT64"), + bigquery.SchemaField("cssdeleterule_stylesheetid", "INT64"), + bigquery.SchemaField("cssinsertrule_index", "INT64"), + bigquery.SchemaField("cssinsertrule_rule", "STRING"), + bigquery.SchemaField("cssinsertrule_stylesheetid", "INT64"), + bigquery.SchemaField("customevent_messageid", "INT64"), + bigquery.SchemaField("customevent_name", "STRING"), + bigquery.SchemaField("customevent_payload", "STRING"), + bigquery.SchemaField("customevent_timestamp", "INT64"), + bigquery.SchemaField("domdrop_timestamp", "INT64"), + bigquery.SchemaField("errorevent_message", "STRING"), + bigquery.SchemaField("errorevent_messageid", "INT64"), + bigquery.SchemaField("errorevent_name", "STRING"), + bigquery.SchemaField("errorevent_payload", "STRING"), + bigquery.SchemaField("errorevent_source", "STRING"), + bigquery.SchemaField("errorevent_timestamp", "INT64"), + bigquery.SchemaField("fetch_duration", "INT64"), + bigquery.SchemaField("fetch_method", "STRING"), + bigquery.SchemaField("fetch_request", "STRING"), + bigquery.SchemaField("fetch_response", "STRING"), + bigquery.SchemaField("fetch_status", "INT64"), + bigquery.SchemaField("fetch_timestamp", "INT64"), + bigquery.SchemaField("fetch_url", "STRING"), + bigquery.SchemaField("graphql_operationkind", "STRING"), + bigquery.SchemaField("graphql_operationname", "STRING"), + bigquery.SchemaField("graphql_response", "STRING"), + bigquery.SchemaField("graphql_variables", "STRING"), + bigquery.SchemaField("graphqlevent_messageid", "INT64"), + bigquery.SchemaField("graphqlevent_name", "STRING"), + bigquery.SchemaField("graphqlevent_timestamp", "INT64"), + bigquery.SchemaField("inputevent_label", "STRING"), + bigquery.SchemaField("inputevent_messageid", "INT64"), + bigquery.SchemaField("inputevent_timestamp", "INT64"), + bigquery.SchemaField("inputevent_value", "STRING"), + bigquery.SchemaField("inputevent_valuemasked", "BOOL"), + bigquery.SchemaField("is_asayer_event", "BOOL"), + bigquery.SchemaField("jsexception_message", "STRING"), + bigquery.SchemaField("jsexception_name", "STRING"), + bigquery.SchemaField("jsexception_payload", "STRING"), + bigquery.SchemaField("longtasks_timestamp", "INT64"), + bigquery.SchemaField("longtasks_duration", "INT64"), + bigquery.SchemaField("longtasks_containerid", "STRING"), + bigquery.SchemaField("longtasks_containersrc", "STRING"), + bigquery.SchemaField("memoryissue_duration", "INT64"), + bigquery.SchemaField("memoryissue_rate", "INT64"), + bigquery.SchemaField("memoryissue_timestamp", "INT64"), + bigquery.SchemaField("metadata_key", "STRING"), + bigquery.SchemaField("metadata_value", "STRING"), + bigquery.SchemaField("mobx_payload", "STRING"), + bigquery.SchemaField("mobx_type", "STRING"), + bigquery.SchemaField("mouseclick_id", "INT64"), + bigquery.SchemaField("mouseclick_hesitationtime", "INT64"), + bigquery.SchemaField("mouseclick_label", "STRING"), + bigquery.SchemaField("mousemove_x", "INT64"), + bigquery.SchemaField("mousemove_y", "INT64"), + bigquery.SchemaField("movenode_id", "INT64"), + bigquery.SchemaField("movenode_index", "INT64"), + bigquery.SchemaField("movenode_parentid", "INT64"), + bigquery.SchemaField("ngrx_action", "STRING"), + 
bigquery.SchemaField("ngrx_duration", "INT64"), + bigquery.SchemaField("ngrx_state", "STRING"), + bigquery.SchemaField("otable_key", "STRING"), + bigquery.SchemaField("otable_value", "STRING"), + bigquery.SchemaField("pageevent_domcontentloadedeventend", "INT64"), + bigquery.SchemaField("pageevent_domcontentloadedeventstart", "INT64"), + bigquery.SchemaField("pageevent_firstcontentfulpaint", "INT64"), + bigquery.SchemaField("pageevent_firstpaint", "INT64"), + bigquery.SchemaField("pageevent_loaded", "BOOL"), + bigquery.SchemaField("pageevent_loadeventend", "INT64"), + bigquery.SchemaField("pageevent_loadeventstart", "INT64"), + bigquery.SchemaField("pageevent_messageid", "INT64"), + bigquery.SchemaField("pageevent_referrer", "STRING"), + bigquery.SchemaField("pageevent_requeststart", "INT64"), + bigquery.SchemaField("pageevent_responseend", "INT64"), + bigquery.SchemaField("pageevent_responsestart", "INT64"), + bigquery.SchemaField("pageevent_speedindex", "INT64"), + bigquery.SchemaField("pageevent_timestamp", "INT64"), + bigquery.SchemaField("pageevent_url", "STRING"), + bigquery.SchemaField("pageloadtiming_domcontentloadedeventend", "INT64"), + bigquery.SchemaField("pageloadtiming_domcontentloadedeventstart", "INT64"), + bigquery.SchemaField("pageloadtiming_firstcontentfulpaint", "INT64"), + bigquery.SchemaField("pageloadtiming_firstpaint", "INT64"), + bigquery.SchemaField("pageloadtiming_loadeventend", "INT64"), + bigquery.SchemaField("pageloadtiming_loadeventstart", "INT64"), + bigquery.SchemaField("pageloadtiming_requeststart", "INT64"), + bigquery.SchemaField("pageloadtiming_responseend", "INT64"), + bigquery.SchemaField("pageloadtiming_responsestart", "INT64"), + bigquery.SchemaField("pagerendertiming_speedindex", "INT64"), + bigquery.SchemaField("pagerendertiming_timetointeractive", "INT64"), + bigquery.SchemaField("pagerendertiming_visuallycomplete", "INT64"), + bigquery.SchemaField("performancetrack_frames", "INT64"), + bigquery.SchemaField("performancetrack_ticks", "INT64"), + bigquery.SchemaField("performancetrack_totaljsheapsize", "INT64"), + bigquery.SchemaField("performancetrack_usedjsheapsize", "INT64"), + bigquery.SchemaField("performancetrackaggr_avgcpu", "INT64"), + bigquery.SchemaField("performancetrackaggr_avgfps", "INT64"), + bigquery.SchemaField("performancetrackaggr_avgtotaljsheapsize", "INT64"), + bigquery.SchemaField("performancetrackaggr_avgusedjsheapsize", "INT64"), + bigquery.SchemaField("performancetrackaggr_maxcpu", "INT64"), + bigquery.SchemaField("performancetrackaggr_maxfps", "INT64"), + bigquery.SchemaField("performancetrackaggr_maxtotaljsheapsize", "INT64"), + bigquery.SchemaField("performancetrackaggr_maxusedjsheapsize", "INT64"), + bigquery.SchemaField("performancetrackaggr_mincpu", "INT64"), + bigquery.SchemaField("performancetrackaggr_minfps", "INT64"), + bigquery.SchemaField("performancetrackaggr_mintotaljsheapsize", "INT64"), + bigquery.SchemaField("performancetrackaggr_minusedjsheapsize", "INT64"), + bigquery.SchemaField("performancetrackaggr_timestampend", "INT64"), + bigquery.SchemaField("performancetrackaggr_timestampstart", "INT64"), + bigquery.SchemaField("profiler_args", "STRING"), + bigquery.SchemaField("profiler_duration", "INT64"), + bigquery.SchemaField("profiler_name", "STRING"), + bigquery.SchemaField("profiler_result", "STRING"), + bigquery.SchemaField("rawcustomevent_name", "STRING"), + bigquery.SchemaField("rawcustomevent_payload", "STRING"), + bigquery.SchemaField("rawerrorevent_message", "STRING"), + 
bigquery.SchemaField("rawerrorevent_name", "STRING"), + bigquery.SchemaField("rawerrorevent_payload", "STRING"), + bigquery.SchemaField("rawerrorevent_source", "STRING"), + bigquery.SchemaField("rawerrorevent_timestamp", "INT64"), + bigquery.SchemaField("redux_action", "STRING"), + bigquery.SchemaField("redux_duration", "INT64"), + bigquery.SchemaField("redux_state", "STRING"), + bigquery.SchemaField("removenode_id", "INT64"), + bigquery.SchemaField("removenodeattribute_id", "INT64"), + bigquery.SchemaField("removenodeattribute_name", "STRING"), + bigquery.SchemaField("resourceevent_decodedbodysize", "INT64"), + bigquery.SchemaField("resourceevent_duration", "INT64"), + bigquery.SchemaField("resourceevent_encodedbodysize", "INT64"), + bigquery.SchemaField("resourceevent_headersize", "INT64"), + bigquery.SchemaField("resourceevent_messageid", "INT64"), + bigquery.SchemaField("resourceevent_method", "STRING"), + bigquery.SchemaField("resourceevent_status", "INT64"), + bigquery.SchemaField("resourceevent_success", "BOOL"), + bigquery.SchemaField("resourceevent_timestamp", "INT64"), + bigquery.SchemaField("resourceevent_ttfb", "INT64"), + bigquery.SchemaField("resourceevent_type", "STRING"), + bigquery.SchemaField("resourceevent_url", "STRING"), + bigquery.SchemaField("resourcetiming_decodedbodysize", "INT64"), + bigquery.SchemaField("resourcetiming_duration", "INT64"), + bigquery.SchemaField("resourcetiming_encodedbodysize", "INT64"), + bigquery.SchemaField("resourcetiming_headersize", "INT64"), + bigquery.SchemaField("resourcetiming_initiator", "STRING"), + bigquery.SchemaField("resourcetiming_timestamp", "INT64"), + bigquery.SchemaField("resourcetiming_ttfb", "INT64"), + bigquery.SchemaField("resourcetiming_url", "STRING"), + bigquery.SchemaField("sessiondisconnect", "BOOL"), + bigquery.SchemaField("sessiondisconnect_timestamp", "INT64"), + bigquery.SchemaField("sessionend", "BOOL"), + bigquery.SchemaField("sessionend_timestamp", "INT64"), + bigquery.SchemaField("sessionstart_projectid", "INT64"), + bigquery.SchemaField("sessionstart_revid", "STRING"), + bigquery.SchemaField("sessionstart_timestamp", "INT64"), + bigquery.SchemaField("sessionstart_trackerversion", "STRING"), + bigquery.SchemaField("sessionstart_useragent", "STRING"), + bigquery.SchemaField("sessionstart_userbrowser", "STRING"), + bigquery.SchemaField("sessionstart_userbrowserversion", "STRING"), + bigquery.SchemaField("sessionstart_usercountry", "STRING"), + bigquery.SchemaField("sessionstart_userdevice", "STRING"), + bigquery.SchemaField("sessionstart_userdeviceheapsize", "INT64"), + bigquery.SchemaField("sessionstart_userdevicememorysize", "INT64"), + bigquery.SchemaField("sessionstart_userdevicetype", "STRING"), + bigquery.SchemaField("sessionstart_useros", "STRING"), + bigquery.SchemaField("sessionstart_userosversion", "STRING"), + bigquery.SchemaField("sessionstart_useruuid", "STRING"), + bigquery.SchemaField("setcssdata_data", "INT64"), + bigquery.SchemaField("setcssdata_id", "INT64"), + bigquery.SchemaField("setinputchecked_checked", "INT64"), + bigquery.SchemaField("setinputchecked_id", "INT64"), + bigquery.SchemaField("setinputtarget_id", "INT64"), + bigquery.SchemaField("setinputtarget_label", "INT64"), + bigquery.SchemaField("setinputvalue_id", "INT64"), + bigquery.SchemaField("setinputvalue_mask", "INT64"), + bigquery.SchemaField("setinputvalue_value", "INT64"), + bigquery.SchemaField("setnodeattribute_id", "INT64"), + bigquery.SchemaField("setnodeattribute_name", "INT64"), + 
bigquery.SchemaField("setnodeattribute_value", "INT64"), + bigquery.SchemaField("setnodedata_data", "INT64"), + bigquery.SchemaField("setnodedata_id", "INT64"), + bigquery.SchemaField("setnodescroll_id", "INT64"), + bigquery.SchemaField("setnodescroll_x", "INT64"), + bigquery.SchemaField("setnodescroll_y", "INT64"), + bigquery.SchemaField("setpagelocation_navigationstart", "INT64"), + bigquery.SchemaField("setpagelocation_referrer", "STRING"), + bigquery.SchemaField("setpagelocation_url", "STRING"), + bigquery.SchemaField("setpagevisibility_hidden", "BOOL"), + bigquery.SchemaField("setviewportscroll_x", "BOOL"), + bigquery.SchemaField("setviewportscroll_y", "BOOL"), + bigquery.SchemaField("setviewportsize_height", "INT64"), + bigquery.SchemaField("setviewportsize_width", "INT64"), + bigquery.SchemaField("stateaction_type", "STRING"), + bigquery.SchemaField("stateactionevent_messageid", "INT64"), + bigquery.SchemaField("stateactionevent_timestamp", "INT64"), + bigquery.SchemaField("stateactionevent_type", "STRING"), + bigquery.SchemaField("timestamp_timestamp", "INT64"), + bigquery.SchemaField("useranonymousid_id", "STRING"), + bigquery.SchemaField("userid_id", "STRING"), + bigquery.SchemaField("vuex_mutation", "STRING"), + bigquery.SchemaField("vuex_state", "STRING"), + bigquery.SchemaField("received_at", "INT64", mode="REQUIRED"), + bigquery.SchemaField("batch_order_number", "INT64", mode="REQUIRED") + ] + + table = bigquery.Table(table_id, schema=schema) + table = client.create_table(table) # Make an API request. + print( + "Created table {}.{}.{}".format(table.project, table.dataset_id, table.table_id) + ) diff --git a/ee/connectors/db/api.py b/ee/connectors/db/api.py new file mode 100644 index 000000000..33abf67cc --- /dev/null +++ b/ee/connectors/db/api.py @@ -0,0 +1,129 @@ +from sqlalchemy import create_engine +from sqlalchemy import MetaData +from sqlalchemy.orm import sessionmaker, session +from contextlib import contextmanager +import logging +import os +from pathlib import Path + +DATABASE = os.environ['DATABASE_NAME'] +if DATABASE == 'redshift': + import pandas_redshift as pr + +base_path = Path(__file__).parent.parent + +from db.models import Base + +logger = logging.getLogger(__file__) + + +def get_class_by_tablename(tablename): + """Return class reference mapped to table. + Raise an exception if class not found + + :param tablename: String with name of table. + :return: Class reference. 
+ """ + for c in Base._decl_class_registry.values(): + if hasattr(c, '__tablename__') and c.__tablename__ == tablename: + return c + raise AttributeError(f'No model with tablename "{tablename}"') + + +class DBConnection: + """ + Initializes connection to a database + To update models file use: + sqlacodegen --outfile models_universal.py mysql+pymysql://{user}:{pwd}@{address} + """ + _sessions = sessionmaker() + + def __init__(self, config) -> None: + self.metadata = MetaData() + self.config = config + + if config == 'redshift': + self.pdredshift = pr + self.pdredshift.connect_to_redshift(dbname=os.environ['schema'], + host=os.environ['address'], + port=os.environ['port'], + user=os.environ['user'], + password=os.environ['password']) + + self.pdredshift.connect_to_s3(aws_access_key_id=os.environ['aws_access_key_id'], + aws_secret_access_key=os.environ['aws_secret_access_key'], + bucket=os.environ['bucket'], + subdirectory=os.environ['subdirectory']) + + self.connect_str = os.environ['connect_str'].format( + user=os.environ['user'], + password=os.environ['password'], + address=os.environ['address'], + port=os.environ['port'], + schema=os.environ['schema'] + ) + self.engine = create_engine(self.connect_str) + + elif config == 'clickhouse': + self.connect_str = os.environ['connect_str'].format( + address=os.environ['address'], + database=os.environ['database'] + ) + self.engine = create_engine(self.connect_str) + elif config == 'pg': + self.connect_str = os.environ['connect_str'].format( + user=os.environ['user'], + password=os.environ['password'], + address=os.environ['address'], + port=os.environ['port'], + database=os.environ['database'] + ) + self.engine = create_engine(self.connect_str) + elif config == 'bigquery': + pass + elif config == 'snowflake': + self.connect_str = os.environ['connect_str'].format( + user=os.environ['user'], + password=os.environ['password'], + account=os.environ['account'], + database=os.environ['database'], + schema = os.environ['schema'], + warehouse = os.environ['warehouse'] + ) + self.engine = create_engine(self.connect_str) + else: + raise ValueError("This db configuration doesn't exist. Add into keys file.") + + @contextmanager + def get_test_session(self, **kwargs) -> session: + """ + Test session context, even commits won't be persisted into db. + :Keyword Arguments: + * autoflush (``bool``) -- default: True + * autocommit (``bool``) -- default: False + * expire_on_commit (``bool``) -- default: True + """ + connection = self.engine.connect() + transaction = connection.begin() + my_session = type(self)._sessions(bind=connection, **kwargs) + yield my_session + + # Do cleanup, rollback and closing, whatever happens + my_session.close() + transaction.rollback() + connection.close() + + @contextmanager + def get_live_session(self) -> session: + """ + This is a session that can be committed. + Changes will be reflected in the database. 
+ """ + # Automatic transaction and connection handling in session + connection = self.engine.connect() + my_session = type(self)._sessions(bind=connection) + + yield my_session + + my_session.close() + connection.close() diff --git a/ee/connectors/db/loaders/__init__.py b/ee/connectors/db/loaders/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/ee/connectors/db/loaders/bigquery_loader.py b/ee/connectors/db/loaders/bigquery_loader.py new file mode 100644 index 000000000..2f3747d0a --- /dev/null +++ b/ee/connectors/db/loaders/bigquery_loader.py @@ -0,0 +1,34 @@ +import os +from pathlib import Path + +from google.oauth2.service_account import Credentials + +# obtain the JSON file: +# In the Cloud Console, go to the Create service account key page. +# +# Go to the Create Service Account Key page +# From the Service account list, select New service account. +# In the Service account name field, enter a name. +# From the Role list, select Project > Owner. +# +# Note: The Role field affects which resources your service account can access in your project. You can revoke these roles or grant additional roles later. In production environments, do not grant the Owner, Editor, or Viewer roles. For more information, see Granting, changing, and revoking access to resources. +# Click Create. A JSON file that contains your key downloads to your computer. +# +# Put it in utils under a name bigquery_service_account + +base_path = Path(__file__).parent.parent.parent +creds_file = base_path / 'utils' / 'bigquery_service_account.json' +credentials = Credentials.from_service_account_file( + creds_file) + + +def insert_to_bigquery(df, table): + df.to_gbq(destination_table=f"{os.environ['dataset']}.{table}", + project_id=os.environ['project_id'], + if_exists='append', + credentials=credentials) + + +def transit_insert_to_bigquery(db, batch): + ... + diff --git a/ee/connectors/db/loaders/clickhouse_loader.py b/ee/connectors/db/loaders/clickhouse_loader.py new file mode 100644 index 000000000..2fea7fd01 --- /dev/null +++ b/ee/connectors/db/loaders/clickhouse_loader.py @@ -0,0 +1,4 @@ + +def insert_to_clickhouse(db, df, table: str): + df.to_sql(table, db.engine, if_exists='append', index=False) + diff --git a/ee/connectors/db/loaders/postgres_loader.py b/ee/connectors/db/loaders/postgres_loader.py new file mode 100644 index 000000000..bd982c607 --- /dev/null +++ b/ee/connectors/db/loaders/postgres_loader.py @@ -0,0 +1,3 @@ + +def insert_to_postgres(db, df, table: str): + df.to_sql(table, db.engine, if_exists='append', index=False) diff --git a/ee/connectors/db/loaders/redshift_loader.py b/ee/connectors/db/loaders/redshift_loader.py new file mode 100644 index 000000000..fe31d4fc4 --- /dev/null +++ b/ee/connectors/db/loaders/redshift_loader.py @@ -0,0 +1,19 @@ +from db.models import DetailedEvent +from psycopg2.errors import InternalError_ + + +def transit_insert_to_redshift(db, df, table): + + try: + insert_df(db.pdredshift, df, table) + except InternalError_ as e: + print(repr(e)) + print("loading failed. 
check stl_load_errors") + + +def insert_df(pr, df, table): + # Write the DataFrame to S3 and then to redshift + pr.pandas_to_redshift(data_frame=df, + redshift_table_name=table, + append=True, + delimiter='|') diff --git a/ee/connectors/db/loaders/snowflake_loader.py b/ee/connectors/db/loaders/snowflake_loader.py new file mode 100644 index 000000000..b0bfde37f --- /dev/null +++ b/ee/connectors/db/loaders/snowflake_loader.py @@ -0,0 +1,5 @@ + +def insert_to_snowflake(db, df, table): + df.to_sql(table, db.engine, if_exists='append', index=False) + + diff --git a/ee/connectors/db/models.py b/ee/connectors/db/models.py new file mode 100644 index 000000000..46654e249 --- /dev/null +++ b/ee/connectors/db/models.py @@ -0,0 +1,389 @@ +# coding: utf-8 +import yaml +from sqlalchemy import BigInteger, Boolean, Column, Integer, ARRAY, VARCHAR, text, VARCHAR +from sqlalchemy.ext.declarative import declarative_base +from pathlib import Path +import os + +DATABASE = os.environ['DATABASE_NAME'] + +Base = declarative_base() +metadata = Base.metadata + +base_path = Path(__file__).parent.parent + +# Load configuration file +conf = yaml.load( + open(f'{base_path}/utils/config.yml'), Loader=yaml.FullLoader) +try: + db_conf = conf[DATABASE] +except KeyError: + raise KeyError("Please provide a configuration in a YAML file with a key like\n" + "'snowflake', 'pg', 'bigquery', 'clickhouse' or 'redshift'.") + +# Get a table name from a configuration file +try: + events_table_name = db_conf['events_table_name'] +except KeyError as e: + events_table_name = None + print(repr(e)) +try: + events_detailed_table_name = db_conf['events_detailed_table_name'] +except KeyError as e: + print(repr(e)) + events_detailed_table_name = None +try: + sessions_table_name = db_conf['sessions_table'] +except KeyError as e: + print(repr(e)) + raise KeyError("Please provide a table name under a key 'table' in a YAML configuration file") + + +class Session(Base): + __tablename__ = sessions_table_name + + sessionid = Column(BigInteger, primary_key=True) + user_agent = Column(VARCHAR(5000)) + user_browser = Column(VARCHAR(5000)) + user_browser_version = Column(VARCHAR(5000)) + user_country = Column(VARCHAR(5000)) + user_device = Column(VARCHAR(5000)) + user_device_heap_size = Column(BigInteger) + user_device_memory_size = Column(BigInteger) + user_device_type = Column(VARCHAR(5000)) + user_os = Column(VARCHAR(5000)) + user_os_version = Column(VARCHAR(5000)) + user_uuid = Column(VARCHAR(5000)) + connection_effective_bandwidth = Column(BigInteger) # Downlink + connection_type = Column(VARCHAR(5000)) # "bluetooth", "cellular", "ethernet", "none", "wifi", "wimax", "other", "unknown" + metadata_key = Column(VARCHAR(5000)) + metadata_value = Column(VARCHAR(5000)) + referrer = Column(VARCHAR(5000)) + user_anonymous_id = Column(VARCHAR(5000)) + user_id = Column(VARCHAR(5000)) + + # TIME + session_start_timestamp = Column(BigInteger) + session_end_timestamp = Column(BigInteger) + session_duration = Column(BigInteger) + + # SPEED INDEX RELATED + first_contentful_paint = Column(BigInteger) + speed_index = Column(BigInteger) + visually_complete = Column(BigInteger) + timing_time_to_interactive = Column(BigInteger) + + # PERFORMANCE + avg_cpu = Column(Integer) + avg_fps = Column(BigInteger) + max_cpu = Column(Integer) + max_fps = Column(BigInteger) + max_total_js_heap_size = Column(BigInteger) + max_used_js_heap_size = Column(BigInteger) + + # ISSUES AND EVENTS + js_exceptions_count = Column(BigInteger) + long_tasks_total_duration = Column(BigInteger) + 
long_tasks_max_duration = Column(BigInteger) + long_tasks_count = Column(BigInteger) + inputs_count = Column(BigInteger) + clicks_count = Column(BigInteger) + issues_count = Column(BigInteger) + issues = ARRAY(VARCHAR(5000)) + urls_count = Column(BigInteger) + urls = ARRAY(VARCHAR(5000)) + + +class Event(Base): + __tablename__ = events_table_name + + sessionid = Column(BigInteger, primary_key=True) + connectioninformation_downlink = Column(BigInteger) + connectioninformation_type = Column(VARCHAR(5000)) + consolelog_level = Column(VARCHAR(5000)) + consolelog_value = Column(VARCHAR(5000)) + customevent_messageid = Column(BigInteger) + customevent_name = Column(VARCHAR(5000)) + customevent_payload = Column(VARCHAR(5000)) + customevent_timestamp = Column(BigInteger) + errorevent_message = Column(VARCHAR(5000)) + errorevent_messageid = Column(BigInteger) + errorevent_name = Column(VARCHAR(5000)) + errorevent_payload = Column(VARCHAR(5000)) + errorevent_source = Column(VARCHAR(5000)) + errorevent_timestamp = Column(BigInteger) + jsexception_message = Column(VARCHAR(5000)) + jsexception_name = Column(VARCHAR(5000)) + jsexception_payload = Column(VARCHAR(5000)) + metadata_key = Column(VARCHAR(5000)) + metadata_value = Column(VARCHAR(5000)) + mouseclick_id = Column(BigInteger) + mouseclick_hesitationtime = Column(BigInteger) + mouseclick_label = Column(VARCHAR(5000)) + pageevent_firstcontentfulpaint = Column(BigInteger) + pageevent_firstpaint = Column(BigInteger) + pageevent_messageid = Column(BigInteger) + pageevent_referrer = Column(VARCHAR(5000)) + pageevent_speedindex = Column(BigInteger) + pageevent_timestamp = Column(BigInteger) + pageevent_url = Column(VARCHAR(5000)) + pagerendertiming_timetointeractive = Column(BigInteger) + pagerendertiming_visuallycomplete = Column(BigInteger) + rawcustomevent_name = Column(VARCHAR(5000)) + rawcustomevent_payload = Column(VARCHAR(5000)) + setviewportsize_height = Column(BigInteger) + setviewportsize_width = Column(BigInteger) + timestamp_timestamp = Column(BigInteger) + user_anonymous_id = Column(VARCHAR(5000)) + user_id = Column(VARCHAR(5000)) + issueevent_messageid = Column(BigInteger) + issueevent_timestamp = Column(BigInteger) + issueevent_type = Column(VARCHAR(5000)) + issueevent_contextstring = Column(VARCHAR(5000)) + issueevent_context = Column(VARCHAR(5000)) + issueevent_payload = Column(VARCHAR(5000)) + customissue_name = Column(VARCHAR(5000)) + customissue_payload = Column(VARCHAR(5000)) + received_at = Column(BigInteger) + batch_order_number = Column(BigInteger) + + +class DetailedEvent(Base): + __tablename__ = events_detailed_table_name + + # id = Column(Integer, primary_key=True, server_default=text("\"identity\"(119029, 0, '0,1'::text)")) + sessionid = Column(BigInteger, primary_key=True) + clickevent_hesitationtime = Column(BigInteger) + clickevent_label = Column(VARCHAR(5000)) + clickevent_messageid = Column(BigInteger) + clickevent_timestamp = Column(BigInteger) + connectioninformation_downlink = Column(BigInteger) + connectioninformation_type = Column(VARCHAR(5000)) + consolelog_level = Column(VARCHAR(5000)) + consolelog_value = Column(VARCHAR(5000)) + cpuissue_duration = Column(BigInteger) + cpuissue_rate = Column(BigInteger) + cpuissue_timestamp = Column(BigInteger) + createdocument = Column(Boolean) + createelementnode_id = Column(BigInteger) + createelementnode_parentid = Column(BigInteger) + cssdeleterule_index = Column(BigInteger) + cssdeleterule_stylesheetid = Column(BigInteger) + cssinsertrule_index = Column(BigInteger) + 
cssinsertrule_rule = Column(VARCHAR(5000)) + cssinsertrule_stylesheetid = Column(BigInteger) + customevent_messageid = Column(BigInteger) + customevent_name = Column(VARCHAR(5000)) + customevent_payload = Column(VARCHAR(5000)) + customevent_timestamp = Column(BigInteger) + domdrop_timestamp = Column(BigInteger) + errorevent_message = Column(VARCHAR(5000)) + errorevent_messageid = Column(BigInteger) + errorevent_name = Column(VARCHAR(5000)) + errorevent_payload = Column(VARCHAR(5000)) + errorevent_source = Column(VARCHAR(5000)) + errorevent_timestamp = Column(BigInteger) + fetch_duration = Column(BigInteger) + fetch_method = Column(VARCHAR(5000)) + fetch_request = Column(VARCHAR(5000)) + fetch_response = Column(VARCHAR(5000)) + fetch_status = Column(BigInteger) + fetch_timestamp = Column(BigInteger) + fetch_url = Column(VARCHAR(5000)) + graphql_operationkind = Column(VARCHAR(5000)) + graphql_operationname = Column(VARCHAR(5000)) + graphql_response = Column(VARCHAR(5000)) + graphql_variables = Column(VARCHAR(5000)) + graphqlevent_messageid = Column(BigInteger) + graphqlevent_name = Column(VARCHAR(5000)) + graphqlevent_timestamp = Column(BigInteger) + inputevent_label = Column(VARCHAR(5000)) + inputevent_messageid = Column(BigInteger) + inputevent_timestamp = Column(BigInteger) + inputevent_value = Column(VARCHAR(5000)) + inputevent_valuemasked = Column(Boolean) + jsexception_message = Column(VARCHAR(5000)) + jsexception_name = Column(VARCHAR(5000)) + jsexception_payload = Column(VARCHAR(5000)) + memoryissue_duration = Column(BigInteger) + memoryissue_rate = Column(BigInteger) + memoryissue_timestamp = Column(BigInteger) + metadata_key = Column(VARCHAR(5000)) + metadata_value = Column(VARCHAR(5000)) + mobx_payload = Column(VARCHAR(5000)) + mobx_type = Column(VARCHAR(5000)) + mouseclick_id = Column(BigInteger) + mouseclick_hesitationtime = Column(BigInteger) + mouseclick_label = Column(VARCHAR(5000)) + mousemove_x = Column(BigInteger) + mousemove_y = Column(BigInteger) + movenode_id = Column(BigInteger) + movenode_index = Column(BigInteger) + movenode_parentid = Column(BigInteger) + ngrx_action = Column(VARCHAR(5000)) + ngrx_duration = Column(BigInteger) + ngrx_state = Column(VARCHAR(5000)) + otable_key = Column(VARCHAR(5000)) + otable_value = Column(VARCHAR(5000)) + pageevent_domcontentloadedeventend = Column(BigInteger) + pageevent_domcontentloadedeventstart = Column(BigInteger) + pageevent_firstcontentfulpaint = Column(BigInteger) + pageevent_firstpaint = Column(BigInteger) + pageevent_loaded = Column(Boolean) + pageevent_loadeventend = Column(BigInteger) + pageevent_loadeventstart = Column(BigInteger) + pageevent_messageid = Column(BigInteger) + pageevent_referrer = Column(VARCHAR(5000)) + pageevent_requeststart = Column(BigInteger) + pageevent_responseend = Column(BigInteger) + pageevent_responsestart = Column(BigInteger) + pageevent_speedindex = Column(BigInteger) + pageevent_timestamp = Column(BigInteger) + pageevent_url = Column(VARCHAR(5000)) + pageloadtiming_domcontentloadedeventend = Column(BigInteger) + pageloadtiming_domcontentloadedeventstart = Column(BigInteger) + pageloadtiming_firstcontentfulpaint = Column(BigInteger) + pageloadtiming_firstpaint = Column(BigInteger) + pageloadtiming_loadeventend = Column(BigInteger) + pageloadtiming_loadeventstart = Column(BigInteger) + pageloadtiming_requeststart = Column(BigInteger) + pageloadtiming_responseend = Column(BigInteger) + pageloadtiming_responsestart = Column(BigInteger) + pagerendertiming_speedindex = Column(BigInteger) + 
pagerendertiming_timetointeractive = Column(BigInteger) + pagerendertiming_visuallycomplete = Column(BigInteger) + performancetrack_frames = Column(BigInteger) + performancetrack_ticks = Column(BigInteger) + performancetrack_totaljsheapsize = Column(BigInteger) + performancetrack_usedjsheapsize = Column(BigInteger) + performancetrackaggr_avgcpu = Column(BigInteger) + performancetrackaggr_avgfps = Column(BigInteger) + performancetrackaggr_avgtotaljsheapsize = Column(BigInteger) + performancetrackaggr_avgusedjsheapsize = Column(BigInteger) + performancetrackaggr_maxcpu = Column(BigInteger) + performancetrackaggr_maxfps = Column(BigInteger) + performancetrackaggr_maxtotaljsheapsize = Column(BigInteger) + performancetrackaggr_maxusedjsheapsize = Column(BigInteger) + performancetrackaggr_mincpu = Column(BigInteger) + performancetrackaggr_minfps = Column(BigInteger) + performancetrackaggr_mintotaljsheapsize = Column(BigInteger) + performancetrackaggr_minusedjsheapsize = Column(BigInteger) + performancetrackaggr_timestampend = Column(BigInteger) + performancetrackaggr_timestampstart = Column(BigInteger) + profiler_args = Column(VARCHAR(5000)) + profiler_duration = Column(BigInteger) + profiler_name = Column(VARCHAR(5000)) + profiler_result = Column(VARCHAR(5000)) + rawcustomevent_name = Column(VARCHAR(5000)) + rawcustomevent_payload = Column(VARCHAR(5000)) + rawerrorevent_message = Column(VARCHAR(5000)) + rawerrorevent_name = Column(VARCHAR(5000)) + rawerrorevent_payload = Column(VARCHAR(5000)) + rawerrorevent_source = Column(VARCHAR(5000)) + rawerrorevent_timestamp = Column(BigInteger) + redux_action = Column(VARCHAR(5000)) + redux_duration = Column(BigInteger) + redux_state = Column(VARCHAR(5000)) + removenode_id = Column(BigInteger) + removenodeattribute_id = Column(BigInteger) + removenodeattribute_name = Column(VARCHAR(5000)) + resourceevent_decodedbodysize = Column(BigInteger) + resourceevent_duration = Column(BigInteger) + resourceevent_encodedbodysize = Column(BigInteger) + resourceevent_headersize = Column(BigInteger) + resourceevent_messageid = Column(BigInteger) + resourceevent_method = Column(VARCHAR(5000)) + resourceevent_status = Column(BigInteger) + resourceevent_success = Column(Boolean) + resourceevent_timestamp = Column(BigInteger) + resourceevent_ttfb = Column(BigInteger) + resourceevent_type = Column(VARCHAR(5000)) + resourceevent_url = Column(VARCHAR(5000)) + resourcetiming_decodedbodysize = Column(BigInteger) + resourcetiming_duration = Column(BigInteger) + resourcetiming_encodedbodysize = Column(BigInteger) + resourcetiming_headersize = Column(BigInteger) + resourcetiming_initiator = Column(VARCHAR(5000)) + resourcetiming_timestamp = Column(BigInteger) + resourcetiming_ttfb = Column(BigInteger) + resourcetiming_url = Column(VARCHAR(5000)) + sessiondisconnect = Column(Boolean) + sessiondisconnect_timestamp = Column(BigInteger) + sessionend = Column(Boolean) + sessionend_timestamp = Column(BigInteger) + sessionstart_projectid = Column(BigInteger) + sessionstart_revid = Column(VARCHAR(5000)) + sessionstart_timestamp = Column(BigInteger) + sessionstart_trackerversion = Column(VARCHAR(5000)) + sessionstart_useragent = Column(VARCHAR(5000)) + sessionstart_userbrowser = Column(VARCHAR(5000)) + sessionstart_userbrowserversion = Column(VARCHAR(5000)) + sessionstart_usercountry = Column(VARCHAR(5000)) + sessionstart_userdevice = Column(VARCHAR(5000)) + sessionstart_userdeviceheapsize = Column(BigInteger) + sessionstart_userdevicememorysize = Column(BigInteger) + 
+    sessionstart_userdevicetype = Column(VARCHAR(5000))
+    sessionstart_useros = Column(VARCHAR(5000))
+    sessionstart_userosversion = Column(VARCHAR(5000))
+    sessionstart_useruuid = Column(VARCHAR(5000))
+    setcssdata_data = Column(BigInteger)
+    setcssdata_id = Column(BigInteger)
+    setinputchecked_checked = Column(BigInteger)
+    setinputchecked_id = Column(BigInteger)
+    setinputtarget_id = Column(BigInteger)
+    setinputtarget_label = Column(BigInteger)
+    setinputvalue_id = Column(BigInteger)
+    setinputvalue_mask = Column(BigInteger)
+    setinputvalue_value = Column(BigInteger)
+    setnodeattribute_id = Column(BigInteger)
+    setnodeattribute_name = Column(BigInteger)
+    setnodeattribute_value = Column(BigInteger)
+    setnodedata_data = Column(BigInteger)
+    setnodedata_id = Column(BigInteger)
+    setnodescroll_id = Column(BigInteger)
+    setnodescroll_x = Column(BigInteger)
+    setnodescroll_y = Column(BigInteger)
+    setpagelocation_navigationstart = Column(BigInteger)
+    setpagelocation_referrer = Column(VARCHAR(5000))
+    setpagelocation_url = Column(VARCHAR(5000))
+    setpagevisibility_hidden = Column(Boolean)
+    setviewportscroll_x = Column(BigInteger)
+    setviewportscroll_y = Column(BigInteger)
+    setviewportsize_height = Column(BigInteger)
+    setviewportsize_width = Column(BigInteger)
+    stateaction_type = Column(VARCHAR(5000))
+    stateactionevent_messageid = Column(BigInteger)
+    stateactionevent_timestamp = Column(BigInteger)
+    stateactionevent_type = Column(VARCHAR(5000))
+    timestamp_timestamp = Column(BigInteger)
+    useranonymousid_id = Column(VARCHAR(5000))
+    userid_id = Column(VARCHAR(5000))
+    vuex_mutation = Column(VARCHAR(5000))
+    vuex_state = Column(VARCHAR(5000))
+    longtask_timestamp = Column(BigInteger)
+    longtask_duration = Column(BigInteger)
+    longtask_context = Column(BigInteger)
+    longtask_containertype = Column(BigInteger)
+    longtask_containersrc = Column(VARCHAR(5000))
+    longtask_containerid = Column(VARCHAR(5000))
+    longtask_containername = Column(VARCHAR(5000))
+    setnodeurlbasedattribute_id = Column(BigInteger)
+    setnodeurlbasedattribute_name = Column(VARCHAR(5000))
+    setnodeurlbasedattribute_value = Column(VARCHAR(5000))
+    setnodeurlbasedattribute_baseurl = Column(VARCHAR(5000))
+    setstyledata_id = Column(BigInteger)
+    setstyledata_data = Column(VARCHAR(5000))
+    setstyledata_baseurl = Column(VARCHAR(5000))
+    issueevent_messageid = Column(BigInteger)
+    issueevent_timestamp = Column(BigInteger)
+    issueevent_type = Column(VARCHAR(5000))
+    issueevent_contextstring = Column(VARCHAR(5000))
+    issueevent_context = Column(VARCHAR(5000))
+    issueevent_payload = Column(VARCHAR(5000))
+    technicalinfo_type = Column(VARCHAR(5000))
+    technicalinfo_value = Column(VARCHAR(5000))
+    customissue_name = Column(VARCHAR(5000))
+    customissue_payload = Column(VARCHAR(5000))
+    pageclose = Column(Boolean)
+    received_at = Column(BigInteger)
+    batch_order_number = Column(BigInteger)
diff --git a/ee/connectors/db/tables.py b/ee/connectors/db/tables.py
new file mode 100644
index 000000000..0127cbbd1
--- /dev/null
+++ b/ee/connectors/db/tables.py
@@ -0,0 +1,61 @@
+from pathlib import Path
+
+base_path = Path(__file__).parent.parent
+
+
+def create_tables_clickhouse(db):
+    with open(base_path / 'sql' / 'clickhouse_events.sql') as f:
+        q = f.read()
+        db.engine.execute(q)
+        print(f"`connector_user_events` table created successfully.")
+
+    with open(base_path / 'sql' / 'clickhouse_events_buffer.sql') as f:
+        q = f.read()
+        db.engine.execute(q)
+        print(f"`connector_user_events_buffer` table created successfully.")
+
+    with open(base_path / 'sql' / 'clickhouse_sessions.sql') as f:
+        q = f.read()
+        db.engine.execute(q)
+        print(f"`connector_sessions` table created successfully.")
+
+    with open(base_path / 'sql' / 'clickhouse_sessions_buffer.sql') as f:
+        q = f.read()
+        db.engine.execute(q)
+        print(f"`connector_sessions_buffer` table created successfully.")
+
+
+def create_tables_postgres(db):
+    with open(base_path / 'sql' / 'postgres_events.sql') as f:
+        q = f.read()
+        db.engine.execute(q)
+        print(f"`connector_user_events` table created successfully.")
+
+    with open(base_path / 'sql' / 'postgres_sessions.sql') as f:
+        q = f.read()
+        db.engine.execute(q)
+        print(f"`connector_sessions` table created successfully.")
+
+
+def create_tables_snowflake(db):
+    with open(base_path / 'sql' / 'snowflake_events.sql') as f:
+        q = f.read()
+        db.engine.execute(q)
+        print(f"`connector_user_events` table created successfully.")
+
+    with open(base_path / 'sql' / 'snowflake_sessions.sql') as f:
+        q = f.read()
+        db.engine.execute(q)
+        print(f"`connector_sessions` table created successfully.")
+
+
+def create_tables_redshift(db):
+    with open(base_path / 'sql' / 'redshift_events.sql') as f:
+        q = f.read()
+        db.engine.execute(q)
+        print(f"`connector_user_events` table created successfully.")
+
+    with open(base_path / 'sql' / 'redshift_sessions.sql') as f:
+        q = f.read()
+        db.engine.execute(q)
+        print(f"`connector_sessions` table created successfully.")
diff --git a/ee/connectors/db/utils.py b/ee/connectors/db/utils.py
new file mode 100644
index 000000000..7c268c6b3
--- /dev/null
+++ b/ee/connectors/db/utils.py
@@ -0,0 +1,368 @@
+import pandas as pd
+from db.models import DetailedEvent, Event, Session, DATABASE
+
+dtypes_events = {'sessionid': "Int64",
+                 'connectioninformation_downlink': "Int64",
+                 'connectioninformation_type': "string",
+                 'consolelog_level': "string",
+                 'consolelog_value': "string",
+                 'customevent_messageid': "Int64",
+                 'customevent_name': "string",
+                 'customevent_payload': "string",
+                 'customevent_timestamp': "Int64",
+                 'errorevent_message': "string",
+                 'errorevent_messageid': "Int64",
+                 'errorevent_name': "string",
+                 'errorevent_payload': "string",
+                 'errorevent_source': "string",
+                 'errorevent_timestamp': "Int64",
+                 'jsexception_message': "string",
+                 'jsexception_name': "string",
+                 'jsexception_payload': "string",
+                 'metadata_key': "string",
+                 'metadata_value': "string",
+                 'mouseclick_id': "Int64",
+                 'mouseclick_hesitationtime': "Int64",
+                 'mouseclick_label': "string",
+                 'pageevent_firstcontentfulpaint': "Int64",
+                 'pageevent_firstpaint': "Int64",
+                 'pageevent_messageid': "Int64",
+                 'pageevent_referrer': "string",
+                 'pageevent_speedindex': "Int64",
+                 'pageevent_timestamp': "Int64",
+                 'pageevent_url': "string",
+                 'pagerendertiming_timetointeractive': "Int64",
+                 'pagerendertiming_visuallycomplete': "Int64",
+                 'rawcustomevent_name': "string",
+                 'rawcustomevent_payload': "string",
+                 'setviewportsize_height': "Int64",
+                 'setviewportsize_width': "Int64",
+                 'timestamp_timestamp': "Int64",
+                 'user_anonymous_id': "string",
+                 'user_id': "string",
+                 'issueevent_messageid': "Int64",
+                 'issueevent_timestamp': "Int64",
+                 'issueevent_type': "string",
+                 'issueevent_contextstring': "string",
+                 'issueevent_context': "string",
+                 'issueevent_payload': "string",
+                 'customissue_name': "string",
+                 'customissue_payload': "string",
+                 'received_at': "Int64",
+                 'batch_order_number': "Int64"}
+dtypes_detailed_events = {
+    "sessionid": "Int64",
+    "clickevent_hesitationtime": "Int64",
+    "clickevent_label": "object",
+    "clickevent_messageid": "Int64",
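+    # NB: capital-I "Int64" and "boolean" are pandas nullable extension dtypes,
+    # so columns with missing values survive the .astype() cast done later on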
"clickevent_timestamp": "Int64", + "connectioninformation_downlink": "Int64", + "connectioninformation_type": "object", + "consolelog_level": "object", + "consolelog_value": "object", + "cpuissue_duration": "Int64", + "cpuissue_rate": "Int64", + "cpuissue_timestamp": "Int64", + "createdocument": "boolean", + "createelementnode_id": "Int64", + "createelementnode_parentid": "Int64", + "cssdeleterule_index": "Int64", + "cssdeleterule_stylesheetid": "Int64", + "cssinsertrule_index": "Int64", + "cssinsertrule_rule": "object", + "cssinsertrule_stylesheetid": "Int64", + "customevent_messageid": "Int64", + "customevent_name": "object", + "customevent_payload": "object", + "customevent_timestamp": "Int64", + "domdrop_timestamp": "Int64", + "errorevent_message": "object", + "errorevent_messageid": "Int64", + "errorevent_name": "object", + "errorevent_payload": "object", + "errorevent_source": "object", + "errorevent_timestamp": "Int64", + "fetch_duration": "Int64", + "fetch_method": "object", + "fetch_request": "object", + "fetch_response": "object", + "fetch_status": "Int64", + "fetch_timestamp": "Int64", + "fetch_url": "object", + "graphql_operationkind": "object", + "graphql_operationname": "object", + "graphql_response": "object", + "graphql_variables": "object", + "graphqlevent_messageid": "Int64", + "graphqlevent_name": "object", + "graphqlevent_timestamp": "Int64", + "inputevent_label": "object", + "inputevent_messageid": "Int64", + "inputevent_timestamp": "Int64", + "inputevent_value": "object", + "inputevent_valuemasked": "boolean", + "jsexception_message": "object", + "jsexception_name": "object", + "jsexception_payload": "object", + "longtasks_timestamp": "Int64", + "longtasks_duration": "Int64", + "longtasks_containerid": "object", + "longtasks_containersrc": "object", + "memoryissue_duration": "Int64", + "memoryissue_rate": "Int64", + "memoryissue_timestamp": "Int64", + "metadata_key": "object", + "metadata_value": "object", + "mobx_payload": "object", + "mobx_type": "object", + "mouseclick_id": "Int64", + "mouseclick_hesitationtime": "Int64", + "mouseclick_label": "object", + "mousemove_x": "Int64", + "mousemove_y": "Int64", + "movenode_id": "Int64", + "movenode_index": "Int64", + "movenode_parentid": "Int64", + "ngrx_action": "object", + "ngrx_duration": "Int64", + "ngrx_state": "object", + "otable_key": "object", + "otable_value": "object", + "pageevent_domcontentloadedeventend": "Int64", + "pageevent_domcontentloadedeventstart": "Int64", + "pageevent_firstcontentfulpaint": "Int64", + "pageevent_firstpaint": "Int64", + "pageevent_loaded": "boolean", + "pageevent_loadeventend": "Int64", + "pageevent_loadeventstart": "Int64", + "pageevent_messageid": "Int64", + "pageevent_referrer": "object", + "pageevent_requeststart": "Int64", + "pageevent_responseend": "Int64", + "pageevent_responsestart": "Int64", + "pageevent_speedindex": "Int64", + "pageevent_timestamp": "Int64", + "pageevent_url": "object", + "pageloadtiming_domcontentloadedeventend": "Int64", + "pageloadtiming_domcontentloadedeventstart": "Int64", + "pageloadtiming_firstcontentfulpaint": "Int64", + "pageloadtiming_firstpaint": "Int64", + "pageloadtiming_loadeventend": "Int64", + "pageloadtiming_loadeventstart": "Int64", + "pageloadtiming_requeststart": "Int64", + "pageloadtiming_responseend": "Int64", + "pageloadtiming_responsestart": "Int64", + "pagerendertiming_speedindex": "Int64", + "pagerendertiming_timetointeractive": "Int64", + "pagerendertiming_visuallycomplete": "Int64", + "performancetrack_frames": "Int64", + 
"performancetrack_ticks": "Int64", + "performancetrack_totaljsheapsize": "Int64", + "performancetrack_usedjsheapsize": "Int64", + "performancetrackaggr_avgcpu": "Int64", + "performancetrackaggr_avgfps": "Int64", + "performancetrackaggr_avgtotaljsheapsize": "Int64", + "performancetrackaggr_avgusedjsheapsize": "Int64", + "performancetrackaggr_maxcpu": "Int64", + "performancetrackaggr_maxfps": "Int64", + "performancetrackaggr_maxtotaljsheapsize": "Int64", + "performancetrackaggr_maxusedjsheapsize": "Int64", + "performancetrackaggr_mincpu": "Int64", + "performancetrackaggr_minfps": "Int64", + "performancetrackaggr_mintotaljsheapsize": "Int64", + "performancetrackaggr_minusedjsheapsize": "Int64", + "performancetrackaggr_timestampend": "Int64", + "performancetrackaggr_timestampstart": "Int64", + "profiler_args": "object", + "profiler_duration": "Int64", + "profiler_name": "object", + "profiler_result": "object", + "rawcustomevent_name": "object", + "rawcustomevent_payload": "object", + "rawerrorevent_message": "object", + "rawerrorevent_name": "object", + "rawerrorevent_payload": "object", + "rawerrorevent_source": "object", + "rawerrorevent_timestamp": "Int64", + "redux_action": "object", + "redux_duration": "Int64", + "redux_state": "object", + "removenode_id": "Int64", + "removenodeattribute_id": "Int64", + "removenodeattribute_name": "object", + "resourceevent_decodedbodysize": "Int64", + "resourceevent_duration": "Int64", + "resourceevent_encodedbodysize": "Int64", + "resourceevent_headersize": "Int64", + "resourceevent_messageid": "Int64", + "resourceevent_method": "object", + "resourceevent_status": "Int64", + "resourceevent_success": "boolean", + "resourceevent_timestamp": "Int64", + "resourceevent_ttfb": "Int64", + "resourceevent_type": "object", + "resourceevent_url": "object", + "resourcetiming_decodedbodysize": "Int64", + "resourcetiming_duration": "Int64", + "resourcetiming_encodedbodysize": "Int64", + "resourcetiming_headersize": "Int64", + "resourcetiming_initiator": "object", + "resourcetiming_timestamp": "Int64", + "resourcetiming_ttfb": "Int64", + "resourcetiming_url": "object", + "sessiondisconnect": "boolean", + "sessiondisconnect_timestamp": "Int64", + "sessionend": "boolean", + "sessionend_timestamp": "Int64", + "sessionstart_projectid": "Int64", + "sessionstart_revid": "object", + "sessionstart_timestamp": "Int64", + "sessionstart_trackerversion": "object", + "sessionstart_useragent": "object", + "sessionstart_userbrowser": "object", + "sessionstart_userbrowserversion": "object", + "sessionstart_usercountry": "object", + "sessionstart_userdevice": "object", + "sessionstart_userdeviceheapsize": "Int64", + "sessionstart_userdevicememorysize": "Int64", + "sessionstart_userdevicetype": "object", + "sessionstart_useros": "object", + "sessionstart_userosversion": "object", + "sessionstart_useruuid": "object", + "setcssdata_data": "Int64", + "setcssdata_id": "Int64", + "setinputchecked_checked": "Int64", + "setinputchecked_id": "Int64", + "setinputtarget_id": "Int64", + "setinputtarget_label": "Int64", + "setinputvalue_id": "Int64", + "setinputvalue_mask": "Int64", + "setinputvalue_value": "Int64", + "setnodeattribute_id": "Int64", + "setnodeattribute_name": "Int64", + "setnodeattribute_value": "Int64", + "setnodedata_data": "Int64", + "setnodedata_id": "Int64", + "setnodescroll_id": "Int64", + "setnodescroll_x": "Int64", + "setnodescroll_y": "Int64", + "setpagelocation_navigationstart": "Int64", + "setpagelocation_referrer": "object", + "setpagelocation_url": "object", + 
"setpagevisibility_hidden": "boolean", + "setviewportscroll_x": "Int64", + "setviewportscroll_y": "Int64", + "setviewportsize_height": "Int64", + "setviewportsize_width": "Int64", + "stateaction_type": "object", + "stateactionevent_messageid": "Int64", + "stateactionevent_timestamp": "Int64", + "stateactionevent_type": "object", + "timestamp_timestamp": "Int64", + "useranonymousid_id": "object", + "userid_id": "object", + "vuex_mutation": "object", + "vuex_state": "string", + "received_at": "Int64", + "batch_order_number": "Int64" +} +dtypes_sessions = {'sessionid': 'Int64', + 'user_agent': 'string', + 'user_browser': 'string', + 'user_browser_version': 'string', + 'user_country': 'string', + 'user_device': 'string', + 'user_device_heap_size': 'Int64', + 'user_device_memory_size': 'Int64', + 'user_device_type': 'string', + 'user_os': 'string', + 'user_os_version': 'string', + 'user_uuid': 'string', + 'connection_effective_bandwidth': 'Int64', + 'connection_type': 'string', + 'metadata_key': 'string', + 'metadata_value': 'string', + 'referrer': 'string', + 'user_anonymous_id': 'string', + 'user_id': 'string', + 'session_start_timestamp': 'Int64', + 'session_end_timestamp': 'Int64', + 'session_duration': 'Int64', + 'first_contentful_paint': 'Int64', + 'speed_index': 'Int64', + 'visually_complete': 'Int64', + 'timing_time_to_interactive': 'Int64', + 'avg_cpu': 'Int64', + 'avg_fps': 'Int64', + 'max_cpu': 'Int64', + 'max_fps': 'Int64', + 'max_total_js_heap_size': 'Int64', + 'max_used_js_heap_size': 'Int64', + 'js_exceptions_count': 'Int64', + 'long_tasks_total_duration': 'Int64', + 'long_tasks_max_duration': 'Int64', + 'long_tasks_count': 'Int64', + 'inputs_count': 'Int64', + 'clicks_count': 'Int64', + 'issues_count': 'Int64', + 'issues': 'object', + 'urls_count': 'Int64', + 'urls': 'object'} + +if DATABASE == 'bigquery': + dtypes_sessions['urls'] = 'string' + dtypes_sessions['issues'] = 'string' + +detailed_events_col = [] +for col in DetailedEvent.__dict__: + if not col.startswith('_'): + detailed_events_col.append(col) + +events_col = [] +for col in Event.__dict__: + if not col.startswith('_'): + events_col.append(col) + +sessions_col = [] +for col in Session.__dict__: + if not col.startswith('_'): + sessions_col.append(col) + + +def get_df_from_batch(batch, level): + if level == 'normal': + df = pd.DataFrame([b.__dict__ for b in batch], columns=events_col) + if level == 'detailed': + df = pd.DataFrame([b.__dict__ for b in batch], columns=detailed_events_col) + if level == 'sessions': + df = pd.DataFrame([b.__dict__ for b in batch], columns=sessions_col) + + try: + df = df.drop('_sa_instance_state', axis=1) + except KeyError: + pass + + if level == 'normal': + df = df.astype(dtypes_events) + if level == 'detailed': + df['inputevent_value'] = None + df['customevent_payload'] = None + df = df.astype(dtypes_detailed_events) + if level == 'sessions': + df = df.astype(dtypes_sessions) + + if DATABASE == 'clickhouse' and level == 'sessions': + df['issues'] = df['issues'].fillna('') + df['urls'] = df['urls'].fillna('') + + for x in df.columns: + try: + if df[x].dtype == 'string': + df[x] = df[x].str.slice(0, 255) + df[x] = df[x].str.replace("|", "") + except TypeError as e: + print(repr(e)) + if df[x].dtype == 'str': + df[x] = df[x].str.slice(0, 255) + df[x] = df[x].str.replace("|", "") + return df diff --git a/ee/connectors/db/writer.py b/ee/connectors/db/writer.py new file mode 100644 index 000000000..b999b773f --- /dev/null +++ b/ee/connectors/db/writer.py @@ -0,0 +1,63 @@ +import os +DATABASE = 
os.environ['DATABASE_NAME'] + +from db.api import DBConnection +from db.utils import get_df_from_batch +from db.tables import * + +if DATABASE == 'redshift': + from db.loaders.redshift_loader import transit_insert_to_redshift +if DATABASE == 'clickhouse': + from db.loaders.clickhouse_loader import insert_to_clickhouse +if DATABASE == 'pg': + from db.loaders.postgres_loader import insert_to_postgres +if DATABASE == 'bigquery': + from db.loaders.bigquery_loader import insert_to_bigquery + from bigquery_utils.create_table import create_tables_bigquery +if DATABASE == 'snowflake': + from db.loaders.snowflake_loader import insert_to_snowflake + + +# create tables if don't exist +try: + db = DBConnection(DATABASE) + if DATABASE == 'pg': + create_tables_postgres(db) + if DATABASE == 'clickhouse': + create_tables_clickhouse(db) + if DATABASE == 'snowflake': + create_tables_snowflake(db) + if DATABASE == 'bigquery': + create_tables_bigquery() + if DATABASE == 'redshift': + create_tables_redshift(db) + db.engine.dispose() + db = None +except Exception as e: + print(repr(e)) + print("Please create the tables with scripts provided in " + "'/sql/{DATABASE}_sessions.sql' and '/sql/{DATABASE}_events.sql'") + + +def insert_batch(db: DBConnection, batch, table, level='normal'): + + if len(batch) == 0: + return + df = get_df_from_batch(batch, level=level) + + if db.config == 'redshift': + transit_insert_to_redshift(db=db, df=df, table=table) + return + + if db.config == 'clickhouse': + insert_to_clickhouse(db=db, df=df, table=table) + + if db.config == 'pg': + insert_to_postgres(db=db, df=df, table=table) + + if db.config == 'bigquery': + insert_to_bigquery(df=df, table=table) + + if db.config == 'snowflake': + insert_to_snowflake(db=db, df=df, table=table) + diff --git a/ee/connectors/handler.py b/ee/connectors/handler.py new file mode 100644 index 000000000..5167c7800 --- /dev/null +++ b/ee/connectors/handler.py @@ -0,0 +1,647 @@ +from typing import Optional, Union + +from db.models import Event, DetailedEvent, Session +from msgcodec.messages import * + + +def handle_normal_message(message: Message) -> Optional[Event]: + + n = Event() + + if isinstance(message, ConnectionInformation): + n.connectioninformation_downlink = message.downlink + n.connectioninformation_type = message.type + return n + + if isinstance(message, ConsoleLog): + n.consolelog_level = message.level + n.consolelog_value = message.value + return n + + if isinstance(message, CustomEvent): + n.customevent_messageid = message.message_id + n.customevent_name = message.name + n.customevent_timestamp = message.timestamp + n.customevent_payload = message.payload + return n + + if isinstance(message, ErrorEvent): + n.errorevent_message = message.message + n.errorevent_messageid = message.message_id + n.errorevent_name = message.name + n.errorevent_payload = message.payload + n.errorevent_source = message.source + n.errorevent_timestamp = message.timestamp + return n + + if isinstance(message, JSException): + n.jsexception_name = message.name + n.jsexception_payload = message.payload + n.jsexception_message = message.message + return n + + if isinstance(message, Metadata): + n.metadata_key = message.key + n.metadata_value = message.value + return n + + if isinstance(message, MouseClick): + n.mouseclick_hesitationtime = message.hesitation_time + n.mouseclick_id = message.id + n.mouseclick_label = message.label + return n + + if isinstance(message, PageEvent): + n.pageevent_firstcontentfulpaint = message.first_contentful_paint + 
n.pageevent_firstpaint = message.first_paint + n.pageevent_messageid = message.message_id + n.pageevent_referrer = message.referrer + n.pageevent_speedindex = message.speed_index + n.pageevent_timestamp = message.timestamp + n.pageevent_url = message.url + return n + + if isinstance(message, PageRenderTiming): + n.pagerendertiming_timetointeractive = message.time_to_interactive + n.pagerendertiming_visuallycomplete = message.visually_complete + return n + + if isinstance(message, RawCustomEvent): + n.rawcustomevent_name = message.name + n.rawcustomevent_payload = message.payload + return n + + if isinstance(message, SetViewportSize): + n.setviewportsize_height = message.height + n.setviewportsize_width = message.width + return n + + if isinstance(message, Timestamp): + n.timestamp_timestamp = message.timestamp + return n + + if isinstance(message, UserAnonymousID): + n.user_anonymous_id = message.id + return n + + if isinstance(message, UserID): + n.user_id = message.id + return n + + if isinstance(message, IssueEvent): + n.issueevent_messageid = message.message_id + n.issueevent_timestamp = message.timestamp + n.issueevent_type = message.type + n.issueevent_contextstring = message.context_string + n.issueevent_context = message.context + n.issueevent_payload = message.payload + return n + + if isinstance(message, CustomIssue): + n.customissue_name = message.name + n.customissue_payload = message.payload + return n + + +def handle_session(n: Session, message: Message) -> Optional[Session]: + + if not n: + n = Session() + + if isinstance(message, SessionStart): + n.session_start_timestamp = message.timestamp + + n.user_uuid = message.user_uuid + n.user_agent = message.user_agent + n.user_os = message.user_os + n.user_os_version = message.user_os_version + n.user_browser = message.user_browser + n.user_browser_version = message.user_browser_version + n.user_device = message.user_device + n.user_device_type = message.user_device_type + n.user_device_memory_size = message.user_device_memory_size + n.user_device_heap_size = message.user_device_heap_size + n.user_country = message.user_country + return n + + if isinstance(message, SessionEnd): + n.session_end_timestamp = message.timestamp + try: + n.session_duration = n.session_end_timestamp - n.session_start_timestamp + except TypeError: + pass + return n + + if isinstance(message, ConnectionInformation): + n.connection_effective_bandwidth = message.downlink + n.connection_type = message.type + return n + + if isinstance(message, Metadata): + n.metadata_key = message.key + n.metadata_value = message.value + return n + + if isinstance(message, PageEvent): + n.referrer = message.referrer + n.first_contentful_paint = message.first_contentful_paint + n.speed_index = message.speed_index + n.timing_time_to_interactive = message.time_to_interactive + n.visually_complete = message.visually_complete + try: + n.urls_count += 1 + except TypeError: + n.urls_count = 1 + try: + n.urls.append(message.url) + except AttributeError: + n.urls = [message.url] + return n + + if isinstance(message, PerformanceTrackAggr): + n.avg_cpu = message.avg_cpu + n.avg_fps = message.avg_fps + n.max_cpu = message.max_cpu + n.max_fps = message.max_fps + n.max_total_js_heap_size = message.max_total_js_heap_size + n.max_used_js_heap_size = message.max_used_js_heap_size + return n + + if isinstance(message, UserID): + n.user_id = message.id + return n + + if isinstance(message, UserAnonymousID): + n.user_anonymous_id = message.id + return n + + if isinstance(message, JSException): 
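+ # Counter fields on a fresh Session default to None, so each increment + # below relies on the except TypeError branch to initialize the counter + # on its first occurrence.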
+ try: + n.js_exceptions_count += 1 + except TypeError: + n.js_exceptions_count = 1 + return n + + if isinstance(message, LongTask): + try: + n.long_tasks_total_duration += message.duration + except TypeError: + n.long_tasks_total_duration = message.duration + + # keep the largest long-task duration seen so far + try: + if n.long_tasks_max_duration < message.duration: + n.long_tasks_max_duration = message.duration + except TypeError: + n.long_tasks_max_duration = message.duration + + try: + n.long_tasks_count += 1 + except TypeError: + n.long_tasks_count = 1 + return n + + if isinstance(message, InputEvent): + try: + n.inputs_count += 1 + except TypeError: + n.inputs_count = 1 + return n + + if isinstance(message, MouseClick): + # clicks go to the dedicated clicks_count column, not inputs_count + try: + n.clicks_count += 1 + except TypeError: + n.clicks_count = 1 + return n + + if isinstance(message, IssueEvent): + try: + n.issues_count += 1 + except TypeError: + n.issues_count = 1 + + try: + n.issues.append(message.type) + except AttributeError: + n.issues = [message.type] + return n + + +def handle_message(message: Message) -> Optional[DetailedEvent]: + n = DetailedEvent() + + if isinstance(message, SessionEnd): + n.sessionend = True + n.sessionend_timestamp = message.timestamp + return n + + if isinstance(message, Timestamp): + n.timestamp_timestamp = message.timestamp + return n + + if isinstance(message, SessionDisconnect): + n.sessiondisconnect = True + n.sessiondisconnect_timestamp = message.timestamp + return n + + if isinstance(message, SessionStart): + n.sessionstart_trackerversion = message.tracker_version + n.sessionstart_revid = message.rev_id + n.sessionstart_timestamp = message.timestamp + n.sessionstart_useruuid = message.user_uuid + n.sessionstart_useragent = message.user_agent + n.sessionstart_useros = message.user_os + n.sessionstart_userosversion = message.user_os_version + n.sessionstart_userbrowser = message.user_browser + n.sessionstart_userbrowserversion = message.user_browser_version + n.sessionstart_userdevice = message.user_device + n.sessionstart_userdevicetype = message.user_device_type + n.sessionstart_userdevicememorysize = message.user_device_memory_size + n.sessionstart_userdeviceheapsize = message.user_device_heap_size + n.sessionstart_usercountry = message.user_country + return n + + if isinstance(message, SetViewportSize): + n.setviewportsize_width = message.width + n.setviewportsize_height = message.height + return n + + if isinstance(message, SetViewportScroll): + n.setviewportscroll_x = message.x + n.setviewportscroll_y = message.y + return n + + if isinstance(message, SetNodeScroll): + n.setnodescroll_id = message.id + n.setnodescroll_x = message.x + n.setnodescroll_y = message.y + return n + + if isinstance(message, ConsoleLog): + n.consolelog_level = message.level + n.consolelog_value = message.value + return n + + if isinstance(message, PageLoadTiming): + n.pageloadtiming_requeststart = message.request_start + n.pageloadtiming_responsestart = message.response_start + n.pageloadtiming_responseend = message.response_end + n.pageloadtiming_domcontentloadedeventstart = message.dom_content_loaded_event_start + n.pageloadtiming_domcontentloadedeventend = message.dom_content_loaded_event_end + n.pageloadtiming_loadeventstart = message.load_event_start + n.pageloadtiming_loadeventend = message.load_event_end + 
n.pageloadtiming_firstpaint = message.first_paint + n.pageloadtiming_firstcontentfulpaint = message.first_contentful_paint + return n + + if isinstance(message, PageRenderTiming): + n.pagerendertiming_speedindex = message.speed_index + n.pagerendertiming_visuallycomplete = message.visually_complete + n.pagerendertiming_timetointeractive = message.time_to_interactive + return n + + if isinstance(message, ResourceTiming): + n.resourcetiming_timestamp = message.timestamp + n.resourcetiming_duration = message.duration + n.resourcetiming_ttfb = message.ttfb + n.resourcetiming_headersize = message.header_size + n.resourcetiming_encodedbodysize = message.encoded_body_size + n.resourcetiming_decodedbodysize = message.decoded_body_size + n.resourcetiming_url = message.url + n.resourcetiming_initiator = message.initiator + return n + + if isinstance(message, JSException): + n.jsexception_name = message.name + n.jsexception_message = message.message + n.jsexception_payload = message.payload + return n + + if isinstance(message, RawErrorEvent): + n.rawerrorevent_timestamp = message.timestamp + n.rawerrorevent_source = message.source + n.rawerrorevent_name = message.name + n.rawerrorevent_message = message.message + n.rawerrorevent_payload = message.payload + return n + + if isinstance(message, RawCustomEvent): + n.rawcustomevent_name = message.name + n.rawcustomevent_payload = message.payload + return n + + if isinstance(message, UserID): + n.userid_id = message.id + return n + + if isinstance(message, UserAnonymousID): + n.useranonymousid_id = message.id + return n + + if isinstance(message, Metadata): + n.metadata_key = message.key + n.metadata_value = message.value + return n + + if isinstance(message, PerformanceTrack): + n.performancetrack_frames = message.frames + n.performancetrack_ticks = message.ticks + n.performancetrack_totaljsheapsize = message.total_js_heap_size + n.performancetrack_usedjsheapsize = message.used_js_heap_size + return n + + if isinstance(message, PerformanceTrackAggr): + n.performancetrackaggr_timestampstart = message.timestamp_start + n.performancetrackaggr_timestampend = message.timestamp_end + n.performancetrackaggr_minfps = message.min_fps + n.performancetrackaggr_avgfps = message.avg_fps + n.performancetrackaggr_maxfps = message.max_fps + n.performancetrackaggr_mincpu = message.min_cpu + n.performancetrackaggr_avgcpu = message.avg_cpu + n.performancetrackaggr_maxcpu = message.max_cpu + n.performancetrackaggr_mintotaljsheapsize = message.min_total_js_heap_size + n.performancetrackaggr_avgtotaljsheapsize = message.avg_total_js_heap_size + n.performancetrackaggr_maxtotaljsheapsize = message.max_total_js_heap_size + n.performancetrackaggr_minusedjsheapsize = message.min_used_js_heap_size + n.performancetrackaggr_avgusedjsheapsize = message.avg_used_js_heap_size + n.performancetrackaggr_maxusedjsheapsize = message.max_used_js_heap_size + return n + + if isinstance(message, ConnectionInformation): + n.connectioninformation_downlink = message.downlink + n.connectioninformation_type = message.type + return n + + if isinstance(message, PageEvent): + n.pageevent_messageid = message.message_id + n.pageevent_timestamp = message.timestamp + n.pageevent_url = message.url + n.pageevent_referrer = message.referrer + n.pageevent_loaded = message.loaded + n.pageevent_requeststart = message.request_start + n.pageevent_responsestart = message.response_start + n.pageevent_responseend = message.response_end + n.pageevent_domcontentloadedeventstart = message.dom_content_loaded_event_start + 
n.pageevent_domcontentloadedeventend = message.dom_content_loaded_event_end + n.pageevent_loadeventstart = message.load_event_start + n.pageevent_loadeventend = message.load_event_end + n.pageevent_firstpaint = message.first_paint + n.pageevent_firstcontentfulpaint = message.first_contentful_paint + n.pageevent_speedindex = message.speed_index + return n + + if isinstance(message, InputEvent): + n.inputevent_messageid = message.message_id + n.inputevent_timestamp = message.timestamp + n.inputevent_value = message.value + n.inputevent_valuemasked = message.value_masked + n.inputevent_label = message.label + return n + + if isinstance(message, ClickEvent): + n.clickevent_messageid = message.message_id + n.clickevent_timestamp = message.timestamp + n.clickevent_hesitationtime = message.hesitation_time + n.clickevent_label = message.label + return n + + if isinstance(message, ErrorEvent): + n.errorevent_messageid = message.message_id + n.errorevent_timestamp = message.timestamp + n.errorevent_source = message.source + n.errorevent_name = message.name + n.errorevent_message = message.message + n.errorevent_payload = message.payload + return n + + if isinstance(message, ResourceEvent): + n.resourceevent_messageid = message.message_id + n.resourceevent_timestamp = message.timestamp + n.resourceevent_duration = message.duration + n.resourceevent_ttfb = message.ttfb + n.resourceevent_headersize = message.header_size + n.resourceevent_encodedbodysize = message.encoded_body_size + n.resourceevent_decodedbodysize = message.decoded_body_size + n.resourceevent_url = message.url + n.resourceevent_type = message.type + n.resourceevent_success = message.success + n.resourceevent_method = message.method + n.resourceevent_status = message.status + return n + + if isinstance(message, CustomEvent): + n.customevent_messageid = message.message_id + n.customevent_timestamp = message.timestamp + n.customevent_name = message.name + n.customevent_payload = message.payload + return n + + # if isinstance(message, CreateDocument): + # n.createdocument = True + # return n + # + # if isinstance(message, CreateElementNode): + # n.createelementnode_id = message.id + # if isinstance(message.parent_id, tuple): + # n.createelementnode_parentid = message.parent_id[0] + # else: + # n.createelementnode_parentid = message.parent_id + # return n + + # if isinstance(message, CSSInsertRule): + # n.cssinsertrule_stylesheetid = message.id + # n.cssinsertrule_rule = message.rule + # n.cssinsertrule_index = message.index + # return n + # + # if isinstance(message, CSSDeleteRule): + # n.cssdeleterule_stylesheetid = message.id + # n.cssdeleterule_index = message.index + # return n + + if isinstance(message, Fetch): + n.fetch_method = message.method + n.fetch_url = message.url + n.fetch_request = message.request + n.fetch_status = message.status + n.fetch_timestamp = message.timestamp + n.fetch_duration = message.duration + return n + + if isinstance(message, Profiler): + n.profiler_name = message.name + n.profiler_duration = message.duration + n.profiler_args = message.args + n.profiler_result = message.result + return n + + if isinstance(message, GraphQL): + n.graphql_operationkind = message.operation_kind + n.graphql_operationname = message.operation_name + n.graphql_variables = message.variables + n.graphql_response = message.response + return n + + if isinstance(message, GraphQLEvent): + n.graphqlevent_messageid = message.message_id + n.graphqlevent_timestamp = message.timestamp + n.graphqlevent_name = message.name + return n + + if 
isinstance(message, DomDrop): + n.domdrop_timestamp = message.timestamp + return n + + if isinstance(message, MouseClick): + n.mouseclick_id = message.id + n.mouseclick_hesitationtime = message.hesitation_time + n.mouseclick_label = message.label + return n + + if isinstance(message, SetPageLocation): + n.setpagelocation_url = message.url + n.setpagelocation_referrer = message.referrer + n.setpagelocation_navigationstart = message.navigation_start + return n + + if isinstance(message, MouseMove): + n.mousemove_x = message.x + n.mousemove_y = message.y + return n + + if isinstance(message, LongTask): + n.longtasks_timestamp = message.timestamp + n.longtasks_duration = message.duration + n.longtask_context = message.context + n.longtask_containertype = message.container_type + n.longtasks_containersrc = message.container_src + n.longtasks_containerid = message.container_id + n.longtasks_containername = message.container_name + return n + + if isinstance(message, SetNodeURLBasedAttribute): + n.setnodeurlbasedattribute_id = message.id + n.setnodeurlbasedattribute_name = message.name + n.setnodeurlbasedattribute_value = message.value + n.setnodeurlbasedattribute_baseurl = message.base_url + return n + + if isinstance(message, SetStyleData): + n.setstyledata_id = message.id + n.setstyledata_data = message.data + n.setstyledata_baseurl = message.base_url + return n + + if isinstance(message, IssueEvent): + n.issueevent_messageid = message.message_id + n.issueevent_timestamp = message.timestamp + n.issueevent_type = message.type + n.issueevent_contextstring = message.context_string + n.issueevent_context = message.context + n.issueevent_payload = message.payload + return n + + if isinstance(message, TechnicalInfo): + n.technicalinfo_type = message.type + n.technicalinfo_value = message.value + return n + + if isinstance(message, CustomIssue): + n.customissue_name = message.name + n.customissue_payload = message.payload + return n + + if isinstance(message, PageClose): + n.pageclose = True + return n + + if isinstance(message, IOSSessionStart): + n.iossessionstart_timestamp = message.timestamp + n.iossessionstart_projectid = message.project_id + n.iossessionstart_trackerversion = message.tracker_version + n.iossessionstart_revid = message.rev_id + n.iossessionstart_useruuid = message.user_uuid + n.iossessionstart_useros = message.user_os + n.iossessionstart_userosversion = message.user_os_version + n.iossessionstart_userdevice = message.user_device + n.iossessionstart_userdevicetype = message.user_device_type + n.iossessionstart_usercountry = message.user_country + return n + + if isinstance(message, IOSSessionEnd): + n.iossessionend_timestamp = message.timestamp + return n + + if isinstance(message, IOSMetadata): + n.iosmetadata_timestamp = message.timestamp + n.iosmetadata_length = message.length + n.iosmetadata_key = message.key + n.iosmetadata_value = message.value + return n + + if isinstance(message, IOSUserID): + n.iosuserid_timestamp = message.timestamp + n.iosuserid_length = message.length + n.iosuserid_value = message.value + return n + + if isinstance(message, IOSUserAnonymousID): + n.iosuseranonymousid_timestamp = message.timestamp + n.iosuseranonymousid_length = message.length + n.iosuseranonymousid_value = message.value + return n + + if isinstance(message, IOSScreenLeave): + n.iosscreenleave_timestamp = message.timestamp + n.iosscreenleave_length = message.length + n.iosscreenleave_title = message.title + n.iosscreenleave_viewname = message.view_name + return n + + if 
isinstance(message, IOSLog): + n.ioslog_timestamp = message.timestamp + n.ioslog_length = message.length + n.ioslog_severity = message.severity + n.ioslog_content = message.content + return n + + if isinstance(message, IOSInternalError): + n.iosinternalerror_timestamp = message.timestamp + n.iosinternalerror_length = message.length + n.iosinternalerror_content = message.content + return n + + if isinstance(message, IOSPerformanceAggregated): + n.iosperformanceaggregated_timestampstart = message.timestamp_start + n.iosperformanceaggregated_timestampend = message.timestamp_end + n.iosperformanceaggregated_minfps = message.min_fps + n.iosperformanceaggregated_avgfps = message.avg_fps + n.iosperformanceaggregated_maxfps = message.max_fps + n.iosperformanceaggregated_mincpu = message.min_cpu + n.iosperformanceaggregated_avgcpu = message.avg_cpu + n.iosperformanceaggregated_maxcpu = message.max_cpu + n.iosperformanceaggregated_minmemory = message.min_memory + n.iosperformanceaggregated_avgmemory = message.avg_memory + n.iosperformanceaggregated_maxmemory = message.max_memory + n.iosperformanceaggregated_minbattery = message.min_battery + n.iosperformanceaggregated_avgbattery = message.avg_battery + n.iosperformanceaggregated_maxbattery = message.max_battery + return n + return None diff --git a/ee/connectors/main.py b/ee/connectors/main.py new file mode 100644 index 000000000..57349f6e9 --- /dev/null +++ b/ee/connectors/main.py @@ -0,0 +1,121 @@ +import os +from kafka import KafkaConsumer +from datetime import datetime +from collections import defaultdict + +from msgcodec.codec import MessageCodec +from msgcodec.messages import SessionEnd +from db.api import DBConnection +from db.models import events_detailed_table_name, events_table_name, sessions_table_name, conf +from db.writer import insert_batch +from handler import handle_message, handle_normal_message, handle_session + +DATABASE = os.environ['DATABASE_NAME'] +LEVEL = conf[DATABASE]['level'] + +db = DBConnection(DATABASE) + +if LEVEL == 'detailed': + table_name = events_detailed_table_name +elif LEVEL == 'normal': + table_name = events_table_name + + +def main(): + batch_size = 4000 + sessions_batch_size = 400 + batch = [] + sessions = defaultdict(lambda: None) + sessions_batch = [] + + codec = MessageCodec() + consumer = KafkaConsumer(security_protocol="SSL", + bootstrap_servers=[os.environ['KAFKA_SERVER_1'], + os.environ['KAFKA_SERVER_2']], + group_id=f"connector_{DATABASE}", + auto_offset_reset="earliest", + enable_auto_commit=False) + + consumer.subscribe(topics=["events", "messages"]) + print("Kafka consumer subscribed") + for msg in consumer: + message = codec.decode(msg.value) + if message is None: + print('-') + continue + + if LEVEL == 'detailed': + n = handle_message(message) + elif LEVEL == 'normal': + n = handle_normal_message(message) + + session_id = codec.decode_key(msg.key) + sessions[session_id] = handle_session(sessions[session_id], message) + if sessions[session_id]: + sessions[session_id].sessionid = session_id + + # put in a batch for insertion if received a SessionEnd + if isinstance(message, SessionEnd): + if sessions[session_id]: + sessions_batch.append(sessions[session_id]) + + # try to insert sessions + if len(sessions_batch) >= sessions_batch_size: + attempt_session_insert(sessions_batch) + for s in sessions_batch: + try: + del sessions[s.sessionid] + except KeyError as e: + print(repr(e)) + sessions_batch = [] + + if n: + n.sessionid = session_id + n.received_at = int(datetime.now().timestamp() * 1000) + 
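# received_at records the ingestion wall-clock time in epoch milliseconds; + # batch_order_number keeps the position inside the current batch so the + # original insertion order can be reconstructed downstream (see the + # ClickHouse ORDER BY clause in the sql/ scripts). + 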
n.batch_order_number = len(batch) + batch.append(n) + else: + continue + + # insert a batch of events + if len(batch) >= batch_size: + attempt_batch_insert(batch) + batch = [] + consumer.commit() + print("sessions in cache:", len(sessions)) + + +def attempt_session_insert(sess_batch): + if sess_batch: + try: + print("inserting sessions...") + insert_batch(db, sess_batch, table=sessions_table_name, level='sessions') + print("inserted sessions successfully") + except TypeError as e: + print("Type conversion error") + print(repr(e)) + except ValueError as e: + print("Message value could not be processed or inserted correctly") + print(repr(e)) + except Exception as e: + print(repr(e)) + + +def attempt_batch_insert(batch): + # insert a batch + try: + print("inserting...") + insert_batch(db=db, batch=batch, table=table_name, level=LEVEL) + print("inserted successfully") + except TypeError as e: + print("Type conversion error") + print(repr(e)) + except ValueError as e: + print("Message value could not be processed or inserted correctly") + print(repr(e)) + except Exception as e: + print(repr(e)) + + +if __name__ == '__main__': + main() diff --git a/ee/connectors/msgcodec/codec.py b/ee/connectors/msgcodec/codec.py new file mode 100644 index 000000000..18f074a33 --- /dev/null +++ b/ee/connectors/msgcodec/codec.py @@ -0,0 +1,670 @@ +import io + +from msgcodec.messages import * + + +class Codec: + """ + Implements encode/decode primitives + """ + + @staticmethod + def read_boolean(reader: io.BytesIO): + # reader.read() returns bytes, so compare against the raw byte 0x01 + b = reader.read(1) + return b == b'\x01' + + @staticmethod + def read_uint(reader: io.BytesIO): + """ + Decode an unsigned varint: each byte carries 7 payload bits, + least-significant group first; the high bit marks continuation. + The byte order "big" below is irrelevant, since we read the data + one byte at a time. + """ + x = 0 # the result + s = 0 # the shift (least-significant 7-bit group comes first) + i = 0 # n of byte (max 9 for uint64) + while True: + b = reader.read(1) + num = int.from_bytes(b, "big", signed=False) + + if num < 0x80: + if i > 9 or (i == 9 and num > 1): + raise OverflowError() + return int(x | num << s) + x |= (num & 0x7f) << s + s += 7 + i += 1 + + @staticmethod + def read_int(reader: io.BytesIO) -> int: + """ + Zigzag-decode a signed integer; port of the Go original: + ux, err := ReadUint(reader) + x := int64(ux >> 1) + if err != nil { + return x, err + } + if ux&1 != 0 { + x = ^x + } + return x, err + """ + ux = Codec.read_uint(reader) + x = int(ux >> 1) + + if ux & 1 != 0: + x = - x - 1 + return x + + @staticmethod + def read_string(reader: io.BytesIO) -> str: + length = Codec.read_uint(reader) + s = reader.read(length) + try: + return s.decode("utf-8", errors="replace").replace("\x00", "\uFFFD") + except UnicodeDecodeError: + return None + + +class MessageCodec(Codec): + + def encode(self, m: Message) -> bytes: + ... 
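+ + # A worked example of the varint scheme read_uint implements, assuming + # the two-byte input b'\xac\x02': + #   0xAC -> payload 0b0101100 (44), continuation bit set + #   0x02 -> payload 0b0000010 (2), stop byte + #   value = 44 | (2 << 7) = 300 + # i.e. Codec.read_uint(io.BytesIO(b'\xac\x02')) returns 300.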
+ + def decode(self, b: bytes) -> Message: + reader = io.BytesIO(b) + message_id = self.read_message_id(reader) + + if message_id == 0: + return Timestamp( + timestamp=self.read_uint(reader) + ) + if message_id == 1: + return SessionStart( + timestamp=self.read_uint(reader), + project_id=self.read_uint(reader), + tracker_version=self.read_string(reader), + rev_id=self.read_string(reader), + user_uuid=self.read_string(reader), + user_agent=self.read_string(reader), + user_os=self.read_string(reader), + user_os_version=self.read_string(reader), + user_browser=self.read_string(reader), + user_browser_version=self.read_string(reader), + user_device=self.read_string(reader), + user_device_type=self.read_string(reader), + user_device_memory_size=self.read_uint(reader), + user_device_heap_size=self.read_uint(reader), + user_country=self.read_string(reader) + ) + + if message_id == 2: + return SessionDisconnect( + timestamp=self.read_uint(reader) + ) + + if message_id == 3: + return SessionEnd( + timestamp=self.read_uint(reader) + ) + + if message_id == 4: + return SetPageLocation( + url=self.read_string(reader), + referrer=self.read_string(reader), + navigation_start=self.read_uint(reader) + ) + + if message_id == 5: + return SetViewportSize( + width=self.read_uint(reader), + height=self.read_uint(reader) + ) + + if message_id == 6: + return SetViewportScroll( + x=self.read_int(reader), + y=self.read_int(reader) + ) + + if message_id == 7: + return CreateDocument() + + if message_id == 8: + return CreateElementNode( + id=self.read_uint(reader), + parent_id=self.read_uint(reader), + index=self.read_uint(reader), + tag=self.read_string(reader), + svg=self.read_boolean(reader), + ) + + if message_id == 9: + return CreateTextNode( + id=self.read_uint(reader), + parent_id=self.read_uint(reader), + index=self.read_uint(reader) + ) + + if message_id == 10: + return MoveNode( + id=self.read_uint(reader), + parent_id=self.read_uint(reader), + index=self.read_uint(reader) + ) + + if message_id == 11: + return RemoveNode( + id=self.read_uint(reader) + ) + + if message_id == 12: + return SetNodeAttribute( + id=self.read_uint(reader), + name=self.read_string(reader), + value=self.read_string(reader) + ) + + if message_id == 13: + return RemoveNodeAttribute( + id=self.read_uint(reader), + name=self.read_string(reader) + ) + + if message_id == 14: + return SetNodeData( + id=self.read_uint(reader), + data=self.read_string(reader) + ) + + if message_id == 15: + return SetCSSData( + id=self.read_uint(reader), + data=self.read_string(reader) + ) + + if message_id == 16: + return SetNodeScroll( + id=self.read_uint(reader), + x=self.read_int(reader), + y=self.read_int(reader), + ) + + if message_id == 17: + return SetInputTarget( + id=self.read_uint(reader), + label=self.read_string(reader) + ) + + if message_id == 18: + return SetInputValue( + id=self.read_uint(reader), + value=self.read_string(reader), + mask=self.read_int(reader), + ) + + if message_id == 19: + return SetInputChecked( + id=self.read_uint(reader), + checked=self.read_boolean(reader) + ) + + if message_id == 20: + return MouseMove( + x=self.read_uint(reader), + y=self.read_uint(reader) + ) + + if message_id == 21: + return MouseClick( + id=self.read_uint(reader), + hesitation_time=self.read_uint(reader), + label=self.read_string(reader) + ) + + if message_id == 22: + return ConsoleLog( + level=self.read_string(reader), + value=self.read_string(reader) + ) + + if message_id == 23: + return PageLoadTiming( + request_start=self.read_uint(reader), + 
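# the read_* calls below run in argument order and consume bytes + # sequentially from the same buffer, so the field order must mirror + # the order in which the tracker encoded them + 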
response_start=self.read_uint(reader), + response_end=self.read_uint(reader), + dom_content_loaded_event_start=self.read_uint(reader), + dom_content_loaded_event_end=self.read_uint(reader), + load_event_start=self.read_uint(reader), + load_event_end=self.read_uint(reader), + first_paint=self.read_uint(reader), + first_contentful_paint=self.read_uint(reader) + ) + + if message_id == 24: + return PageRenderTiming( + speed_index=self.read_uint(reader), + visually_complete=self.read_uint(reader), + time_to_interactive=self.read_uint(reader), + ) + + if message_id == 25: + return JSException( + name=self.read_string(reader), + message=self.read_string(reader), + payload=self.read_string(reader) + ) + + if message_id == 26: + return RawErrorEvent( + timestamp=self.read_uint(reader), + source=self.read_string(reader), + name=self.read_string(reader), + message=self.read_string(reader), + payload=self.read_string(reader) + ) + + if message_id == 27: + return RawCustomEvent( + name=self.read_string(reader), + payload=self.read_string(reader) + ) + + if message_id == 28: + return UserID( + id=self.read_string(reader) + ) + + if message_id == 29: + return UserAnonymousID( + id=self.read_string(reader) + ) + + if message_id == 30: + return Metadata( + key=self.read_string(reader), + value=self.read_string(reader) + ) + + if message_id == 31: + return PageEvent( + message_id=self.read_uint(reader), + timestamp=self.read_uint(reader), + url=self.read_string(reader), + referrer=self.read_string(reader), + loaded=self.read_boolean(reader), + request_start=self.read_uint(reader), + response_start=self.read_uint(reader), + response_end=self.read_uint(reader), + dom_content_loaded_event_start=self.read_uint(reader), + dom_content_loaded_event_end=self.read_uint(reader), + load_event_start=self.read_uint(reader), + load_event_end=self.read_uint(reader), + first_paint=self.read_uint(reader), + first_contentful_paint=self.read_uint(reader), + speed_index=self.read_uint(reader), + visually_complete=self.read_uint(reader), + time_to_interactive=self.read_uint(reader) + ) + + if message_id == 32: + return InputEvent( + message_id=self.read_uint(reader), + timestamp=self.read_uint(reader), + value=self.read_string(reader), + value_masked=self.read_boolean(reader), + label=self.read_string(reader), + ) + + if message_id == 33: + return ClickEvent( + message_id=self.read_uint(reader), + timestamp=self.read_uint(reader), + hesitation_time=self.read_uint(reader), + label=self.read_string(reader) + ) + + if message_id == 34: + return ErrorEvent( + message_id=self.read_uint(reader), + timestamp=self.read_uint(reader), + source=self.read_string(reader), + name=self.read_string(reader), + message=self.read_string(reader), + payload=self.read_string(reader) + ) + + if message_id == 35: + + message_id = self.read_uint(reader) + ts = self.read_uint(reader) + if ts > 9999999999999: + ts = None + return ResourceEvent( + message_id=message_id, + timestamp=ts, + duration=self.read_uint(reader), + ttfb=self.read_uint(reader), + header_size=self.read_uint(reader), + encoded_body_size=self.read_uint(reader), + decoded_body_size=self.read_uint(reader), + url=self.read_string(reader), + type=self.read_string(reader), + success=self.read_boolean(reader), + method=self.read_string(reader), + status=self.read_uint(reader) + ) + + if message_id == 36: + return CustomEvent( + message_id=self.read_uint(reader), + timestamp=self.read_uint(reader), + name=self.read_string(reader), + payload=self.read_string(reader) + ) + + if message_id == 
37: + return CSSInsertRule( + id=self.read_uint(reader), + rule=self.read_string(reader), + index=self.read_uint(reader) + ) + + if message_id == 38: + return CSSDeleteRule( + id=self.read_uint(reader), + index=self.read_uint(reader) + ) + + if message_id == 39: + return Fetch( + method=self.read_string(reader), + url=self.read_string(reader), + request=self.read_string(reader), + response=self.read_string(reader), + status=self.read_uint(reader), + timestamp=self.read_uint(reader), + duration=self.read_uint(reader) + ) + + if message_id == 40: + return Profiler( + name=self.read_string(reader), + duration=self.read_uint(reader), + args=self.read_string(reader), + result=self.read_string(reader) + ) + + if message_id == 41: + return OTable( + key=self.read_string(reader), + value=self.read_string(reader) + ) + + if message_id == 42: + return StateAction( + type=self.read_string(reader) + ) + + if message_id == 43: + return StateActionEvent( + message_id=self.read_uint(reader), + timestamp=self.read_uint(reader), + type=self.read_string(reader) + ) + + if message_id == 44: + return Redux( + action=self.read_string(reader), + state=self.read_string(reader), + duration=self.read_uint(reader) + ) + + if message_id == 45: + return Vuex( + mutation=self.read_string(reader), + state=self.read_string(reader), + ) + + if message_id == 46: + return MobX( + type=self.read_string(reader), + payload=self.read_string(reader), + ) + + if message_id == 47: + return NgRx( + action=self.read_string(reader), + state=self.read_string(reader), + duration=self.read_uint(reader) + ) + + if message_id == 48: + return GraphQL( + operation_kind=self.read_string(reader), + operation_name=self.read_string(reader), + variables=self.read_string(reader), + response=self.read_string(reader) + ) + + if message_id == 49: + return PerformanceTrack( + frames=self.read_int(reader), + ticks=self.read_int(reader), + total_js_heap_size=self.read_uint(reader), + used_js_heap_size=self.read_uint(reader) + ) + + if message_id == 50: + return GraphQLEvent( + message_id=self.read_uint(reader), + timestamp=self.read_uint(reader), + name=self.read_string(reader) + ) + + if message_id == 52: + return DomDrop( + timestamp=self.read_uint(reader) + ) + + if message_id == 53: + return ResourceTiming( + timestamp=self.read_uint(reader), + duration=self.read_uint(reader), + ttfb=self.read_uint(reader), + header_size=self.read_uint(reader), + encoded_body_size=self.read_uint(reader), + decoded_body_size=self.read_uint(reader), + url=self.read_string(reader), + initiator=self.read_string(reader) + ) + + if message_id == 54: + return ConnectionInformation( + downlink=self.read_uint(reader), + type=self.read_string(reader) + ) + + if message_id == 55: + return SetPageVisibility( + hidden=self.read_boolean(reader) + ) + + if message_id == 56: + return PerformanceTrackAggr( + timestamp_start=self.read_uint(reader), + timestamp_end=self.read_uint(reader), + min_fps=self.read_uint(reader), + avg_fps=self.read_uint(reader), + max_fps=self.read_uint(reader), + min_cpu=self.read_uint(reader), + avg_cpu=self.read_uint(reader), + max_cpu=self.read_uint(reader), + min_total_js_heap_size=self.read_uint(reader), + avg_total_js_heap_size=self.read_uint(reader), + max_total_js_heap_size=self.read_uint(reader), + min_used_js_heap_size=self.read_uint(reader), + avg_used_js_heap_size=self.read_uint(reader), + max_used_js_heap_size=self.read_uint(reader) + ) + + if message_id == 59: + return LongTask( + timestamp=self.read_uint(reader), + 
duration=self.read_uint(reader), + context=self.read_uint(reader), + container_type=self.read_uint(reader), + container_src=self.read_string(reader), + container_id=self.read_string(reader), + container_name=self.read_string(reader) + ) + + if message_id == 60: + return SetNodeURLBasedAttribute( + id=self.read_uint(reader), + name=self.read_string(reader), + value=self.read_string(reader), + base_url=self.read_string(reader) + ) + + if message_id == 61: + return SetStyleData( + id=self.read_uint(reader), + data=self.read_string(reader), + base_url=self.read_string(reader) + ) + + if message_id == 62: + return IssueEvent( + message_id=self.read_uint(reader), + timestamp=self.read_uint(reader), + type=self.read_string(reader), + context_string=self.read_string(reader), + context=self.read_string(reader), + payload=self.read_string(reader) + ) + + if message_id == 63: + return TechnicalInfo( + type=self.read_string(reader), + value=self.read_string(reader) + ) + + if message_id == 64: + return CustomIssue( + name=self.read_string(reader), + payload=self.read_string(reader) + ) + + if message_id == 65: + return PageClose() + + if message_id == 90: + return IOSSessionStart( + timestamp=self.read_uint(reader), + project_id=self.read_uint(reader), + tracker_version=self.read_string(reader), + rev_id=self.read_string(reader), + user_uuid=self.read_string(reader), + user_os=self.read_string(reader), + user_os_version=self.read_string(reader), + user_device=self.read_string(reader), + user_device_type=self.read_string(reader), + user_country=self.read_string(reader) + ) + + if message_id == 91: + return IOSSessionEnd( + timestamp=self.read_uint(reader) + ) + + if message_id == 92: + return IOSMetadata( + timestamp=self.read_uint(reader), + length=self.read_uint(reader), + key=self.read_string(reader), + value=self.read_string(reader) + ) + + if message_id == 94: + return IOSUserID( + timestamp=self.read_uint(reader), + length=self.read_uint(reader), + value=self.read_string(reader) + ) + + if message_id == 95: + return IOSUserAnonymousID( + timestamp=self.read_uint(reader), + length=self.read_uint(reader), + value=self.read_string(reader) + ) + + if message_id == 99: + return IOSScreenLeave( + timestamp=self.read_uint(reader), + length=self.read_uint(reader), + title=self.read_string(reader), + view_name=self.read_string(reader) + ) + + if message_id == 103: + return IOSLog( + timestamp=self.read_uint(reader), + length=self.read_uint(reader), + severity=self.read_string(reader), + content=self.read_string(reader) + ) + + if message_id == 104: + return IOSInternalError( + timestamp=self.read_uint(reader), + length=self.read_uint(reader), + content=self.read_string(reader) + ) + + if message_id == 110: + return IOSPerformanceAggregated( + timestamp_start=self.read_uint(reader), + timestamp_end=self.read_uint(reader), + min_fps=self.read_uint(reader), + avg_fps=self.read_uint(reader), + max_fps=self.read_uint(reader), + min_cpu=self.read_uint(reader), + avg_cpu=self.read_uint(reader), + max_cpu=self.read_uint(reader), + min_memory=self.read_uint(reader), + avg_memory=self.read_uint(reader), + max_memory=self.read_uint(reader), + min_battery=self.read_uint(reader), + avg_battery=self.read_uint(reader), + max_battery=self.read_uint(reader) + ) + + def read_message_id(self, reader: io.BytesIO) -> int: + """ + Read and return the first byte where the message id is encoded + """ + id_ = self.read_uint(reader) + return id_ + + @staticmethod + def check_message_id(b: bytes) -> int: + """ + todo: make it 
static and without reader. It's just the first byte + Read and return the first byte where the message id is encoded + """ + reader = io.BytesIO(b) + id_ = Codec.read_uint(reader) + + return id_ + + @staticmethod + def decode_key(b) -> int: + """ + Decode the message key (encoded with little endian) + """ + try: + decoded = int.from_bytes(b, "little", signed=False) + except Exception as e: + # UnicodeDecodeError cannot be constructed from a single string, + # so raise a ValueError and chain the original cause instead + raise ValueError(f"Error while decoding message key (SessionID) from {b}") from e + return decoded diff --git a/ee/connectors/msgcodec/messages.py b/ee/connectors/msgcodec/messages.py new file mode 100644 index 000000000..c6e53b445 --- /dev/null +++ b/ee/connectors/msgcodec/messages.py @@ -0,0 +1,752 @@ +""" +Representations of Kafka messages +""" +from abc import ABC + + +class Message(ABC): + pass + + +class Timestamp(Message): + __id__ = 0 + + def __init__(self, timestamp): + self.timestamp = timestamp + + +class SessionStart(Message): + __id__ = 1 + + def __init__(self, timestamp, project_id, tracker_version, rev_id, user_uuid, + user_agent, user_os, user_os_version, user_browser, user_browser_version, + user_device, user_device_type, user_device_memory_size, user_device_heap_size, + user_country): + self.timestamp = timestamp + self.project_id = project_id + self.tracker_version = tracker_version + self.rev_id = rev_id + self.user_uuid = user_uuid + self.user_agent = user_agent + self.user_os = user_os + self.user_os_version = user_os_version + self.user_browser = user_browser + self.user_browser_version = user_browser_version + self.user_device = user_device + self.user_device_type = user_device_type + self.user_device_memory_size = user_device_memory_size + self.user_device_heap_size = user_device_heap_size + self.user_country = user_country + + +class SessionDisconnect(Message): + __id__ = 2 + + def __init__(self, timestamp): + self.timestamp = timestamp + + +class SessionEnd(Message): + __id__ = 3 + __name__ = 'SessionEnd' + + def __init__(self, timestamp): + self.timestamp = timestamp + + +class SetPageLocation(Message): + __id__ = 4 + + def __init__(self, url, referrer, navigation_start): + self.url = url + self.referrer = referrer + self.navigation_start = navigation_start + + +class SetViewportSize(Message): + __id__ = 5 + + def __init__(self, width, height): + self.width = width + self.height = height + + +class SetViewportScroll(Message): + __id__ = 6 + + def __init__(self, x, y): + self.x = x + self.y = y + + +class CreateDocument(Message): + __id__ = 7 + + +class CreateElementNode(Message): + __id__ = 8 + + def __init__(self, id, parent_id, index, tag, svg): + self.id = id + self.parent_id = parent_id # no trailing comma: a stray "," here would wrap parent_id in a 1-tuple + self.index = index + self.tag = tag + self.svg = svg + + +class CreateTextNode(Message): + __id__ = 9 + + def __init__(self, id, parent_id, index): + self.id = id + self.parent_id = parent_id + self.index = index + + +class MoveNode(Message): + __id__ = 10 + + def __init__(self, id, parent_id, index): + self.id = id + self.parent_id = parent_id + self.index = index + + +class RemoveNode(Message): + __id__ = 11 + + def __init__(self, id): + self.id = id + + +class SetNodeAttribute(Message): + __id__ = 12 + + def __init__(self, id, name: str, value: str): + self.id = id + self.name = name + self.value = value + + +class RemoveNodeAttribute(Message): + __id__ = 13 + + def __init__(self, id, name: str): + self.id = id + self.name = name + + +class SetNodeData(Message): + __id__ = 14 + + def __init__(self, id, data: str): + self.id = id + self.data = data + + +class 
SetCSSData(Message): + __id__ = 15 + + def __init__(self, id, data: str): + self.id = id + self.data = data + + +class SetNodeScroll(Message): + __id__ = 16 + + def __init__(self, id, x: int, y: int): + self.id = id + self.x = x + self.y = y + + +class SetInputTarget(Message): + __id__ = 17 + + def __init__(self, id, label: str): + self.id = id + self.label = label + + +class SetInputValue(Message): + __id__ = 18 + + def __init__(self, id, value: str, mask: int): + self.id = id + self.value = value + self.mask = mask + + +class SetInputChecked(Message): + __id__ = 19 + + def __init__(self, id, checked: bool): + self.id = id + self.checked = checked + + +class MouseMove(Message): + __id__ = 20 + + def __init__(self, x, y): + self.x = x + self.y = y + + +class MouseClick(Message): + __id__ = 21 + + def __init__(self, id, hesitation_time, label: str): + self.id = id + self.hesitation_time = hesitation_time + self.label = label + + +class ConsoleLog(Message): + __id__ = 22 + + def __init__(self, level: str, value: str): + self.level = level + self.value = value + + +class PageLoadTiming(Message): + __id__ = 23 + + def __init__(self, request_start, response_start, response_end, dom_content_loaded_event_start, + dom_content_loaded_event_end, load_event_start, load_event_end, + first_paint, first_contentful_paint): + self.request_start = request_start + self.response_start = response_start + self.response_end = response_end + self.dom_content_loaded_event_start = dom_content_loaded_event_start + self.dom_content_loaded_event_end = dom_content_loaded_event_end + self.load_event_start = load_event_start + self.load_event_end = load_event_end + self.first_paint = first_paint + self.first_contentful_paint = first_contentful_paint + + +class PageRenderTiming(Message): + __id__ = 24 + + def __init__(self, speed_index, visually_complete, time_to_interactive): + self.speed_index = speed_index + self.visually_complete = visually_complete + self.time_to_interactive = time_to_interactive + + +class JSException(Message): + __id__ = 25 + + def __init__(self, name: str, message: str, payload: str): + self.name = name + self.message = message + self.payload = payload + + +class RawErrorEvent(Message): + __id__ = 26 + + def __init__(self, timestamp, source: str, name: str, message: str, + payload: str): + self.timestamp = timestamp + self.source = source + self.name = name + self.message = message + self.payload = payload + + +class RawCustomEvent(Message): + __id__ = 27 + + def __init__(self, name: str, payload: str): + self.name = name + self.payload = payload + + +class UserID(Message): + __id__ = 28 + + def __init__(self, id: str): + self.id = id + + +class UserAnonymousID(Message): + __id__ = 29 + + def __init__(self, id: str): + self.id = id + + +class Metadata(Message): + __id__ = 30 + + def __init__(self, key: str, value: str): + self.key = key + self.value = value + + +class PageEvent(Message): + __id__ = 31 + + def __init__(self, message_id, timestamp, url: str, referrer: str, + loaded: bool, request_start, response_start, response_end, + dom_content_loaded_event_start, dom_content_loaded_event_end, + load_event_start, load_event_end, first_paint, first_contentful_paint, + speed_index, visually_complete, time_to_interactive): + 
self.message_id = message_id + self.timestamp = timestamp + self.url = url + self.referrer = referrer + self.loaded = loaded + self.request_start = request_start + self.response_start = response_start + self.response_end = response_end + self.dom_content_loaded_event_start = dom_content_loaded_event_start + self.dom_content_loaded_event_end = dom_content_loaded_event_end + self.load_event_start = load_event_start + self.load_event_end = load_event_end + self.first_paint = first_paint + self.first_contentful_paint = first_contentful_paint + self.speed_index = speed_index + self.visually_complete = visually_complete + self.time_to_interactive = time_to_interactive + + +class InputEvent(Message): + __id__ = 32 + + def __init__(self, message_id, timestamp, value: str, value_masked: bool, label: str): + self.message_id = message_id + self.timestamp = timestamp + self.value = value + self.value_masked = value_masked + self.label = label + + +class ClickEvent(Message): + __id__ = 33 + + def __init__(self, message_id, timestamp, hesitation_time, label: str): + self.message_id = message_id + self.timestamp = timestamp + self.hesitation_time = hesitation_time + self.label = label + + +class ErrorEvent(Message): + __id__ = 34 + + def __init__(self, message_id, timestamp, source: str, name: str, message: str, + payload: str): + self.message_id = message_id + self.timestamp = timestamp + self.source = source + self.name = name + self.message = message + self.payload = payload + + +class ResourceEvent(Message): + __id__ = 35 + + def __init__(self, message_id, timestamp, duration, ttfb, header_size, encoded_body_size, + decoded_body_size, url: str, type: str, success: bool, method: str, status): + self.message_id = message_id + self.timestamp = timestamp + self.duration = duration + self.ttfb = ttfb + self.header_size = header_size + self.encoded_body_size = encoded_body_size + self.decoded_body_size = decoded_body_size + self.url = url + self.type = type + self.success = success + self.method = method + self.status = status + + +class CustomEvent(Message): + __id__ = 36 + + def __init__(self, message_id, timestamp, name: str, payload: str): + self.message_id = message_id + self.timestamp = timestamp + self.name = name + self.payload = payload + + +class CSSInsertRule(Message): + __id__ = 37 + + def __init__(self, id, rule: str, index): + self.id = id + self.rule = rule + self.index = index + + +class CSSDeleteRule(Message): + __id__ = 38 + + def __init__(self, id, index): + self.id = id + self.index = index + + +class Fetch(Message): + __id__ = 39 + + def __init__(self, method: str, url: str, request: str, response: str, status, + timestamp, duration): + self.method = method + self.url = url + self.request = request + self.response = response + self.status = status + self.timestamp = timestamp + self.duration = duration + + +class Profiler(Message): + __id__ = 40 + + def __init__(self, name: str, duration, args: str, result: str): + self.name = name + self.duration = duration + self.args = args + self.result = result + + +class OTable(Message): + __id__ = 41 + + def __init__(self, key: str, value: str): + self.key = key + self.value = value + + +class StateAction(Message): + __id__ = 42 + + def __init__(self, type: str): + self.type = type + + +class StateActionEvent(Message): + __id__ = 43 + + def __init__(self, message_id, timestamp, type: str): + self.message_id = message_id + self.timestamp = timestamp + self.type = type + + +class Redux(Message): + __id__ = 44 + + def __init__(self, action: str, 
state: str, duration): + self.action = action + self.state = state + self.duration = duration + + +class Vuex(Message): + __id__ = 45 + + def __init__(self, mutation: str, state: str): + self.mutation = mutation + self.state = state + + +class MobX(Message): + __id__ = 46 + + def __init__(self, type: str, payload: str): + self.type = type + self.payload = payload + + +class NgRx(Message): + __id__ = 47 + + def __init__(self, action: str, state: str, duration): + self.action = action + self.state = state + self.duration = duration + + +class GraphQL(Message): + __id__ = 48 + + def __init__(self, operation_kind: str, operation_name: str, + variables: str, response: str): + self.operation_kind = operation_kind + self.operation_name = operation_name + self.variables = variables + self.response = response + + +class PerformanceTrack(Message): + __id__ = 49 + + def __init__(self, frames: int, ticks: int, + total_js_heap_size, used_js_heap_size): + self.frames = frames + self.ticks = ticks + self.total_js_heap_size = total_js_heap_size + self.used_js_heap_size = used_js_heap_size + + +class GraphQLEvent(Message): + __id__ = 50 + + def __init__(self, message_id, timestamp, name: str): + self.message_id = message_id + self.timestamp = timestamp + self.name = name + + +class DomDrop(Message): + __id__ = 52 + + def __init__(self, timestamp): + self.timestamp = timestamp + + +class ResourceTiming(Message): + __id__ = 53 + + def __init__(self, timestamp, duration, ttfb, header_size, encoded_body_size, + decoded_body_size, url, initiator): + self.timestamp = timestamp + self.duration = duration + self.ttfb = ttfb + self.header_size = header_size + self.encoded_body_size = encoded_body_size + self.decoded_body_size = decoded_body_size + self.url = url + self.initiator = initiator + + +class ConnectionInformation(Message): + __id__ = 54 + + def __init__(self, downlink, type: str): + self.downlink = downlink + self.type = type + + +class SetPageVisibility(Message): + __id__ = 55 + + def __init__(self, hidden: bool): + self.hidden = hidden + + +class PerformanceTrackAggr(Message): + __id__ = 56 + + def __init__(self, timestamp_start, timestamp_end, min_fps, avg_fps, + max_fps, min_cpu, avg_cpu, max_cpu, + min_total_js_heap_size, avg_total_js_heap_size, + max_total_js_heap_size, min_used_js_heap_size, + avg_used_js_heap_size, max_used_js_heap_size + ): + self.timestamp_start = timestamp_start + self.timestamp_end = timestamp_end + self.min_fps = min_fps + self.avg_fps = avg_fps + self.max_fps = max_fps + self.min_cpu = min_cpu + self.avg_cpu = avg_cpu + self.max_cpu = max_cpu + self.min_total_js_heap_size = min_total_js_heap_size + self.avg_total_js_heap_size = avg_total_js_heap_size + self.max_total_js_heap_size = max_total_js_heap_size + self.min_used_js_heap_size = min_used_js_heap_size + self.avg_used_js_heap_size = avg_used_js_heap_size + self.max_used_js_heap_size = max_used_js_heap_size + + +class LongTask(Message): + __id__ = 59 + + def __init__(self, timestamp, duration, context, container_type, container_src: str, + container_id: str, container_name: str): + self.timestamp = timestamp + self.duration = duration + self.context = context + self.container_type = container_type + self.container_src = container_src + self.container_id = container_id + self.container_name = container_name + + +class SetNodeURLBasedAttribute(Message): + __id__ = 60 + + def __init__(self, id, name: str, value: str, base_url: str): + self.id = id + self.name = name + self.value = value + self.base_url = base_url + + +class 
SetStyleData(Message): + __id__ = 61 + + def __init__(self, id, data: str, base_url: str): + self.id = id + self.data = data + self.base_url = base_url + + +class IssueEvent(Message): + __id__ = 62 + + def __init__(self, message_id, timestamp, type: str, context_string: str, + context: str, payload: str): + self.message_id = message_id + self.timestamp = timestamp + self.type = type + self.context_string = context_string + self.context = context + self.payload = payload + + +class TechnicalInfo(Message): + __id__ = 63 + + def __init__(self, type: str, value: str): + self.type = type + self.value = value + + +class CustomIssue(Message): + __id__ = 64 + + def __init__(self, name: str, payload: str): + self.name = name + self.payload = payload + + +class PageClose(Message): + __id__ = 65 + + +class IOSSessionStart(Message): + __id__ = 90 + + def __init__(self, timestamp, project_id, tracker_version: str, + rev_id: str, user_uuid: str, user_os: str, user_os_version: str, + user_device: str, user_device_type: str, user_country: str): + self.timestamp = timestamp + self.project_id = project_id + self.tracker_version = tracker_version + self.rev_id = rev_id + self.user_uuid = user_uuid + self.user_os = user_os + self.user_os_version = user_os_version + self.user_device = user_device + self.user_device_type = user_device_type + self.user_country = user_country + + +class IOSSessionEnd(Message): + __id__ = 91 + + def __init__(self, timestamp): + self.timestamp = timestamp + + +class IOSMetadata(Message): + __id__ = 92 + + def __init__(self, timestamp, length, key: str, value: str): + self.timestamp = timestamp + self.length = length + self.key = key + self.value = value + + +class IOSUserID(Message): + __id__ = 94 + + def __init__(self, timestamp, length, value: str): + self.timestamp = timestamp + self.length = length + self.value = value + + +class IOSUserAnonymousID(Message): + __id__ = 95 + + def __init__(self, timestamp, length, value: str): + self.timestamp = timestamp + self.length = length + self.value = value + + +class IOSScreenLeave(Message): + __id__ = 99 + + def __init__(self, timestamp, length, title: str, view_name: str): + self.timestamp = timestamp + self.length = length + self.title = title + self.view_name = view_name + + +class IOSLog(Message): + __id__ = 103 + + def __init__(self, timestamp, length, severity: str, content: str): + self.timestamp = timestamp + self.length = length + self.severity = severity + self.content = content + + +class IOSInternalError(Message): + __id__ = 104 + + def __init__(self, timestamp, length, content: str): + self.timestamp = timestamp + self.length = length + self.content = content + + +class IOSPerformanceAggregated(Message): + __id__ = 110 + + def __init__(self, timestamp_start, timestamp_end, min_fps, avg_fps, + max_fps, min_cpu, avg_cpu, max_cpu, + min_memory, avg_memory, max_memory, + min_battery, avg_battery, max_battery + ): + self.timestamp_start = timestamp_start + self.timestamp_end = timestamp_end + self.min_fps = min_fps + self.avg_fps = avg_fps + self.max_fps = max_fps + self.min_cpu = min_cpu + self.avg_cpu = avg_cpu + self.max_cpu = max_cpu + self.min_memory = min_memory + self.avg_memory = avg_memory + self.max_memory = max_memory + self.min_battery = min_battery + self.avg_battery = avg_battery + self.max_battery = max_battery diff --git a/ee/connectors/requirements.txt b/ee/connectors/requirements.txt new file mode 100644 index 000000000..a6b6a0720 --- /dev/null +++ b/ee/connectors/requirements.txt @@ -0,0 +1,43 @@ 
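+# Pinned dependencies for the connectors service: the Kafka client, pandas/SQLAlchemy, and one driver per supported warehouse (ClickHouse, Postgres, Redshift, BigQuery, Snowflake).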
+certifi==2020.12.5 +chardet==4.0.0 +clickhouse-driver==0.2.0 +clickhouse-sqlalchemy==0.1.5 +idna==2.10 +kafka-python==2.0.2 +pandas==1.2.3 +psycopg2-binary==2.8.6 +pytz==2021.1 +requests==2.25.1 +SQLAlchemy==1.3.23 +tzlocal==2.1 +urllib3==1.26.3 +PyYAML==5.4.1 +pandas-redshift +awswrangler +google-auth-httplib2 +google-auth-oauthlib +google-cloud-bigquery +pandas-gbq +snowflake-connector-python==2.4.1 +snowflake-sqlalchemy==1.2.4 +asn1crypto==1.4.0 +azure-common==1.1.25 +azure-core==1.8.2 +azure-storage-blob==12.5.0 +boto3==1.15.18 +botocore==1.18.18 +cffi==1.14.3 +cryptography==2.9.2 +isodate==0.6.0 +jmespath==0.10.0 +msrest==0.6.19 +oauthlib==3.1.0 +oscrypto==1.2.1 +pycparser==2.20 +pycryptodomex==3.9.8 +PyJWT==1.7.1 +pyOpenSSL==19.1.0 +python-dateutil==2.8.1 +requests-oauthlib==1.3.0 +s3transfer==0.3.3 +six==1.15.0 diff --git a/ee/connectors/sql/clickhouse_events.sql b/ee/connectors/sql/clickhouse_events.sql new file mode 100644 index 000000000..b5eb8b440 --- /dev/null +++ b/ee/connectors/sql/clickhouse_events.sql @@ -0,0 +1,56 @@ +CREATE TABLE IF NOT EXISTS connector_events +( + sessionid UInt64, + connectioninformation_downlink Nullable(UInt64), + connectioninformation_type Nullable(String), + consolelog_level Nullable(String), + consolelog_value Nullable(String), + customevent_messageid Nullable(UInt64), + customevent_name Nullable(String), + customevent_payload Nullable(String), + customevent_timestamp Nullable(UInt64), + errorevent_message Nullable(String), + errorevent_messageid Nullable(UInt64), + errorevent_name Nullable(String), + errorevent_payload Nullable(String), + errorevent_source Nullable(String), + errorevent_timestamp Nullable(UInt64), + jsexception_message Nullable(String), + jsexception_name Nullable(String), + jsexception_payload Nullable(String), + metadata_key Nullable(String), + metadata_value Nullable(String), + mouseclick_id Nullable(UInt64), + mouseclick_hesitationtime Nullable(UInt64), + mouseclick_label Nullable(String), + pageevent_firstcontentfulpaint Nullable(UInt64), + pageevent_firstpaint Nullable(UInt64), + pageevent_messageid Nullable(UInt64), + pageevent_referrer Nullable(String), + pageevent_speedindex Nullable(UInt64), + pageevent_timestamp Nullable(UInt64), + pageevent_url Nullable(String), + pagerendertiming_timetointeractive Nullable(UInt64), + pagerendertiming_visuallycomplete Nullable(UInt64), + rawcustomevent_name Nullable(String), + rawcustomevent_payload Nullable(String), + setviewportsize_height Nullable(UInt64), + setviewportsize_width Nullable(UInt64), + timestamp_timestamp Nullable(UInt64), + user_anonymous_id Nullable(String), + user_id Nullable(String), + issueevent_messageid Nullable(UInt64), + issueevent_timestamp Nullable(UInt64), + issueevent_type Nullable(String), + issueevent_contextstring Nullable(String), + issueevent_context Nullable(String), + issueevent_payload Nullable(String), + customissue_name Nullable(String), + customissue_payload Nullable(String), + received_at UInt64, + batch_order_number UInt64 +) ENGINE = MergeTree() +PARTITION BY intDiv(received_at, 100000) +ORDER BY (received_at, batch_order_number, sessionid) +PRIMARY KEY (received_at) +SETTINGS use_minimalistic_part_header_in_zookeeper=1, index_granularity=1000; \ No newline at end of file diff --git a/ee/connectors/sql/clickhouse_events_buffer.sql b/ee/connectors/sql/clickhouse_events_buffer.sql new file mode 100644 index 000000000..ed291c824 --- /dev/null +++ b/ee/connectors/sql/clickhouse_events_buffer.sql @@ -0,0 +1,52 @@ +CREATE TABLE IF NOT EXISTS 
connector_events_buffer +( + sessionid UInt64, + connectioninformation_downlink Nullable(UInt64), + connectioninformation_type Nullable(String), + consolelog_level Nullable(String), + consolelog_value Nullable(String), + customevent_messageid Nullable(UInt64), + customevent_name Nullable(String), + customevent_payload Nullable(String), + customevent_timestamp Nullable(UInt64), + errorevent_message Nullable(String), + errorevent_messageid Nullable(UInt64), + errorevent_name Nullable(String), + errorevent_payload Nullable(String), + errorevent_source Nullable(String), + errorevent_timestamp Nullable(UInt64), + jsexception_message Nullable(String), + jsexception_name Nullable(String), + jsexception_payload Nullable(String), + metadata_key Nullable(String), + metadata_value Nullable(String), + mouseclick_id Nullable(UInt64), + mouseclick_hesitationtime Nullable(UInt64), + mouseclick_label Nullable(String), + pageevent_firstcontentfulpaint Nullable(UInt64), + pageevent_firstpaint Nullable(UInt64), + pageevent_messageid Nullable(UInt64), + pageevent_referrer Nullable(String), + pageevent_speedindex Nullable(UInt64), + pageevent_timestamp Nullable(UInt64), + pageevent_url Nullable(String), + pagerendertiming_timetointeractive Nullable(UInt64), + pagerendertiming_visuallycomplete Nullable(UInt64), + rawcustomevent_name Nullable(String), + rawcustomevent_payload Nullable(String), + setviewportsize_height Nullable(UInt64), + setviewportsize_width Nullable(UInt64), + timestamp_timestamp Nullable(UInt64), + user_anonymous_id Nullable(String), + user_id Nullable(String), + issueevent_messageid Nullable(UInt64), + issueevent_timestamp Nullable(UInt64), + issueevent_type Nullable(String), + issueevent_contextstring Nullable(String), + issueevent_context Nullable(String), + issueevent_payload Nullable(String), + customissue_name Nullable(String), + customissue_payload Nullable(String), + received_at UInt64, + batch_order_number UInt64 +) ENGINE = Buffer(default, connector_events, 16, 10, 120, 10000, 1000000, 10000, 100000000); diff --git a/ee/connectors/sql/clickhouse_sessions.sql b/ee/connectors/sql/clickhouse_sessions.sql new file mode 100644 index 000000000..4d648553e --- /dev/null +++ b/ee/connectors/sql/clickhouse_sessions.sql @@ -0,0 +1,52 @@ +CREATE TABLE IF NOT EXISTS connector_user_sessions +( +-- SESSION METADATA + sessionid UInt64, + user_agent Nullable(String), + user_browser Nullable(String), + user_browser_version Nullable(String), + user_country Nullable(String), + user_device Nullable(String), + user_device_heap_size Nullable(UInt64), + user_device_memory_size Nullable(UInt64), + user_device_type Nullable(String), + user_os Nullable(String), + user_os_version Nullable(String), + user_uuid Nullable(String), + connection_effective_bandwidth Nullable(UInt64), -- Downlink + connection_type Nullable(String), --"bluetooth", "cellular", "ethernet", "none", "wifi", "wimax", "other", "unknown" + metadata_key Nullable(String), + metadata_value Nullable(String), + referrer Nullable(String), + user_anonymous_id Nullable(String), + user_id Nullable(String), +-- TIME + session_start_timestamp Nullable(UInt64), + session_end_timestamp Nullable(UInt64), + session_duration Nullable(UInt64), +-- SPEED INDEX RELATED + first_contentful_paint Nullable(UInt64), + speed_index Nullable(UInt64), + visually_complete Nullable(UInt64), + timing_time_to_interactive Nullable(UInt64), +-- PERFORMANCE + avg_cpu Nullable(UInt64), + avg_fps Nullable(UInt64), + max_cpu Nullable(UInt64), + max_fps Nullable(UInt64), + 
max_total_js_heap_size Nullable(UInt64), + max_used_js_heap_size Nullable(UInt64), +-- ISSUES AND EVENTS + js_exceptions_count Nullable(UInt64), + long_tasks_total_duration Nullable(UInt64), + long_tasks_max_duration Nullable(UInt64), + long_tasks_count Nullable(UInt64), + inputs_count Nullable(UInt64), + clicks_count Nullable(UInt64), + issues_count Nullable(UInt64), + issues Array(Nullable(String)), + urls_count Nullable(UInt64), + urls Array(Nullable(String)) +) ENGINE = MergeTree() +ORDER BY (sessionid) +PRIMARY KEY (sessionid); \ No newline at end of file diff --git a/ee/connectors/sql/clickhouse_sessions_buffer.sql b/ee/connectors/sql/clickhouse_sessions_buffer.sql new file mode 100644 index 000000000..540700d45 --- /dev/null +++ b/ee/connectors/sql/clickhouse_sessions_buffer.sql @@ -0,0 +1,50 @@ +CREATE TABLE IF NOT EXISTS connector_user_sessions_buffer +( +-- SESSION METADATA + sessionid UInt64, + user_agent Nullable(String), + user_browser Nullable(String), + user_browser_version Nullable(String), + user_country Nullable(String), + user_device Nullable(String), + user_device_heap_size Nullable(UInt64), + user_device_memory_size Nullable(UInt64), + user_device_type Nullable(String), + user_os Nullable(String), + user_os_version Nullable(String), + user_uuid Nullable(String), + connection_effective_bandwidth Nullable(UInt64), -- Downlink + connection_type Nullable(String), --"bluetooth", "cellular", "ethernet", "none", "wifi", "wimax", "other", "unknown" + metadata_key Nullable(String), + metadata_value Nullable(String), + referrer Nullable(String), + user_anonymous_id Nullable(String), + user_id Nullable(String), +-- TIME + session_start_timestamp Nullable(UInt64), + session_end_timestamp Nullable(UInt64), + session_duration Nullable(UInt64), +-- SPEED INDEX RELATED + first_contentful_paint Nullable(UInt64), + speed_index Nullable(UInt64), + visually_complete Nullable(UInt64), + timing_time_to_interactive Nullable(UInt64), +-- PERFORMANCE + avg_cpu Nullable(UInt64), + avg_fps Nullable(UInt64), + max_cpu Nullable(UInt64), + max_fps Nullable(UInt64), + max_total_js_heap_size Nullable(UInt64), + max_used_js_heap_size Nullable(UInt64), +-- ISSUES AND EVENTS + js_exceptions_count Nullable(UInt64), + long_tasks_total_duration Nullable(UInt64), + long_tasks_max_duration Nullable(UInt64), + long_tasks_count Nullable(UInt64), + inputs_count Nullable(UInt64), + clicks_count Nullable(UInt64), + issues_count Nullable(UInt64), + issues Array(Nullable(String)), + urls_count Nullable(UInt64), + urls Array(Nullable(String)) +) ENGINE = Buffer(default, connector_user_sessions, 16, 10, 120, 10000, 1000000, 10000, 100000000); diff --git a/ee/connectors/sql/postgres_events.sql b/ee/connectors/sql/postgres_events.sql new file mode 100644 index 000000000..986de4df9 --- /dev/null +++ b/ee/connectors/sql/postgres_events.sql @@ -0,0 +1,52 @@ +CREATE TABLE IF NOT EXISTS connector_events +( + sessionid bigint, + connectioninformation_downlink bigint, + connectioninformation_type text, + consolelog_level text, + consolelog_value text, + customevent_messageid bigint, + customevent_name text, + customevent_payload text, + customevent_timestamp bigint, + errorevent_message text, + errorevent_messageid bigint, + errorevent_name text, + errorevent_payload text, + errorevent_source text, + errorevent_timestamp bigint, + jsexception_message text, + jsexception_name text, + jsexception_payload text, + metadata_key text, + metadata_value text, + mouseclick_id bigint, + mouseclick_hesitationtime bigint, + 
mouseclick_label text, + pageevent_firstcontentfulpaint bigint, + pageevent_firstpaint bigint, + pageevent_messageid bigint, + pageevent_referrer text, + pageevent_speedindex bigint, + pageevent_timestamp bigint, + pageevent_url text, + pagerendertiming_timetointeractive bigint, + pagerendertiming_visuallycomplete bigint, + rawcustomevent_name text, + rawcustomevent_payload text, + setviewportsize_height bigint, + setviewportsize_width bigint, + timestamp_timestamp bigint, + user_anonymous_id text, + user_id text, + issueevent_messageid bigint, + issueevent_timestamp bigint, + issueevent_type text, + issueevent_contextstring text, + issueevent_context text, + issueevent_payload text, + customissue_name text, + customissue_payload text, + received_at bigint, + batch_order_number bigint +); \ No newline at end of file diff --git a/ee/connectors/sql/postgres_sessions.sql b/ee/connectors/sql/postgres_sessions.sql new file mode 100644 index 000000000..1f68309c2 --- /dev/null +++ b/ee/connectors/sql/postgres_sessions.sql @@ -0,0 +1,50 @@ +CREATE TABLE IF NOT EXISTS connector_user_sessions +( +-- SESSION METADATA + sessionid bigint, + user_agent text, + user_browser text, + user_browser_version text, + user_country text, + user_device text, + user_device_heap_size bigint, + user_device_memory_size bigint, + user_device_type text, + user_os text, + user_os_version text, + user_uuid text, + connection_effective_bandwidth bigint, -- Downlink + connection_type text, --"bluetooth", "cellular", "ethernet", "none", "wifi", "wimax", "other", "unknown" + metadata_key text, + metadata_value text, + referrer text, + user_anonymous_id text, + user_id text, +-- TIME + session_start_timestamp bigint, + session_end_timestamp bigint, + session_duration bigint, +-- SPEED INDEX RELATED + first_contentful_paint bigint, + speed_index bigint, + visually_complete bigint, + timing_time_to_interactive bigint, +-- PERFORMANCE + avg_cpu bigint, + avg_fps bigint, + max_cpu bigint, + max_fps bigint, + max_total_js_heap_size bigint, + max_used_js_heap_size bigint, +-- ISSUES AND EVENTS + js_exceptions_count bigint, + long_tasks_total_duration bigint, + long_tasks_max_duration bigint, + long_tasks_count bigint, + inputs_count bigint, + clicks_count bigint, + issues_count bigint, + issues text[], + urls_count bigint, + urls text[] +); \ No newline at end of file diff --git a/ee/connectors/sql/redshift_events.sql b/ee/connectors/sql/redshift_events.sql new file mode 100644 index 000000000..c310e3202 --- /dev/null +++ b/ee/connectors/sql/redshift_events.sql @@ -0,0 +1,52 @@ +CREATE TABLE connector_events +( + sessionid BIGINT, + connectioninformation_downlink BIGINT, + connectioninformation_type VARCHAR(300), + consolelog_level VARCHAR(300), + consolelog_value VARCHAR(300), + customevent_messageid BIGINT, + customevent_name VARCHAR(300), + customevent_payload VARCHAR(300), + customevent_timestamp BIGINT, + errorevent_message VARCHAR(300), + errorevent_messageid BIGINT, + errorevent_name VARCHAR(300), + errorevent_payload VARCHAR(300), + errorevent_source VARCHAR(300), + errorevent_timestamp BIGINT, + jsexception_message VARCHAR(300), + jsexception_name VARCHAR(300), + jsexception_payload VARCHAR(300), + metadata_key VARCHAR(300), + metadata_value VARCHAR(300), + mouseclick_id BIGINT, + mouseclick_hesitationtime BIGINT, + mouseclick_label VARCHAR(300), + pageevent_firstcontentfulpaint BIGINT, + pageevent_firstpaint BIGINT, + pageevent_messageid BIGINT, + pageevent_referrer VARCHAR(300), + pageevent_speedindex BIGINT, + 
pageevent_timestamp BIGINT, + pageevent_url VARCHAR(300), + pagerendertiming_timetointeractive BIGINT, + pagerendertiming_visuallycomplete BIGINT, + rawcustomevent_name VARCHAR(300), + rawcustomevent_payload VARCHAR(300), + setviewportsize_height BIGINT, + setviewportsize_width BIGINT, + timestamp_timestamp BIGINT, + user_anonymous_id VARCHAR(300), + user_id VARCHAR(300), + issueevent_messageid BIGINT, + issueevent_timestamp BIGINT, + issueevent_type VARCHAR(300), + issueevent_contextstring VARCHAR(300), + issueevent_context VARCHAR(300), + issueevent_payload VARCHAR(300), + customissue_name VARCHAR(300), + customissue_payload VARCHAR(300), + received_at BIGINT, + batch_order_number BIGINT +); \ No newline at end of file diff --git a/ee/connectors/sql/redshift_sessions.sql b/ee/connectors/sql/redshift_sessions.sql new file mode 100644 index 000000000..f1750dcc2 --- /dev/null +++ b/ee/connectors/sql/redshift_sessions.sql @@ -0,0 +1,50 @@ +CREATE TABLE connector_user_sessions +( +-- SESSION METADATA + sessionid bigint, + user_agent VARCHAR, + user_browser VARCHAR, + user_browser_version VARCHAR, + user_country VARCHAR, + user_device VARCHAR, + user_device_heap_size bigint, + user_device_memory_size bigint, + user_device_type VARCHAR, + user_os VARCHAR, + user_os_version VARCHAR, + user_uuid VARCHAR, + connection_effective_bandwidth bigint, -- Downlink + connection_type VARCHAR, --"bluetooth", "cellular", "ethernet", "none", "wifi", "wimax", "other", "unknown" + metadata_key VARCHAR, + metadata_value VARCHAR, + referrer VARCHAR, + user_anonymous_id VARCHAR, + user_id VARCHAR, +-- TIME + session_start_timestamp bigint, + session_end_timestamp bigint, + session_duration bigint, +-- SPEED INDEX RELATED + first_contentful_paint bigint, + speed_index bigint, + visually_complete bigint, + timing_time_to_interactive bigint, +-- PERFORMANCE + avg_cpu bigint, + avg_fps bigint, + max_cpu bigint, + max_fps bigint, + max_total_js_heap_size bigint, + max_used_js_heap_size bigint, +-- ISSUES AND EVENTS + js_exceptions_count bigint, + long_tasks_total_duration bigint, + long_tasks_max_duration bigint, + long_tasks_count bigint, + inputs_count bigint, + clicks_count bigint, + issues_count bigint, + issues VARCHAR, + urls_count bigint, + urls VARCHAR +); \ No newline at end of file diff --git a/ee/connectors/sql/snowflake_events.sql b/ee/connectors/sql/snowflake_events.sql new file mode 100644 index 000000000..986de4df9 --- /dev/null +++ b/ee/connectors/sql/snowflake_events.sql @@ -0,0 +1,52 @@ +CREATE TABLE IF NOT EXISTS connector_events +( + sessionid bigint, + connectioninformation_downlink bigint, + connectioninformation_type text, + consolelog_level text, + consolelog_value text, + customevent_messageid bigint, + customevent_name text, + customevent_payload text, + customevent_timestamp bigint, + errorevent_message text, + errorevent_messageid bigint, + errorevent_name text, + errorevent_payload text, + errorevent_source text, + errorevent_timestamp bigint, + jsexception_message text, + jsexception_name text, + jsexception_payload text, + metadata_key text, + metadata_value text, + mouseclick_id bigint, + mouseclick_hesitationtime bigint, + mouseclick_label text, + pageevent_firstcontentfulpaint bigint, + pageevent_firstpaint bigint, + pageevent_messageid bigint, + pageevent_referrer text, + pageevent_speedindex bigint, + pageevent_timestamp bigint, + pageevent_url text, + pagerendertiming_timetointeractive bigint, + pagerendertiming_visuallycomplete bigint, + rawcustomevent_name text, + 
rawcustomevent_payload text, + setviewportsize_height bigint, + setviewportsize_width bigint, + timestamp_timestamp bigint, + user_anonymous_id text, + user_id text, + issueevent_messageid bigint, + issueevent_timestamp bigint, + issueevent_type text, + issueevent_contextstring text, + issueevent_context text, + issueevent_payload text, + customissue_name text, + customissue_payload text, + received_at bigint, + batch_order_number bigint +); \ No newline at end of file diff --git a/ee/connectors/sql/snowflake_sessions.sql b/ee/connectors/sql/snowflake_sessions.sql new file mode 100644 index 000000000..c66bac2e6 --- /dev/null +++ b/ee/connectors/sql/snowflake_sessions.sql @@ -0,0 +1,50 @@ +CREATE TABLE IF NOT EXISTS connector_user_sessions ( +-- SESSION METADATA + sessionid bigint, + user_agent text, + user_browser text, + user_browser_version text, + user_country text, + user_device text, + user_device_heap_size bigint, + user_device_memory_size bigint, + user_device_type text, + user_os text, + user_os_version text, + user_uuid text, + connection_effective_bandwidth bigint, -- Downlink + connection_type text, --"bluetooth", "cellular", "ethernet", "none", "wifi", "wimax", "other", "unknown" + metadata_key text, + metadata_value text, + referrer text, + user_anonymous_id text, + user_id text, +-- TIME + session_start_timestamp bigint, + session_end_timestamp bigint, + session_duration bigint, +-- SPEED INDEX RELATED + first_contentful_paint bigint, + speed_index bigint, + visually_complete bigint, + timing_time_to_interactive bigint, +-- PERFORMANCE + avg_cpu bigint, + avg_fps bigint, + max_cpu bigint, + max_fps bigint, + max_total_js_heap_size bigint, + max_used_js_heap_size bigint, +-- ISSUES AND EVENTS + js_exceptions_count bigint, + long_tasks_total_duration bigint, + long_tasks_max_duration bigint, + long_tasks_count bigint, + inputs_count bigint, + clicks_count bigint, + issues_count bigint, + issues array, + urls_count bigint, + urls array +); \ No newline at end of file diff --git a/ee/connectors/utils/bigquery.env.example b/ee/connectors/utils/bigquery.env.example new file mode 100644 index 000000000..16d970501 --- /dev/null +++ b/ee/connectors/utils/bigquery.env.example @@ -0,0 +1,7 @@ +table_id='{project_id}.{dataset}.{table}' +project_id=name-123456 +dataset=datasetname +sessions_table=connector_user_sessions +events_table_name=connector_events +events_detailed_table_name=connector_events_detailed +level=normal diff --git a/ee/connectors/utils/bigquery_service_account.json.example b/ee/connectors/utils/bigquery_service_account.json.example new file mode 100644 index 000000000..e6473eed7 --- /dev/null +++ b/ee/connectors/utils/bigquery_service_account.json.example @@ -0,0 +1,12 @@ +{ + "type": "service_account", + "project_id": "aaaaaa-123456", + "private_key_id": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "private_key": "-----BEGIN PRIVATE KEY-----\nsome_letters_and_numbers\n-----END PRIVATE KEY-----\n", + "client_email": "abc-aws@aaaaaa-123456.iam.gserviceaccount.com", + "client_id": "12345678910111213", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://oauth2.googleapis.com/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/abc-aws%40aaaaaa-123456.iam.gserviceaccount.com" +} diff --git a/ee/connectors/utils/clickhouse.env.example b/ee/connectors/utils/clickhouse.env.example new file mode 100644 index 000000000..038fa2a87 ---
/dev/null +++ b/ee/connectors/utils/clickhouse.env.example @@ -0,0 +1,7 @@ +connect_str='clickhouse+native://{address}/{database}' +address=1.1.1.1:9000 +database=default +sessions_table=connector_user_sessions_buffer +events_table_name=connector_events_buffer +events_detailed_table_name=connector_events_detailed_buffer +level=normal diff --git a/ee/connectors/utils/pg.env.example b/ee/connectors/utils/pg.env.example new file mode 100644 index 000000000..e50b041f8 --- /dev/null +++ b/ee/connectors/utils/pg.env.example @@ -0,0 +1,10 @@ +connect_str='postgresql://{user}:{password}@{address}:{port}/{database}' +address=1.1.1.1 +port=8080 +database=dev +user=qwerty +password=qwertyQWERTY12345 +sessions_table=connector_user_sessions +events_table_name=connector_events +events_detailed_table_name=connector_events_detailed +level=normal diff --git a/ee/connectors/utils/redshift.env.example b/ee/connectors/utils/redshift.env.example new file mode 100644 index 000000000..d78b9a8a2 --- /dev/null +++ b/ee/connectors/utils/redshift.env.example @@ -0,0 +1,15 @@ +aws_access_key_id=QWERTYQWERTYQWERTY +aws_secret_access_key=abcdefgh12345678 +region_name=eu-central-3 +bucket=name_of_the_bucket +subdirectory=name_of_the_bucket_subdirectory +connect_str='postgresql://{user}:{password}@{address}:{port}/{schema}' +address=redshift-cluster-1.aaaaaaaaa.eu-central-3.redshift.amazonaws.com +port=5439 +schema=dev +user=admin +password=admin +sessions_table=connector_user_sessions +events_table_name=connector_events +events_detailed_table_name=connector_events_detailed +level=normal diff --git a/ee/connectors/utils/snowflake.env.example b/ee/connectors/utils/snowflake.env.example new file mode 100644 index 000000000..deed20462 --- /dev/null +++ b/ee/connectors/utils/snowflake.env.example @@ -0,0 +1,11 @@ +connect_str='snowflake://{user}:{password}@{account}/{database}/{schema}?warehouse={warehouse}' +user=admin +password=12345678 +account=aaaaaaa.eu-central-3 +database=dev +schema=public +warehouse=SOME_WH +sessions_table=connector_user_sessions +events_table_name=connector_events +events_detailed_table_name=connector_events_detailed +level=normal diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 9010cb07a..e880024d3 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -47,77 +47,78 @@ CREATE TABLE tenants CREATE TYPE user_role AS ENUM ('owner', 'admin', 'member'); CREATE TABLE users ( - user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, - email text NOT NULL UNIQUE, - role user_role NOT NULL DEFAULT 'member', - name text NOT NULL, - created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), - deleted_at timestamp without time zone NULL DEFAULT NULL, - appearance jsonb NOT NULL default '{ + user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, + email text NOT NULL UNIQUE, + role user_role NOT NULL DEFAULT 'member', + name text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + deleted_at timestamp without time zone NULL DEFAULT NULL, + appearance jsonb NOT NULL default '{ + "role": "dev", "dashboard": { - "applicationActivity": true, - "avgCpu": true, - "avgDomContentLoadStart": true, - "avgFirstContentfulPixel": 
false, - "avgFirstPaint": false, - "avgFps": false, - "avgImageLoadTime": true, - "avgPageLoadTime": true, - "avgPagesDomBuildtime": true, - "avgPagesResponseTime": false, - "avgRequestLoadTime": true, - "avgSessionDuration": false, - "avgTillFirstBit": false, - "avgTimeToInteractive": true, - "avgTimeToRender": true, - "avgUsedJsHeapSize": true, - "avgVisitedPages": false, - "busiestTimeOfDay": true, - "callsErrors_4xx": true, - "callsErrors_5xx": true, - "countSessions": true, - "cpu": true, - "crashes": true, - "errors": true, - "errorsPerDomains": true, - "errorsPerType": true, - "errorsTrend": true, + "cpu": false, "fps": false, - "impactedSessionsByJsErrors": true, - "impactedSessionsBySlowPages": true, - "memoryConsumption": true, - "missingResources": true, + "avgCpu": false, + "avgFps": false, + "errors": true, + "crashes": false, "overview": true, - "pageMetrics": true, - "pagesResponseTime": true, - "pagesResponseTimeDistribution": true, - "performance": true, - "resourceTypeVsResponseEnd": true, - "resourcesByParty": false, - "resourcesCountByType": true, - "resourcesLoadingTime": true, - "resourcesVsVisuallyComplete": true, "sessions": true, - "sessionsFeedback": false, - "sessionsFrustration": false, - "sessionsPerBrowser": false, - "slowestDomains": true, - "slowestImages": true, - "slowestResources": true, - "speedLocation": true, - "timeToRender": false, "topMetrics": true, - "userActivity": false + "callsErrors": false, + "pageMetrics": true, + "performance": true, + "timeToRender": false, + "userActivity": false, + "avgFirstPaint": false, + "countSessions": false, + "errorsPerType": false, + "slowestImages": true, + "speedLocation": false, + "slowestDomains": false, + "avgPageLoadTime": false, + "avgTillFirstBit": false, + "avgTimeToRender": false, + "avgVisitedPages": false, + "avgImageLoadTime": false, + "busiestTimeOfDay": true, + "errorsPerDomains": false, + "missingResources": false, + "resourcesByParty": false, + "sessionsFeedback": false, + "slowestResources": false, + "avgUsedJsHeapSize": false, + "domainsErrors_4xx": false, + "domainsErrors_5xx": false, + "memoryConsumption": false, + "pagesDomBuildtime": false, + "pagesResponseTime": false, + "avgRequestLoadTime": false, + "avgSessionDuration": false, + "sessionsPerBrowser": false, + "applicationActivity": true, + "sessionsFrustration": false, + "avgPagesDomBuildtime": false, + "avgPagesResponseTime": false, + "avgTimeToInteractive": false, + "resourcesCountByType": false, + "resourcesLoadingTime": false, + "avgDomContentLoadStart": false, + "avgFirstContentfulPixel": false, + "resourceTypeVsResponseEnd": false, + "impactedSessionsByJsErrors": false, + "impactedSessionsBySlowPages": false, + "resourcesVsVisuallyComplete": false, + "pagesResponseTimeDistribution": false }, - "runs": false, - "tests": false, - "pagesDomBuildtime": false + "sessionsLive": false, + "sessionsDevtools": true }'::jsonb, - api_key text UNIQUE default generate_api_key(20) not null, - jwt_iat timestamp without time zone NULL DEFAULT NULL, - data jsonb NOT NULL DEFAULT '{}'::jsonb, - weekly_report boolean NOT NULL DEFAULT TRUE + api_key text UNIQUE default generate_api_key(20) not null, + jwt_iat timestamp without time zone NULL DEFAULT NULL, + data jsonb NOT NULL DEFAULT '{}'::jsonb, + weekly_report boolean NOT NULL DEFAULT TRUE ); @@ -140,7 +141,7 @@ CREATE TABLE oauth_authentication provider oauth_provider NOT NULL, provider_user_id text NOT NULL, token text NOT NULL, - UNIQUE (provider, provider_user_id) + UNIQUE (user_id, provider) 
); @@ -445,7 +446,6 @@ CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error -- --- sessions.sql --- - CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other'); CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS'); CREATE TYPE platform AS ENUM ('web','ios','android'); @@ -456,7 +456,7 @@ CREATE TABLE sessions project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, tracker_version text NOT NULL, start_ts bigint NOT NULL, - duration integer NOT NULL, + duration integer NULL, rev_id text DEFAULT NULL, platform platform NOT NULL DEFAULT 'web', is_snippet boolean NOT NULL DEFAULT FALSE, @@ -508,6 +508,7 @@ CREATE INDEX ON sessions (project_id, metadata_7); CREATE INDEX ON sessions (project_id, metadata_8); CREATE INDEX ON sessions (project_id, metadata_9); CREATE INDEX ON sessions (project_id, metadata_10); +-- CREATE INDEX ON sessions (rehydration_id); CREATE INDEX ON sessions (project_id, watchdogs_score DESC); CREATE INDEX platform_idx ON public.sessions (platform); @@ -558,6 +559,18 @@ CREATE TABLE user_favorite_sessions ); +-- --- assignments.sql --- + +create table assigned_sessions +( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + issue_id text NOT NULL, + provider oauth_provider NOT NULL, + created_by integer NOT NULL, + created_at timestamp default timezone('utc'::text, now()) NOT NULL, + provider_data jsonb default '{}'::jsonb NOT NULL +); + -- --- events_common.sql --- CREATE SCHEMA events_common; @@ -613,7 +626,6 @@ CREATE INDEX requests_url_gin_idx2 ON events_common.requests USING GIN (RIGHT(ur gin_trgm_ops); -- --- events.sql --- - CREATE SCHEMA events; CREATE TABLE events.pages @@ -636,6 +648,7 @@ CREATE TABLE events.pages time_to_interactive integer DEFAULT NULL, response_time bigint DEFAULT NULL, response_end bigint DEFAULT NULL, + ttfb integer DEFAULT NULL, PRIMARY KEY (session_id, message_id) ); CREATE INDEX ON events.pages (session_id); @@ -655,6 +668,11 @@ CREATE INDEX pages_base_referrer_gin_idx2 ON events.pages USING 
GIN (RIGHT(base_ gin_trgm_ops); CREATE INDEX ON events.pages (response_time); CREATE INDEX ON events.pages (response_end); +CREATE INDEX pages_path_gin_idx ON events.pages USING GIN (path gin_trgm_ops); +CREATE INDEX pages_path_idx ON events.pages (path); +CREATE INDEX pages_visually_complete_idx ON events.pages (visually_complete) WHERE visually_complete > 0; +CREATE INDEX pages_dom_building_time_idx ON events.pages (dom_building_time) WHERE dom_building_time > 0; +CREATE INDEX pages_load_time_idx ON events.pages (load_time) WHERE load_time > 0; CREATE TABLE events.clicks @@ -721,6 +739,61 @@ CREATE INDEX ON events.state_actions (name); CREATE INDEX state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops); CREATE INDEX ON events.state_actions (timestamp); +CREATE TYPE events.resource_type AS ENUM ('other', 'script', 'stylesheet', 'fetch', 'img', 'media'); +CREATE TYPE events.resource_method AS ENUM ('GET' , 'HEAD' , 'POST' , 'PUT' , 'DELETE' , 'CONNECT' , 'OPTIONS' , 'TRACE' , 'PATCH' ); +CREATE TABLE events.resources +( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + duration bigint NULL, + type events.resource_type NOT NULL, + url text NOT NULL, + url_host text NOT NULL, + url_hostpath text NOT NULL, + success boolean NOT NULL, + status smallint NULL, + method events.resource_method NULL, + ttfb bigint NULL, + header_size bigint NULL, + encoded_body_size integer NULL, + decoded_body_size integer NULL, + PRIMARY KEY (session_id, message_id) +); +CREATE INDEX ON events.resources (session_id); +CREATE INDEX ON events.resources (timestamp); +CREATE INDEX ON events.resources (success); +CREATE INDEX ON events.resources (status); +CREATE INDEX ON events.resources (type); +CREATE INDEX ON events.resources (duration) WHERE duration > 0; +CREATE INDEX ON events.resources (url_host); + +CREATE INDEX resources_url_gin_idx ON events.resources USING GIN (url gin_trgm_ops); +CREATE INDEX resources_url_idx ON events.resources (url); +CREATE INDEX resources_url_hostpath_gin_idx ON events.resources USING GIN (url_hostpath gin_trgm_ops); +CREATE INDEX resources_url_hostpath_idx ON events.resources (url_hostpath); + + + +CREATE TABLE events.performance +( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + message_id bigint NOT NULL, + min_fps smallint NOT NULL, + avg_fps smallint NOT NULL, + max_fps smallint NOT NULL, + min_cpu smallint NOT NULL, + avg_cpu smallint NOT NULL, + max_cpu smallint NOT NULL, + min_total_js_heap_size bigint NOT NULL, + avg_total_js_heap_size bigint NOT NULL, + max_total_js_heap_size bigint NOT NULL, + min_used_js_heap_size bigint NOT NULL, + avg_used_js_heap_size bigint NOT NULL, + max_used_js_heap_size bigint NOT NULL, + PRIMARY KEY (session_id, message_id) +); CREATE OR REPLACE FUNCTION events.funnel(steps integer[], m integer) RETURNS boolean AS @@ -762,4 +835,4 @@ CREATE INDEX autocomplete_type_idx ON public.autocomplete (type); CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops); -COMMIT; \ No newline at end of file +COMMIT; diff --git a/frontend/app/Router.js b/frontend/app/Router.js index b2024c7d4..2706ca2e4 100644 --- a/frontend/app/Router.js +++ b/frontend/app/Router.js @@ -21,6 +21,7 @@ import FunnelIssueDetails from 'Components/Funnels/FunnelIssueDetails'; import APIClient from './api_client'; import * as routes from './routes'; +import { 
OB_DEFAULT_TAB } from 'App/routes'; import Signup from './components/Signup/Signup'; import { fetchTenants } from 'Duck/user'; @@ -48,6 +49,7 @@ const SIGNUP_PATH = routes.signup(); const FORGOT_PASSWORD = routes.forgotPassword(); const CLIENT_PATH = routes.client(); const ONBOARDING_PATH = routes.onboarding(); +const ONBOARDING_REDIRECT_PATH = routes.onboarding(OB_DEFAULT_TAB); @withRouter @connect((state) => { @@ -67,6 +69,7 @@ const ONBOARDING_PATH = routes.onboarding(); organisation: state.getIn([ 'user', 'client', 'name' ]), tenantId: state.getIn([ 'user', 'client', 'tenantId' ]), tenants: state.getIn(['user', 'tenants']), + onboarding: state.getIn([ 'user', 'onboarding' ]) }; }, { fetchUserInfo, fetchTenants @@ -92,7 +95,7 @@ class Router extends React.Component { } render() { - const { isLoggedIn, jwt, siteId, sites, loading, changePassword, location, tenants } = this.props; + const { isLoggedIn, jwt, siteId, sites, loading, changePassword, location, tenants, onboarding } = this.props; const siteIdList = sites.map(({ id }) => id).toJS(); const hideHeader = location.pathname && location.pathname.includes('/session/'); @@ -121,6 +124,9 @@ class Router extends React.Component { } } /> + { onboarding && + + } { siteIdList.length === 0 && } diff --git a/frontend/app/assets/apple-touch-icon.png b/frontend/app/assets/apple-touch-icon.png new file mode 100644 index 000000000..2adaf4f0d Binary files /dev/null and b/frontend/app/assets/apple-touch-icon.png differ diff --git a/frontend/app/assets/favicon-16x16.png b/frontend/app/assets/favicon-16x16.png new file mode 100644 index 000000000..fe36b0a1b Binary files /dev/null and b/frontend/app/assets/favicon-16x16.png differ diff --git a/frontend/app/assets/favicon-32x32.png b/frontend/app/assets/favicon-32x32.png new file mode 100644 index 000000000..980396723 Binary files /dev/null and b/frontend/app/assets/favicon-32x32.png differ diff --git a/frontend/app/assets/favicon.ico b/frontend/app/assets/favicon.ico new file mode 100644 index 000000000..734d9fcf2 Binary files /dev/null and b/frontend/app/assets/favicon.ico differ diff --git a/frontend/app/assets/favicon@1x.png b/frontend/app/assets/favicon@1x.png deleted file mode 100644 index 393d5d3cc..000000000 Binary files a/frontend/app/assets/favicon@1x.png and /dev/null differ diff --git a/frontend/app/assets/favicon@2x.png b/frontend/app/assets/favicon@2x.png deleted file mode 100644 index c99e774af..000000000 Binary files a/frontend/app/assets/favicon@2x.png and /dev/null differ diff --git a/frontend/app/assets/favicon@3x.png b/frontend/app/assets/favicon@3x.png deleted file mode 100644 index 4d38be71c..000000000 Binary files a/frontend/app/assets/favicon@3x.png and /dev/null differ diff --git a/frontend/app/assets/favicon@4x.png b/frontend/app/assets/favicon@4x.png deleted file mode 100644 index 19f3a4256..000000000 Binary files a/frontend/app/assets/favicon@4x.png and /dev/null differ diff --git a/frontend/app/assets/favicon@5x.png b/frontend/app/assets/favicon@5x.png deleted file mode 100644 index 81593e9e2..000000000 Binary files a/frontend/app/assets/favicon@5x.png and /dev/null differ diff --git a/frontend/app/assets/favicon@6x.png b/frontend/app/assets/favicon@6x.png deleted file mode 100644 index dbf51e62c..000000000 Binary files a/frontend/app/assets/favicon@6x.png and /dev/null differ diff --git a/frontend/app/assets/index.html b/frontend/app/assets/index.html index 3147d2337..f90b87ff2 100644 --- a/frontend/app/assets/index.html +++ b/frontend/app/assets/index.html @@ -5,12 +5,9 
@@ [index.html hunk body lost in extraction: six removed lines and three added lines of <head> markup, evidently the <link> tags pointing at the new favicon assets above] diff --git a/frontend/app/components/BugFinder/CustomFilters/FilterItem.js b/frontend/app/components/BugFinder/CustomFilters/FilterItem.js index e929a53c4..8b60b601c 100644 --- a/frontend/app/components/BugFinder/CustomFilters/FilterItem.js +++ b/frontend/app/components/BugFinder/CustomFilters/FilterItem.js @@ -6,7 +6,7 @@ import cn from 'classnames'; const FilterItem = ({ className = '', icon, label, onClick }) => { return (
- + { icon && } { label }
); diff --git a/frontend/app/components/BugFinder/DateRange.js b/frontend/app/components/BugFinder/DateRange.js index 4f2ce8e22..60e98ffa1 100644 --- a/frontend/app/components/BugFinder/DateRange.js +++ b/frontend/app/components/BugFinder/DateRange.js @@ -1,5 +1,5 @@ import { connect } from 'react-redux'; -import { applyFilter, fetchList } from 'Duck/filters'; +import { applyFilter } from 'Duck/filters'; import { fetchList as fetchFunnelsList } from 'Duck/funnels'; import DateRangeDropdown from 'Shared/DateRangeDropdown'; @@ -8,11 +8,10 @@ import DateRangeDropdown from 'Shared/DateRangeDropdown'; startDate: state.getIn([ 'filters', 'appliedFilter', 'startDate' ]), endDate: state.getIn([ 'filters', 'appliedFilter', 'endDate' ]), }), { - applyFilter, fetchList, fetchFunnelsList + applyFilter, fetchFunnelsList }) export default class DateRange extends React.PureComponent { onDateChange = (e) => { - this.props.fetchList(e.rangeValue) this.props.fetchFunnelsList(e.rangeValue) this.props.applyFilter(e) } diff --git a/frontend/app/components/BugFinder/SessionsMenu/SessionsMenu.js b/frontend/app/components/BugFinder/SessionsMenu/SessionsMenu.js index c29cdd6a5..af5adf937 100644 --- a/frontend/app/components/BugFinder/SessionsMenu/SessionsMenu.js +++ b/frontend/app/components/BugFinder/SessionsMenu/SessionsMenu.js @@ -3,14 +3,15 @@ import { connect } from 'react-redux'; import cn from 'classnames'; import { SideMenuitem, SavedSearchList, Progress, Popup } from 'UI' import stl from './sessionMenu.css'; -import { fetchList, fetchWatchdogStatus } from 'Duck/watchdogs'; +import { fetchWatchdogStatus } from 'Duck/watchdogs'; import { setActiveFlow, clearEvents } from 'Duck/filters'; import { setActiveTab } from 'Duck/sessions'; +import { issues_types } from 'Types/session/issue' function SessionsMenu(props) { const { activeFlow, activeTab, watchdogs = [], keyMap, wdTypeCount, - fetchList, fetchWatchdogStatus, toggleRehydratePanel } = props; + fetchWatchdogStatus, toggleRehydratePanel } = props; const onMenuItemClick = (filter) => { props.onMenuItemClick(filter) @@ -21,7 +22,6 @@ function SessionsMenu(props) { } useEffect(() => { - fetchList() fetchWatchdogStatus() }, []) @@ -62,7 +62,7 @@ function SessionsMenu(props) { /> - { watchdogs.filter(item => item.visible).map(item => ( + { issues_types.filter(item => item.visible).map(item => ( ({ - watchdogs: state.getIn(['watchdogs', 'list']).sortBy(i => i.order), activeTab: state.getIn([ 'sessions', 'activeTab' ]), keyMap: state.getIn([ 'sessions', 'keyMap' ]), wdTypeCount: state.getIn([ 'sessions', 'wdTypeCount' ]), activeFlow: state.getIn([ 'filters', 'activeFlow' ]), captureRate: state.getIn(['watchdogs', 'captureRate']), }), { - fetchList, fetchWatchdogStatus, setActiveFlow, clearEvents, setActiveTab + fetchWatchdogStatus, setActiveFlow, clearEvents, setActiveTab })(SessionsMenu); diff --git a/frontend/app/components/Client/Integrations/SlackAddForm/SlackAddForm.js b/frontend/app/components/Client/Integrations/SlackAddForm/SlackAddForm.js index 754146a36..16586fd1d 100644 --- a/frontend/app/components/Client/Integrations/SlackAddForm/SlackAddForm.js +++ b/frontend/app/components/Client/Integrations/SlackAddForm/SlackAddForm.js @@ -1,20 +1,22 @@ import React from 'react' import { connect } from 'react-redux' -import { edit, save, init } from 'Duck/integrations/slack' +import { edit, save, init, update } from 'Duck/integrations/slack' import { Form, Input, Button, Message } from 'UI' import { confirm } from 'UI/Confirmation'; import { remove } from 
'Duck/integrations/slack' class SlackAddForm extends React.PureComponent { - componentWillUnmount() { this.props.init({}); } save = () => { - this.props.save(this.props.instance).then(function() { - - }) + const instance = this.props.instance; + if(instance.exists()) { + this.props.update(this.props.instance) + } else { + this.props.save(this.props.instance) + } } remove = async (id) => { @@ -102,4 +104,4 @@ export default connect(state => ({ instance: state.getIn(['slack', 'instance']), saving: state.getIn(['slack', 'saveRequest', 'loading']), errors: state.getIn([ 'slack', 'saveRequest', 'errors' ]), -}), { edit, save, init, remove })(SlackAddForm) \ No newline at end of file +}), { edit, save, init, remove, update })(SlackAddForm) \ No newline at end of file diff --git a/frontend/app/components/Client/Integrations/SlackChannelList/SlackChannelList.js b/frontend/app/components/Client/Integrations/SlackChannelList/SlackChannelList.js index 88529ce58..e854dfce2 100644 --- a/frontend/app/components/Client/Integrations/SlackChannelList/SlackChannelList.js +++ b/frontend/app/components/Client/Integrations/SlackChannelList/SlackChannelList.js @@ -1,7 +1,8 @@ import React from 'react' import { connect } from 'react-redux' -import { TextEllipsis, NoContent } from 'UI'; +import { NoContent } from 'UI'; import { remove, edit } from 'Duck/integrations/slack' +import DocLink from 'Shared/DocLink/DocLink'; function SlackChannelList(props) { const { list } = props; @@ -14,7 +15,12 @@ function SlackChannelList(props) { return (
+
Integrate Slack with OpenReplay and share insights with the rest of the team, directly from the recording page.
+ +
+ } size="small" show={ list.size === 0 } > @@ -24,21 +30,12 @@ function SlackChannelList(props) { className="border-t px-5 py-2 flex items-center justify-between cursor-pointer" onClick={() => onEdit(c)} > -
+
{c.name}
- - {c.endpoint} -
- } - /> +
+ {c.endpoint} +
- {/*
- -
*/} ))} diff --git a/frontend/app/components/Client/ManageUsers/ManageUsers.js b/frontend/app/components/Client/ManageUsers/ManageUsers.js index c8d1c633d..9f0a4244d 100644 --- a/frontend/app/components/Client/ManageUsers/ManageUsers.js +++ b/frontend/app/components/Client/ManageUsers/ManageUsers.js @@ -7,6 +7,7 @@ import styles from './manageUsers.css'; import UserItem from './UserItem'; import { confirm } from 'UI/Confirmation'; import { toast } from 'react-toastify'; +import BannerMessage from 'Shared/BannerMessage'; const PERMISSION_WARNING = 'You don’t have the permissions to perform this action.'; const LIMIT_WARNING = 'You have reached users limit.'; @@ -38,7 +39,7 @@ class ManageUsers extends React.PureComponent { } adminLabel = (user) => { - if (user.superAdmin) return 'Super Admin'; + if (user.superAdmin) return 'Owner'; return user.admin ? 'Admin' : ''; }; @@ -158,28 +159,37 @@ class ManageUsers extends React.PureComponent { onClose={ this.closeModal } />
-
- { !hideHeader &&

{ (isAdmin ? 'Manage ' : '') + 'Users' }

} - { hideHeader &&

{ `Team Size ${members.size}` }

} - - this.init() } - /> -
+
+
+ { !hideHeader &&

{ (isAdmin ? 'Manage ' : '') + 'Users' }

} + { hideHeader &&

{ `Team Size ${members.size}` }

} + + this.init() } + /> +
+ } + // disabled={ canAddUsers } + content={ `${ !canAddUsers ? (!isAdmin ? PERMISSION_WARNING : LIMIT_WARNING) : 'Add team member' }` } + size="tiny" + inverted + position="top left" + /> +
+
+ { !account.smtp && + + Inviting new users requires email messaging. Please set up SMTP. + }
setTab(CLIENT_TABS.NOTIFICATIONS) } /> diff --git a/frontend/app/components/Client/ProfileSettings/OptOut.js b/frontend/app/components/Client/ProfileSettings/OptOut.js index dea675a60..6e4643d7b 100644 --- a/frontend/app/components/Client/ProfileSettings/OptOut.js +++ b/frontend/app/components/Client/ProfileSettings/OptOut.js @@ -6,7 +6,7 @@ import { updateClient } from 'Duck/user' function OptOut(props) { const { optOut } = props; const onChange = () => { - props.updateClient({ optOut: !optOut, name: 'OpenReplay' }) + props.updateClient({ optOut: !optOut }) } return (
diff --git a/frontend/app/components/Dashboard/Widgets/SessionsPerBrowser/Bar.css b/frontend/app/components/Dashboard/Widgets/SessionsPerBrowser/Bar.css index cf1b14578..dde6009e4 100644 --- a/frontend/app/components/Dashboard/Widgets/SessionsPerBrowser/Bar.css +++ b/frontend/app/components/Dashboard/Widgets/SessionsPerBrowser/Bar.css @@ -1,5 +1,5 @@ .bar { - height: 10px; + height: 5px; width: 100%; border-radius: 3px; display: flex; diff --git a/frontend/app/components/Errors/Error/ErrorInfo.js b/frontend/app/components/Errors/Error/ErrorInfo.js index c9681f25d..4726bc613 100644 --- a/frontend/app/components/Errors/Error/ErrorInfo.js +++ b/frontend/app/components/Errors/Error/ErrorInfo.js @@ -18,7 +18,7 @@ import SideSection from './SideSection'; export default class ErrorInfo extends React.PureComponent { ensureInstance() { const { errorId, loading, errorOnFetch } = this.props; - if (!loading && !errorOnFetch && + if (!loading && this.props.errorIdInStore !== errorId && errorId != null) { this.props.fetch(errorId); diff --git a/frontend/app/components/Funnels/FunnelDetails/FunnelDetails.js b/frontend/app/components/Funnels/FunnelDetails/FunnelDetails.js index 18702f5aa..0a8a997dd 100644 --- a/frontend/app/components/Funnels/FunnelDetails/FunnelDetails.js +++ b/frontend/app/components/Funnels/FunnelDetails/FunnelDetails.js @@ -34,9 +34,9 @@ const FunnelDetails = (props) => { useEffect(() => { if (funnels.size === 0) { - props.fetchList(); - props.fetchIssueTypes() + props.fetchList(); } + props.fetchIssueTypes() props.fetch(funnelId).then(() => { setMounted(true); diff --git a/frontend/app/components/Funnels/FunnelHeader/FunnelHeader.js b/frontend/app/components/Funnels/FunnelHeader/FunnelHeader.js index a747c9905..b7d140b1b 100644 --- a/frontend/app/components/Funnels/FunnelHeader/FunnelHeader.js +++ b/frontend/app/components/Funnels/FunnelHeader/FunnelHeader.js @@ -108,6 +108,7 @@ const FunnelHeader = (props) => { startDate={funnelFilters.startDate} endDate={funnelFilters.endDate} onDateChange={onDateChange} + customRangeRight />
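The FunnelDetails change above moves props.fetchIssueTypes() out of the funnels.size === 0 guard: the funnels list is still fetched only when it is empty, while issue types are now loaded on every mount. A minimal sketch (not part of this patch) of the resulting effect, assuming the same props shape:

useEffect(() => {
  if (funnels.size === 0) {
    props.fetchList(); // funnels may already be cached; fetch only when absent
  }
  props.fetchIssueTypes(); // issue types are needed on this page regardless
  props.fetch(funnelId).then(() => setMounted(true));
}, []);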
diff --git a/frontend/app/components/Header/Discover/featureItem.css b/frontend/app/components/Header/Discover/featureItem.css index 0c0d54b9c..d434c8f91 100644 --- a/frontend/app/components/Header/Discover/featureItem.css +++ b/frontend/app/components/Header/Discover/featureItem.css @@ -1,5 +1,5 @@ .wrapper { - padding: 10px 0; + padding: 7px 0; } .checkbox { diff --git a/frontend/app/components/Header/OnboardingExplore/FeatureItem.js b/frontend/app/components/Header/OnboardingExplore/FeatureItem.js index bbc31b3ad..cbb0c3472 100644 --- a/frontend/app/components/Header/OnboardingExplore/FeatureItem.js +++ b/frontend/app/components/Header/OnboardingExplore/FeatureItem.js @@ -6,7 +6,7 @@ import stl from './featureItem.css'; const FeatureItem = ({ label, completed = false, subText, onClick }) => { return (
diff --git a/frontend/app/components/Header/OnboardingExplore/OnboardingExplore.js b/frontend/app/components/Header/OnboardingExplore/OnboardingExplore.js index a6893fc17..c6d7aa179 100644 --- a/frontend/app/components/Header/OnboardingExplore/OnboardingExplore.js +++ b/frontend/app/components/Header/OnboardingExplore/OnboardingExplore.js @@ -121,7 +121,7 @@ class OnboardingExplore extends React.PureComponent {
- Follow the steps below to complete this project setup and make the best out of OpenReplay. + Make the best out of OpenReplay by completing your project setup:
@@ -131,7 +131,7 @@ class OnboardingExplore extends React.PureComponent { key={ task.task } label={ task.task } completed={ task.done } - onClick={task.URL && (() => this.onClick(task)) } + onClick={() => this.onClick(task) } /> ))}
diff --git a/frontend/app/components/Header/OnboardingExplore/featureItem.css b/frontend/app/components/Header/OnboardingExplore/featureItem.css index e0b005408..b0fe2dbb9 100644 --- a/frontend/app/components/Header/OnboardingExplore/featureItem.css +++ b/frontend/app/components/Header/OnboardingExplore/featureItem.css @@ -1,5 +1,5 @@ .wrapper { - padding: 10px 0; + padding: 6px 0; display: flex; align-items: center; } diff --git a/frontend/app/components/Login/Login.js b/frontend/app/components/Login/Login.js index f79b2fc05..bd619bae5 100644 --- a/frontend/app/components/Login/Login.js +++ b/frontend/app/components/Login/Login.js @@ -63,7 +63,7 @@ export default class Login extends React.Component {

Login to OpenReplay

- { tenants.length === 0 &&
Don't have an account?Sign up
} + { tenants.length === 0 &&
Don't have an account? Sign up
}
{ window.ENV.CAPTCHA_ENABLED && ( diff --git a/frontend/app/components/Onboarding/components/OnboardingNavButton/OnboardingNavButton.js b/frontend/app/components/Onboarding/components/OnboardingNavButton/OnboardingNavButton.js index 0a048e3cb..d9d24c7df 100644 --- a/frontend/app/components/Onboarding/components/OnboardingNavButton/OnboardingNavButton.js +++ b/frontend/app/components/Onboarding/components/OnboardingNavButton/OnboardingNavButton.js @@ -5,6 +5,7 @@ import { Button } from 'UI' import { OB_TABS, onboarding as onboardingRoute } from 'App/routes' import * as routes from '../../../../routes' import { sessions } from 'App/routes'; +import { setOnboarding } from 'Duck/user'; const withSiteId = routes.withSiteId; const MENU_ITEMS = [OB_TABS.INSTALLING, OB_TABS.IDENTIFY_USERS, OB_TABS.MANAGE_USERS, OB_TABS.INTEGRATIONS] @@ -25,9 +26,14 @@ const OnboardingNavButton = (props) => { const tab = MENU_ITEMS[activeIndex+1] history.push(withSiteId(onboardingRoute(tab), siteId)); } else { - history.push(sessions()); + onDone() } } + + const onDone = () => { + props.setOnboarding(false); + history.push(sessions()); + } return ( <> @@ -35,7 +41,7 @@ const OnboardingNavButton = (props) => { primary size="small" plain - onClick={() => history.push(sessions())} + onClick={onDone} > {activeIndex === 0 ? 'Done. See Recorded Sessions' : 'Skip Optional Steps and See Recorded Sessions'} @@ -53,4 +59,4 @@ const OnboardingNavButton = (props) => { ) } -export default withRouter(OnboardingNavButton) \ No newline at end of file +export default withRouter(connect(null, { setOnboarding })(OnboardingNavButton)) \ No newline at end of file diff --git a/frontend/app/components/Onboarding/components/OnboardingTabs/ProjectCodeSnippet/ProjectCodeSnippet.js b/frontend/app/components/Onboarding/components/OnboardingTabs/ProjectCodeSnippet/ProjectCodeSnippet.js index a093c4f94..755718ef0 100644 --- a/frontend/app/components/Onboarding/components/OnboardingTabs/ProjectCodeSnippet/ProjectCodeSnippet.js +++ b/frontend/app/components/Onboarding/components/OnboardingTabs/ProjectCodeSnippet/ProjectCodeSnippet.js @@ -28,9 +28,10 @@ const codeSnippet = ` r.setMetadata=function(k,v){r.push([4,k,v])}; r.event=function(k,p,i){r.push([5,k,p,i])}; r.issue=function(k,p){r.push([6,k,p])}; - r.isActive=r.active=function(){return false}; - r.getSessionToken=r.sessionID=function(){}; -})(0,PROJECT_HASH,"//${window.location.hostname}/static/openreplay.js",1,XXX); + r.isActive=function(){return false}; + r.getSessionToken=function(){}; + r.i="https://${window.location.hostname}/ingest"; +})(0, "PROJECT_KEY", "//static.openreplay.com/${window.ENV.TRACKER_VERSION}/openreplay.js",1,XXX); `; diff --git a/frontend/app/components/Session_/Issues/IssueDetails.js b/frontend/app/components/Session_/Issues/IssueDetails.js index 111dd15fe..f91f0ad73 100644 --- a/frontend/app/components/Session_/Issues/IssueDetails.js +++ b/frontend/app/components/Session_/Issues/IssueDetails.js @@ -14,9 +14,9 @@ class IssueDetails extends React.PureComponent { write = (e, { name, value }) => this.setState({ [ name ]: value }); render() { - const { sessionId, issue, loading, users, issueTypeIcons, provider } = this.props; + const { sessionId, issue, loading, users, issueTypeIcons, issuesIntegration } = this.props; const activities = issue.activities; - + const provider = issuesIntegration.provider; const assignee = users.filter(({id}) => issue.assignee === id).first(); return ( @@ -53,5 +53,5 @@ export default connect(state => ({ users: state.getIn(['assignments', 
'users']), loading: state.getIn(['assignments', 'fetchAssignment', 'loading']), issueTypeIcons: state.getIn(['assignments', 'issueTypeIcons']), - provider: state.getIn([ 'issues', 'list']).provider, + issuesIntegration: state.getIn([ 'issues', 'list']).first() || {}, }))(IssueDetails); diff --git a/frontend/app/components/Session_/Issues/IssueForm.js b/frontend/app/components/Session_/Issues/IssueForm.js index f305c3ee7..81dda0504 100644 --- a/frontend/app/components/Session_/Issues/IssueForm.js +++ b/frontend/app/components/Session_/Issues/IssueForm.js @@ -7,7 +7,8 @@ import { addActivity, init, edit, fetchAssignments, fetchMeta } from 'Duck/assig const SelectedValue = ({ icon, text }) => { return(
- + {/* */} + { icon } { text }
) @@ -37,7 +38,7 @@ class IssueForm extends React.PureComponent { addActivity(sessionId, instance).then(() => { const { errors } = this.props; - if (errors.length === 0) { + if (!errors || errors.length === 0) { this.props.init({projectId: instance.projectId}); this.props.fetchAssignments(sessionId); this.props.closeHandler(); @@ -52,8 +53,9 @@ class IssueForm extends React.PureComponent { const { creating, projects, users, issueTypes, instance, closeHandler, metaLoading } = this.props; const projectOptions = projects.map(({name, id}) => ({text: name, value: id })).toArray(); const userOptions = users.map(({name, id}) => ({text: name, value: id })).toArray(); - const issueTypeOptions = issueTypes.map(({name, id, iconUrl }) => { - return {text: name, value: id, iconUrl, icon: } + + const issueTypeOptions = issueTypes.map(({name, id, iconUrl, color }) => { + return {text: name, value: id, iconUrl, color } }).toArray(); const selectedIssueType = issueTypes.filter(issue => issue.id == instance.issueType).first(); @@ -80,6 +82,7 @@ class IssueForm extends React.PureComponent { { {/* */} {/* */}
- + { typeIcon } + {/* */} { issue.id } {/*
{ '@ 00:13 Secs'}
*/} { assignee && diff --git a/frontend/app/components/Session_/Issues/IssueListItem.js b/frontend/app/components/Session_/Issues/IssueListItem.js index 9145ffaa5..51b5bb25c 100644 --- a/frontend/app/components/Session_/Issues/IssueListItem.js +++ b/frontend/app/components/Session_/Issues/IssueListItem.js @@ -11,7 +11,8 @@ const IssueListItem = ({ issue, onClick, icon, user, active }) => { >
- + { icon } + {/* */} { issue.id }
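
For readers tracking the `provider` rewiring in the IssueDetails hunk above (and in Issues just below): a minimal sketch of the new selector shape, assuming the same Immutable.js store used throughout the frontend. The `Map()` fallback here is illustrative only; the diff itself falls back to a plain `{}`.

```js
import { Map } from 'immutable';

// The issues integration state now holds a List of integration config
// records, so the provider is read from the first record instead of
// from a `.provider` field on the List itself.
const selectIssuesIntegration = (state) =>
  state.getIn(['issues', 'list']).first() || Map();

// usage inside connect():
//   issuesIntegration: selectIssuesIntegration(state)
// and in render(), as in the diff (Record field access):
//   const provider = issuesIntegration.provider;
```
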
diff --git a/frontend/app/components/Session_/Issues/Issues.js b/frontend/app/components/Session_/Issues/Issues.js index 66fc80bc8..bba3eaf40 100644 --- a/frontend/app/components/Session_/Issues/Issues.js +++ b/frontend/app/components/Session_/Issues/Issues.js @@ -21,7 +21,7 @@ import stl from './issues.css'; fetchIssueLoading: state.getIn(['assignments', 'fetchAssignment', 'loading']), fetchIssuesLoading: state.getIn(['assignments', 'fetchAssignments', 'loading']), projectsLoading: state.getIn(['assignments', 'fetchProjects', 'loading']), - provider: state.getIn([ 'issues', 'list']).provider, + issuesIntegration: state.getIn([ 'issues', 'list']).first() || {}, }), { fetchAssigment, fetchAssignments, fetchMeta, fetchProjects }) @withToggle('isModalDisplayed', 'toggleModal') class Issues extends React.Component { @@ -64,9 +64,10 @@ class Issues extends React.Component { render() { const { sessionId, activeIssue, isModalDisplayed, projectsLoading, - fetchIssueLoading, issues, metaLoading, fetchIssuesLoading, provider + fetchIssueLoading, issues, metaLoading, fetchIssuesLoading, issuesIntegration } = this.props; const { showModal } = this.state; + const provider = issuesIntegration.provider return (
diff --git a/frontend/app/components/Session_/Network/Network.js b/frontend/app/components/Session_/Network/Network.js index b3ea3dafb..4c30e6f32 100644 --- a/frontend/app/components/Session_/Network/Network.js +++ b/frontend/app/components/Session_/Network/Network.js @@ -158,30 +158,30 @@ export default class Network extends React.PureComponent { let filtered = resources.filter(({ type, name }) => filterRE.test(name) && (activeTab === ALL || type === TAB_TO_TYPE_MAP[ activeTab ])); - const referenceLines = []; - if (domContentLoadedTime != null) { - referenceLines.push({ - time: domContentLoadedTime, - color: DOM_LOADED_TIME_COLOR, - }) - } - if (loadTime != null) { - referenceLines.push({ - time: loadTime, - color: LOAD_TIME_COLOR, - }) - } - - let tabs = TABS; - if (!fetchPresented) { - tabs = TABS.map(tab => tab.key === XHR - ? { - text: renderXHRText(), - key: XHR, - } - : tab - ); - } +// const referenceLines = []; +// if (domContentLoadedTime != null) { +// referenceLines.push({ +// time: domContentLoadedTime, +// color: DOM_LOADED_TIME_COLOR, +// }) +// } +// if (loadTime != null) { +// referenceLines.push({ +// time: loadTime, +// color: LOAD_TIME_COLOR, +// }) +// } +// +// let tabs = TABS; +// if (!fetchPresented) { +// tabs = TABS.map(tab => tab.key === XHR +// ? { +// text: renderXHRText(), +// key: XHR, +// } +// : tab +// ); +// } const resourcesSize = filtered.reduce((sum, { decodedBodySize }) => sum + (decodedBodySize || 0), 0); const transferredSize = filtered diff --git a/frontend/app/components/Session_/Network/NetworkContent.js b/frontend/app/components/Session_/Network/NetworkContent.js index 1f10eaffe..7eeebf538 100644 --- a/frontend/app/components/Session_/Network/NetworkContent.js +++ b/frontend/app/components/Session_/Network/NetworkContent.js @@ -168,13 +168,13 @@ export default class NetworkContent extends React.PureComponent { const referenceLines = []; if (domContentLoadedTime != null) { referenceLines.push({ - time: domContentLoadedTime, + time: domContentLoadedTime.time, color: DOM_LOADED_TIME_COLOR, }) } if (loadTime != null) { referenceLines.push({ - time: loadTime, + time: loadTime.time, color: LOAD_TIME_COLOR, }) } @@ -239,13 +239,13 @@ export default class NetworkContent extends React.PureComponent { /> diff --git a/frontend/app/components/Session_/Player/Controls/Timeline.js b/frontend/app/components/Session_/Player/Controls/Timeline.js index aeab1af64..3589be148 100644 --- a/frontend/app/components/Session_/Player/Controls/Timeline.js +++ b/frontend/app/components/Session_/Player/Controls/Timeline.js @@ -18,12 +18,14 @@ const getPointerIcon = (type) => { case 'log': return 'funnel/exclamation-circle'; case 'stack': - return 'funnel/file-exclamation'; + return 'funnel/patch-exclamation-fill'; case 'resource': return 'funnel/file-medical-alt'; case 'dead_click': return 'funnel/dizzy'; + case 'click_rage': + return 'funnel/dizzy'; case 'excessive_scrolling': return 'funnel/mouse'; case 'bad_request': @@ -61,6 +63,7 @@ const getPointerIcon = (type) => { fetchList: state.fetchList, })) @connect(state => ({ + issues: state.getIn([ 'sessions', 'current', 'issues' ]), showDevTools: state.getIn([ 'user', 'account', 'appearance', 'sessionsDevtools' ]), clickRageTime: state.getIn([ 'sessions', 'current', 'clickRage' ]) && state.getIn([ 'sessions', 'current', 'clickRageTime' ]), @@ -95,6 +98,7 @@ export default class Timeline extends React.PureComponent { clickRageTime, stackList, fetchList, + issues } = this.props; const scale = 100 / endTime; @@ -124,6 +128,28 
@@ export default class Timeline extends React.PureComponent { /> )) } + { + issues.map(iss => ( +
+ + { iss.name } +
+ } + /> +
+ )) + } { events.filter(e => e.type === TYPES.CLICKRAGE).map(e => (
- { "Exception:" } + { "Exception" }
{ e.message }
@@ -278,7 +304,7 @@ export default class Timeline extends React.PureComponent { icon={getPointerIcon('log')} content={
- { "Console:" } + { "Console" }
{ l.value }
@@ -380,7 +406,7 @@ export default class Timeline extends React.PureComponent { icon={getPointerIcon('fetch')} content={
- { "Failed Fetch:" } + { "Failed Fetch" }
{ e.name }
@@ -421,7 +447,7 @@ export default class Timeline extends React.PureComponent { icon={getPointerIcon('stack')} content={
- { "Stack Event:" } + { "Stack Event" }
{ e.name }
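
The Timeline hunks above add one pointer per session issue, positioned with the same `scale = 100 / endTime` used for the other markers. A self-contained sketch of that positioning logic follows; the helper name and return shape are illustrative, not from the diff.

```js
// Convert issue timestamps into marker props for the timeline track.
// `issues` comes from state ['sessions', 'current', 'issues']; each entry
// carries a `time` relative to session start plus a display `name`/`type`.
function toTimelineMarkers(issues, endTime, getPointerIcon) {
  const scale = 100 / endTime; // percent of track width per millisecond
  return issues.map((iss) => ({
    left: Math.max(0, Math.min(100, iss.time * scale)), // clamp to the track
    icon: getPointerIcon(iss.type), // e.g. 'funnel/dizzy' for click_rage
    label: iss.name,
  }));
}
```
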
diff --git a/frontend/app/components/Session_/StackEvents/UserEvent/UserEvent.js b/frontend/app/components/Session_/StackEvents/UserEvent/UserEvent.js index dc58d8b81..93a901f0a 100644 --- a/frontend/app/components/Session_/StackEvents/UserEvent/UserEvent.js +++ b/frontend/app/components/Session_/StackEvents/UserEvent/UserEvent.js @@ -46,7 +46,7 @@ export default class UserEvent extends React.PureComponent { case STACKDRIVER: return ; default: - return ; + return ; } } diff --git a/frontend/app/components/Signup/SignupForm/SignupForm.js b/frontend/app/components/Signup/SignupForm/SignupForm.js index b5ea500f3..0a5de9507 100644 --- a/frontend/app/components/Signup/SignupForm/SignupForm.js +++ b/frontend/app/components/Signup/SignupForm/SignupForm.js @@ -137,7 +137,7 @@ export default class SignupForm extends React.Component {
-
By creating an account, you agree to our Terms of Service and Privacy Policy
+
By creating an account, you agree to our Terms of Service and Privacy Policy.
diff --git a/frontend/app/components/shared/BannerMessage/BannerMessage.js b/frontend/app/components/shared/BannerMessage/BannerMessage.js new file mode 100644 index 000000000..d1d66b991 --- /dev/null +++ b/frontend/app/components/shared/BannerMessage/BannerMessage.js @@ -0,0 +1,28 @@ +import React from 'react' +import { Icon } from 'UI' + +const BannerMessage= (props) => { + const { icon = 'info-circle', children } = props; + + return ( + <> +
+
+
+
+ +
+
+ {children} +
+
+
+
+ + ) +} + +export default BannerMessage; \ No newline at end of file diff --git a/frontend/app/components/shared/BannerMessage/index.js b/frontend/app/components/shared/BannerMessage/index.js new file mode 100644 index 000000000..4d6ad92b8 --- /dev/null +++ b/frontend/app/components/shared/BannerMessage/index.js @@ -0,0 +1 @@ +export { default } from './BannerMessage' \ No newline at end of file diff --git a/frontend/app/components/shared/DateRange.js b/frontend/app/components/shared/DateRange.js index 83feae0d8..7b627ab28 100644 --- a/frontend/app/components/shared/DateRange.js +++ b/frontend/app/components/shared/DateRange.js @@ -2,7 +2,7 @@ import { connect } from 'react-redux'; import DateRangeDropdown from 'Shared/DateRangeDropdown'; function DateRange (props) { - const { startDate, endDate, rangeValue, className, onDateChange } = props; + const { startDate, endDate, rangeValue, className, onDateChange, customRangeRight=false } = props; return ( ); } diff --git a/frontend/app/components/shared/DateRangeDropdown/DateRangeDropdown.js b/frontend/app/components/shared/DateRangeDropdown/DateRangeDropdown.js index 4165506d4..f29d47745 100644 --- a/frontend/app/components/shared/DateRangeDropdown/DateRangeDropdown.js +++ b/frontend/app/components/shared/DateRangeDropdown/DateRangeDropdown.js @@ -66,7 +66,7 @@ export default class DateRangeDropdown extends React.PureComponent { } render() { - const { button = false, className, direction = 'right', customHidden=false, show30Minutes=false } = this.props; + const { customRangeRight, button = false, className, direction = 'right', customHidden=false, show30Minutes=false } = this.props; const { showDateRangePopup, value, range } = this.state; let options = getDateRangeOptions(range); @@ -108,7 +108,7 @@ export default class DateRangeDropdown extends React.PureComponent { { showDateRangePopup && -
+
{ @@ -9,7 +9,10 @@ export default function DocLink({ className = '', url, label }) { return (
) diff --git a/frontend/app/components/shared/IntegrateSlackButton/IntegrateSlackButton.js b/frontend/app/components/shared/IntegrateSlackButton/IntegrateSlackButton.js new file mode 100644 index 000000000..c308f33bf --- /dev/null +++ b/frontend/app/components/shared/IntegrateSlackButton/IntegrateSlackButton.js @@ -0,0 +1,26 @@ +import React from 'react' +import { connect } from 'react-redux' +import { IconButton } from 'UI' +import { CLIENT_TABS, client as clientRoute } from 'App/routes'; +import { withRouter } from 'react-router-dom'; + +function IntegrateSlackButton({ history, tenantId }) { + const gotoPreferencesIntegrations = () => { + history.push(clientRoute(CLIENT_TABS.INTEGRATIONS)); + } + + return ( +
+ +
+ ) +} + +export default withRouter(connect(state => ({ + tenantId: state.getIn([ 'user', 'client', 'tenantId' ]), +}))(IntegrateSlackButton)) diff --git a/frontend/app/components/shared/IntegrateSlackButton/index.js b/frontend/app/components/shared/IntegrateSlackButton/index.js new file mode 100644 index 000000000..f2f8f2e16 --- /dev/null +++ b/frontend/app/components/shared/IntegrateSlackButton/index.js @@ -0,0 +1 @@ +export { default } from './IntegrateSlackButton' \ No newline at end of file diff --git a/frontend/app/components/shared/NoSessionsMessage/NoSessionsMessage.js b/frontend/app/components/shared/NoSessionsMessage/NoSessionsMessage.js index a3011db44..cee55088b 100644 --- a/frontend/app/components/shared/NoSessionsMessage/NoSessionsMessage.js +++ b/frontend/app/components/shared/NoSessionsMessage/NoSessionsMessage.js @@ -23,7 +23,7 @@ const NoSessionsMessage= (props) => {
- It takes a few minutes for first recordings to appear. All set but they are still not showing up? Check our troubleshooting section. + It takes a few minutes for first recordings to appear. All set but they are still not showing up? Check our troubleshooting section.
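
The new IntegrateSlackButton is a drop-in connected component; the real wiring lands in SharePopup in the next hunk. A hypothetical consumer might gate it on the absence of configured channels, assuming the `Shared/` import alias used elsewhere in this diff:

```js
import React from 'react';
import IntegrateSlackButton from 'Shared/IntegrateSlackButton';

// Illustrative fallback: if no Slack channels are configured yet, show
// the button that routes to Preferences > Integrations; otherwise render
// the normal share controls passed in as children.
const ShareTargets = ({ channels, children }) =>
  channels.size === 0 ? <IntegrateSlackButton /> : children;

export default ShareTargets;
```
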
diff --git a/frontend/app/components/shared/SharePopup/SharePopup.js b/frontend/app/components/shared/SharePopup/SharePopup.js index 845229034..347a95733 100644 --- a/frontend/app/components/shared/SharePopup/SharePopup.js +++ b/frontend/app/components/shared/SharePopup/SharePopup.js @@ -4,6 +4,7 @@ import withRequest from 'HOCs/withRequest'; import { Popup, Dropdown, Icon, IconButton } from 'UI'; import { pause } from 'Player'; import styles from './sharePopup.css'; +import IntegrateSlackButton from '../IntegrateSlackButton/IntegrateSlackButton'; @connect(state => ({ channels: state.getIn([ 'slack', 'list' ]), @@ -18,7 +19,7 @@ export default class SharePopup extends React.PureComponent { state = { comment: '', isOpen: false, - channelId: this.props.channels.getIn([ 0, 'id' ]), + channelId: this.props.channels.getIn([ 0, 'webhookId' ]), } editMessage = e => this.setState({ comment: e.target.value }) @@ -45,10 +46,10 @@ export default class SharePopup extends React.PureComponent { changeChannel = (e, { value }) => this.setState({ channelId: value }) render() { - const { trigger, loading, channels, tenantId } = this.props; + const { trigger, loading, channels } = this.props; const { comment, isOpen, channelId } = this.state; - const options = channels.map(({ id, name }) => ({ value: id, text: name })).toJS(); + const options = channels.map(({ webhookId, name }) => ({ value: webhookId, text: name })).toJS(); return ( { options.length === 0 ?
- - - +
:
diff --git a/frontend/app/components/shared/TrackingCodeModal/ProjectCodeSnippet/ProjectCodeSnippet.js b/frontend/app/components/shared/TrackingCodeModal/ProjectCodeSnippet/ProjectCodeSnippet.js index bacb71bfe..350378cf4 100644 --- a/frontend/app/components/shared/TrackingCodeModal/ProjectCodeSnippet/ProjectCodeSnippet.js +++ b/frontend/app/components/shared/TrackingCodeModal/ProjectCodeSnippet/ProjectCodeSnippet.js @@ -27,9 +27,10 @@ const codeSnippet = ` r.setMetadata=function(k,v){r.push([4,k,v])}; r.event=function(k,p,i){r.push([5,k,p,i])}; r.issue=function(k,p){r.push([6,k,p])}; - r.isActive=r.active=function(){return false}; - r.getSessionToken=r.sessionID=function(){}; -})(0,PROJECT_HASH,"//${window.location.hostname}/static/openreplay.js",1,XXX); + r.isActive=function(){return false}; + r.getSessionToken=function(){}; + r.i="https://${window.location.hostname}/ingest"; +})(0, "PROJECT_KEY", "//static.openreplay.com/${window.ENV.TRACKER_VERSION}/openreplay.js",1,XXX); `; diff --git a/frontend/app/components/ui/ErrorDetails/ErrorDetails.js b/frontend/app/components/ui/ErrorDetails/ErrorDetails.js index 68f48cb82..2a6afdd1e 100644 --- a/frontend/app/components/ui/ErrorDetails/ErrorDetails.js +++ b/frontend/app/components/ui/ErrorDetails/ErrorDetails.js @@ -4,7 +4,7 @@ import cn from 'classnames'; import { IconButton, Icon } from 'UI'; import { connect } from 'react-redux'; -const docLink = 'https://docs.openreplay.com/plugins/sourcemaps'; +const docLink = 'https://docs.openreplay.com/installation/upload-sourcemaps'; function ErrorDetails({ className, name = "Error", message, errorStack, sourcemapUploaded }) { const [showRaw, setShowRaw] = useState(false) diff --git a/frontend/app/duck/assignments.js b/frontend/app/duck/assignments.js index c6c7d3cda..b48e2ff45 100644 --- a/frontend/app/duck/assignments.js +++ b/frontend/app/duck/assignments.js @@ -5,6 +5,7 @@ import withRequestState, { RequestTypes } from './requestStateCreator'; import { createListUpdater, createItemInListUpdater } from './funcTools/tools'; import { editType, initType } from './funcTools/crud/types'; import { createInit, createEdit } from './funcTools/crud'; +import IssuesType from 'Types/issue/issuesType' const idKey = 'id'; const name = 'assignment'; @@ -41,17 +42,20 @@ const reducer = (state = initialState, action = {}) => { return state.mergeIn([ 'instance' ], action.instance); case FETCH_PROJECTS.SUCCESS: return state.set('projects', List(action.data)).set('projectsFetched', true); - case FETCH_ASSIGNMENTS.SUCCESS: - return state.set('list', List(action.data).map(Assignment)); + case FETCH_ASSIGNMENTS.SUCCESS: + return state.set('list', List(action.data.issues).map(Assignment)); case FETCH_ASSIGNMENT.SUCCESS: return state.set('activeIssue', Assignment({ ...action.data, users})); case FETCH_META.SUCCESS: - issueTypes = action.data.issueTypes; + issueTypes = List(action.data.issueTypes).map(IssuesType); var issueTypeIcons = {} - for (var i =0; i < issueTypes.length; i++) { - issueTypeIcons[issueTypes[i].id] = issueTypes[i].iconUrl - } - return state.set('issueTypes', List(issueTypes)) + // for (var i =0; i < issueTypes.length; i++) { + // issueTypeIcons[issueTypes[i].id] = issueTypes[i].iconUrl + // } + issueTypes.forEach(iss => { + issueTypeIcons[iss.id] = iss.iconUrl + }) + return state.set('issueTypes', issueTypes) .set('users', List(action.data.users)) .set('issueTypeIcons', issueTypeIcons) case ADD_ACTIVITY.SUCCESS: diff --git a/frontend/app/duck/integrations/slack.js 
b/frontend/app/duck/integrations/slack.js index 1d59bc16b..e4c2803ff 100644 --- a/frontend/app/duck/integrations/slack.js +++ b/frontend/app/duck/integrations/slack.js @@ -4,6 +4,7 @@ import Config from 'Types/integrations/slackConfig'; import { createItemInListUpdater } from '../funcTools/tools'; const SAVE = new RequestTypes('slack/SAVE'); +const UPDATE = new RequestTypes('slack/UPDATE'); const REMOVE = new RequestTypes('slack/REMOVE'); const FETCH_LIST = new RequestTypes('slack/FETCH_LIST'); const EDIT = 'slack/EDIT'; @@ -20,6 +21,7 @@ const reducer = (state = initialState, action = {}) => { switch (action.type) { case FETCH_LIST.SUCCESS: return state.set('list', List(action.data).map(Config)); + case UPDATE.SUCCESS: case SAVE.SUCCESS: const config = Config(action.data); return state @@ -57,6 +59,13 @@ export function save(instance) { }; } +export function update(instance) { + return { + types: UPDATE.toArray(), + call: client => client.put(`/integrations/slack/${instance.webhookId}`, instance.toData()), + }; +} + export function edit(instance) { return { type: EDIT, diff --git a/frontend/app/duck/user.js b/frontend/app/duck/user.js index f7ed2b1a2..deb41e715 100644 --- a/frontend/app/duck/user.js +++ b/frontend/app/duck/user.js @@ -19,6 +19,7 @@ const PUT_CLIENT = new RequestTypes('user/PUT_CLIENT'); const PUSH_NEW_SITE = 'user/PUSH_NEW_SITE'; const SET_SITE_ID = 'user/SET_SITE_ID'; +const SET_ONBOARDING = 'user/SET_ONBOARDING'; const SITE_ID_STORAGE_KEY = "__$user-siteId$__"; const storedSiteId = localStorage.getItem(SITE_ID_STORAGE_KEY); @@ -29,7 +30,8 @@ const initialState = Map({ siteId: null, passwordRequestError: false, passwordErrors: List(), - tenants: [] + tenants: [], + onboarding: false }); const setClient = (state, data) => { @@ -47,17 +49,21 @@ const setClient = (state, data) => { const reducer = (state = initialState, action = {}) => { switch (action.type) { - case SIGNUP.SUCCESS: + case UPDATE_PASSWORD.SUCCESS: case LOGIN.SUCCESS: return setClient( state.set('account', Account(action.data.user)), action.data.client, ); + case SIGNUP.SUCCESS: + return setClient( + state.set('account', Account(action.data.user)), + action.data.client, + ).set('onboarding', true); case REQUEST_RESET_PASSWORD.SUCCESS: break; case UPDATE_APPEARANCE.REQUEST: //TODO: failure handling return state.mergeIn([ 'account', 'appearance' ], action.appearance) - case UPDATE_PASSWORD.SUCCESS: case UPDATE_ACCOUNT.SUCCESS: case FETCH_ACCOUNT.SUCCESS: return state.set('account', Account(action.data)).set('passwordErrors', List()); @@ -77,6 +83,8 @@ const reducer = (state = initialState, action = {}) => { case PUSH_NEW_SITE: return state.updateIn([ 'client', 'sites' ], list => list.push(action.newSite)); + case SET_ONBOARDING: + return state.set('onboarding', action.state) } return state; }; @@ -187,3 +195,11 @@ export function pushNewSite(newSite) { newSite, }; } + +export function setOnboarding(state = false) { + return { + type: SET_ONBOARDING, + state + }; +} + diff --git a/frontend/app/player/MessageDistributor/MessageDistributor.js b/frontend/app/player/MessageDistributor/MessageDistributor.js index fa3641b7a..c21d54ccb 100644 --- a/frontend/app/player/MessageDistributor/MessageDistributor.js +++ b/frontend/app/player/MessageDistributor/MessageDistributor.js @@ -10,7 +10,7 @@ import ReduxAction from 'Types/session/reduxAction'; import { update } from '../store'; import { - init as initLists, + init as initListsDepr, append as listAppend, setStartTime as setListsStartTime } from '../lists'; @@ -43,6 
+43,14 @@ export const INITIAL_STATE = { skipIntervals: [], } +function initLists() { + const lists = {}; + for (var i = 0; i < LIST_NAMES.length; i++) { + lists[ LIST_NAMES[i] ] = new ListWalker(); + } + return lists; +} + import type { Message, @@ -78,16 +86,7 @@ export default class MessageDistributor extends StatedScreen { #scrollManager: ListWalker = new ListWalker(); #decoder = new Decoder(); - #lists = { - redux: new ListWalker(), - mobx: new ListWalker(), - vuex: new ListWalker(), - ngrx: new ListWalker(), - graphql: new ListWalker(), - exceptions: new ListWalker(), - profiles: new ListWalker(), - longtasks: new ListWalker(), - } + #lists = initLists(); #activirtManager: ActivityManager; @@ -106,7 +105,7 @@ export default class MessageDistributor extends StatedScreen { /* == REFACTOR_ME == */ const eventList = sess.events.toJSON(); - initLists({ + initListsDepr({ event: eventList, stack: sess.stackEvents.toJSON(), resource: sess.resources.toJSON(), @@ -236,10 +235,16 @@ export default class MessageDistributor extends StatedScreen { const llEvent = this.#locationEventManager.moveToLast(t, index); if (!!llEvent) { if (llEvent.domContentLoadedTime != null) { - stateToUpdate.domContentLoadedTime = llEvent.domContentLoadedTime + this.#navigationStartOffset; + stateToUpdate.domContentLoadedTime = { + time: llEvent.domContentLoadedTime + this.#navigationStartOffset, //TODO: predefined list of load event for the network tab (merge events & setLocation: add navigationStart to db) + value: llEvent.domContentLoadedTime, + } } if (llEvent.loadTime != null) { - stateToUpdate.loadTime = llEvent.domContentLoadedTime + this.#navigationStartOffset + stateToUpdate.loadTime = { + time: llEvent.loadTime + this.#navigationStartOffset, + value: llEvent.loadTime, + } } if (llEvent.domBuildingTime != null) { stateToUpdate.domBuildingTime = llEvent.domBuildingTime; diff --git a/frontend/app/svg/icons/funnel/cpu.svg b/frontend/app/svg/icons/funnel/cpu.svg new file mode 100644 index 000000000..f4922a33f --- /dev/null +++ b/frontend/app/svg/icons/funnel/cpu.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/frontend/app/svg/icons/funnel/dizzy.svg b/frontend/app/svg/icons/funnel/dizzy.svg new file mode 100644 index 000000000..4f026cd64 --- /dev/null +++ b/frontend/app/svg/icons/funnel/dizzy.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/app/svg/icons/funnel/emoji-angry.svg b/frontend/app/svg/icons/funnel/emoji-angry.svg new file mode 100644 index 000000000..e9c147cb9 --- /dev/null +++ b/frontend/app/svg/icons/funnel/emoji-angry.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/frontend/app/svg/icons/funnel/file-earmark-break.svg b/frontend/app/svg/icons/funnel/file-earmark-break.svg new file mode 100644 index 000000000..244e6b211 --- /dev/null +++ b/frontend/app/svg/icons/funnel/file-earmark-break.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/frontend/app/svg/icons/funnel/image.svg b/frontend/app/svg/icons/funnel/image.svg new file mode 100644 index 000000000..36bd4649f --- /dev/null +++ b/frontend/app/svg/icons/funnel/image.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/frontend/app/svg/icons/funnel/sd-card.svg b/frontend/app/svg/icons/funnel/sd-card.svg index 8d4991cb1..4e55e699b 100644 --- a/frontend/app/svg/icons/funnel/sd-card.svg +++ b/frontend/app/svg/icons/funnel/sd-card.svg @@ -1 +1,4 @@ - \ No newline at end of file + + + + \ No newline at end of file diff --git a/frontend/app/types/account/account.js 
b/frontend/app/types/account/account.js index 42b0e9ac3..fa672dfcc 100644 --- a/frontend/app/types/account/account.js +++ b/frontend/app/types/account/account.js @@ -10,6 +10,7 @@ export default Member.extend({ banner: undefined, email: '', verifiedEmail: undefined, + smtp: false, license: '', expirationDate: undefined, }, { diff --git a/frontend/app/types/integrations/issueTracker.js b/frontend/app/types/integrations/issueTracker.js index ef53b01f1..c03ed1005 100644 --- a/frontend/app/types/integrations/issueTracker.js +++ b/frontend/app/types/integrations/issueTracker.js @@ -7,7 +7,6 @@ export const ACCESS_KEY_ID_LENGTH = 20; export default Record({ username: undefined, token: undefined, - provider: undefined, url: undefined, provider: 'jira' }, { diff --git a/frontend/app/types/issue/issuesType.js b/frontend/app/types/issue/issuesType.js index 1f0679d5a..c40864bea 100644 --- a/frontend/app/types/issue/issuesType.js +++ b/frontend/app/types/issue/issuesType.js @@ -2,7 +2,16 @@ import Record from 'Types/Record'; export default Record({ id: undefined, + color: undefined, + description: '', name: undefined, iconUrl: undefined }, { + fromJS: ({ iconUrl, color, ...issueType }) => ({ + ...issueType, + color, + iconUrl: iconUrl ? + : +
, + }), }) diff --git a/frontend/app/types/session/issue.js b/frontend/app/types/session/issue.js new file mode 100644 index 000000000..87878b6bb --- /dev/null +++ b/frontend/app/types/session/issue.js @@ -0,0 +1,43 @@ +import Record from 'Types/Record'; +import { List } from 'immutable'; +import Watchdog from 'Types/watchdog' + +export const issues_types = List([ + { 'type': 'js_exception', 'visible': true, 'order': 0, 'name': 'Errors', 'icon': 'funnel/exclamation-circle' }, + { 'type': 'bad_request', 'visible': true, 'order': 1, 'name': 'Bad Requests', 'icon': 'funnel/file-medical-alt' }, + { 'type': 'missing_resource', 'visible': true, 'order': 2, 'name': 'Missing Images', 'icon': 'funnel/image' }, + { 'type': 'click_rage', 'visible': true, 'order': 3, 'name': 'Click Rage', 'icon': 'funnel/dizzy' }, + { 'type': 'dead_click', 'visible': true, 'order': 4, 'name': 'Dead Clicks', 'icon': 'funnel/emoji-angry' }, + { 'type': 'memory', 'visible': true, 'order': 5, 'name': 'High Memory', 'icon': 'funnel/sd-card' }, + { 'type': 'cpu', 'visible': true, 'order': 6, 'name': 'High CPU', 'icon': 'funnel/cpu' }, + { 'type': 'crash', 'visible': true, 'order': 7, 'name': 'Crashes', 'icon': 'funnel/file-earmark-break' }, + { 'type': 'custom', 'visible': false, 'order': 8, 'name': 'Custom', 'icon': 'funnel/exclamation-circle' } +]).map(Watchdog) + +export const issues_types_map = {} +issues_types.forEach(i => { + issues_types_map[i.type] = { type: i.type, visible: i.visible, order: i.order, name: i.name, } +}); + +export default Record({ + issueId: undefined, + name: '', + visible: true, + sessionId: undefined, + time: undefined, + seqIndex: undefined, + payload: {}, + projectId: undefined, + type: '', + contextString: '', + context: '', + icon: 'info' +}, { + idKey: 'issueId', + fromJS: ({ type, ...rest }) => ({ + ...rest, + type, + icon: issues_types_map[type].icon, + name: issues_types_map[type].name, + }), +}); diff --git a/frontend/app/types/session/session.js b/frontend/app/types/session/session.js index daa2f4ae0..132afcc7d 100644 --- a/frontend/app/types/session/session.js +++ b/frontend/app/types/session/session.js @@ -7,7 +7,7 @@ import StackEvent from './stackEvent'; import Resource from './resource'; import CustomField from './customField'; import SessionError from './error'; - +import Issue from './issue'; const SOURCE_JS = 'js_exception'; @@ -66,6 +66,7 @@ export default Record({ errorsCount: 0, watchdogs: [], issueTypes: [], + issues: [], userDeviceHeapSize: 0, userDeviceMemorySize: 0, errors: List(), @@ -80,6 +81,7 @@ export default Record({ projectId, errors, stackEvents = [], + issues = [], ...session }) => { const duration = Duration.fromMillis(session.duration < 1000 ? 
1000 : session.duration); @@ -109,6 +111,10 @@ export default Record({ .map(se => StackEvent({ ...se, time: se.timestamp - startedAt })); const exceptions = List(errors) .map(SessionError) + + const issuesList = List(issues) + .map(e => Issue({ ...e, time: e.timestamp - startedAt })) + return { ...session, isIOS: session.platform === "ios", @@ -128,6 +134,7 @@ userNumericHash: hashString(session.userId || session.userAnonymousId || session.userUuid || ""), userDisplayName: session.userId || session.userAnonymousId || 'Anonymous User', firstResourceTime, + issues: issuesList, }; }, idKey: "sessionId", diff --git a/frontend/app/types/watchdog.js b/frontend/app/types/watchdog.js index 76d874c64..fdc30dfb2 100644 --- a/frontend/app/types/watchdog.js +++ b/frontend/app/types/watchdog.js @@ -22,7 +22,6 @@ const WATCHDOG_TYPES = [ ] export const names = { - // 'all' : { label: 'All', icon: 'all' }, 'js_exception' : { label: 'JS Exceptions', icon: 'funnel/exclamation-circle' }, 'bad_request': { label: 'Bad Request', icon: 'funnel/patch-exclamation-fill' }, 'missing_resource': { label: 'Missing Resources', icon: 'funnel/image-fill' }, @@ -33,13 +32,6 @@ 'cpu': { label: 'CPU', icon: 'funnel/hdd-fill' }, 'dead_click': { label: 'Dead Click', icon: 'funnel/emoji-dizzy-fill' }, 'custom': { label: 'Custom', icon: 'funnel/exclamation-circle-fill' }, - - // 'errors' : { label: 'Errors', icon: 'console/error' }, - // 'missing_image': { label: 'Missing Images', icon: 'image' }, - // 'slow_session': { label: 'Slow Sessions', icon: 'turtle' }, - // 'high_engagement': { label: 'High Engagements', icon: 'high-engagement' }, - // 'performance_issues': { label: 'Mem/CPU Issues', icon: 'tachometer-slowest' }, - // 'default': { label: 'Default', icon: 'window-alt' }, } const CONJUGATED_ISSUE_TYPES = { @@ -93,8 +85,6 @@ } }, fromJS: (item) => ({ - ...item, - name: item.name, - icon: names[item.type] ? names[item.type].icon : 'turtle' + ...item }), }); diff --git a/frontend/env.js b/frontend/env.js index fdc173aeb..7c8c52d2b 100644 --- a/frontend/env.js +++ b/frontend/env.js @@ -13,7 +13,7 @@ const oss = { ORIGIN: () => 'window.location.origin', API_EDP: () => 'window.location.origin + "/api"', ASSETS_HOST: () => 'window.location.origin + "/assets"', - VERSION: '1.0.1', + VERSION: '1.0.0', SOURCEMAP: true, MINIO_ENDPOINT: process.env.MINIO_ENDPOINT, MINIO_PORT: process.env.MINIO_PORT, diff --git a/scripts/helm/README.md b/scripts/helm/README.md index 1a79331a0..4d5c7e54e 100644 --- a/scripts/helm/README.md +++ b/scripts/helm/README.md @@ -1,48 +1,36 @@ -## Helm charts for installing openreplay components. +## Helm charts for installing OpenReplay components Installation components are separated by namespaces. **Namespace:** -- **app:** Core openreplay application related components. - - alert - - auth - - cache +- **app:** Core OpenReplay application-related components. - alerts - assets - chalice - - clickhouse - ender - - events - - failover - - filesink - - filestorage + - sink + - storage - http - integrations - - ios-proxy - - metadata - - negative - - pg-stateless - - pg - - preprocessing - - redis - - ws - **db:** Contains the following databases and backend components. - - kafka + - kafka (ee) - redis - postgresql - - clickhouse + - clickhouse (ee) - minio - - sqs - nfs-server -- **longhorn:** On-Prem storage solution for kubernetes PVs. +- **longhorn:** Storage solution for kubernetes PVs.
- **nginx-ingress:** Nginx ingress for internet traffic to enter the kubernetes cluster. **Scripts:** - **install.sh** Installs OpenReplay on a single node machine, for trial runs / demo. This script is a wrapper around `kube-install.sh` with [k3s](https://k3s.io/) as kubernetes distro. @@ -50,8 +38,8 @@ Installation components are separated by namespaces. - **kube-install.sh:** - Installs openreplay on any given kubernetes cluster. Has 3 configuration types - - small (4cores 8G RAM) + Installs OpenReplay on any given kubernetes cluster. Has 3 configuration types: + - small (2cores 8G RAM) - medium (4cores 16G RAM) - recommended (8cores 32G RAM) diff --git a/scripts/helm/app/README.md b/scripts/helm/app/README.md index e5faed535..a5b73f915 100644 --- a/scripts/helm/app/README.md +++ b/scripts/helm/app/README.md @@ -1,13 +1,14 @@ -## Core Openreplay application configuration folder +## Core OpenReplay application configuration folder - This folder contains configuration for core openreplay apps. All applications share common helm chart named *openreplay* which can be overridden by `.yaml` file. + This folder contains configuration for core OpenReplay apps. All applications share a common helm chart named *openreplay* which can be overridden by a `.yaml` file. **Below is a sample template.** ```yaml - namespace: app # In which namespace alert runs. + namespace: app # In which namespace alerts runs. image: - repository: 998611063711.dkr.ecr.eu-central-1.amazonaws.com/alert # Which image to use + repository: rg.fr-par.scw.cloud/foss # Which image to use + name: alerts pullPolicy: IfNotPresent tag: "latest" # Overrides the image tag whose default is the chart appVersion. @@ -30,7 +31,7 @@ # env vars for the application env: - ALERT_NOTIFICATION_STRING: https://parrot.openreplay.io/alerts/notifications + ALERT_NOTIFICATION_STRING: http://chalice-openreplay.app.svc.cluster.local:8000/alerts/notifications CLICKHOUSE_STRING: tcp://clickhouse.db.svc.cluster.local:9000/default POSTGRES_STRING: postgres://postgresql.db.svc.cluster.local:5432 ``` diff --git a/scripts/helm/app/alerts.yaml b/scripts/helm/app/alerts.yaml index 4fe30c3cc..4bb397526 100644 --- a/scripts/helm/app/alerts.yaml +++ b/scripts/helm/app/alerts.yaml @@ -1,7 +1,7 @@ namespace: app image: repository: rg.fr-par.scw.cloud/foss - name: alert + name: alerts pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion.
tag: "latest" @@ -22,6 +22,6 @@ resources: memory: 128Mi env: - ALERT_NOTIFICATION_STRING: https://parrot.asayer.io/alerts/notifications + ALERT_NOTIFICATION_STRING: http://chalice-openreplay.app.svc.cluster.local:8000/alerts/notifications CLICKHOUSE_STRING: tcp://clickhouse.db.svc.cluster.local:9000/default POSTGRES_STRING: postgres://postgres:asayerPostgres@postgresql.db.svc.cluster.local:5432 diff --git a/scripts/helm/app/assets.yaml b/scripts/helm/app/assets.yaml index 390fe4e07..c7b740e22 100644 --- a/scripts/helm/app/assets.yaml +++ b/scripts/helm/app/assets.yaml @@ -22,8 +22,8 @@ resources: memory: 128Mi env: - ASSETS_ORIGIN: /asayer-sessions-assets # TODO: full path (with the minio prefix) - S3_BUCKET_ASSETS: asayer-sessions-assets + ASSETS_ORIGIN: /sessions-assets # TODO: full path (with the minio prefix) + S3_BUCKET_ASSETS: sessions-assets AWS_ENDPOINT: http://minio.db.svc.cluster.local:9000 AWS_ACCESS_KEY_ID: "minios3AccessKeyS3cr3t" AWS_SECRET_ACCESS_KEY: "m1n10s3CretK3yPassw0rd" diff --git a/scripts/helm/app/chalice.yaml b/scripts/helm/app/chalice.yaml index 9a02adfaa..879bb945f 100644 --- a/scripts/helm/app/chalice.yaml +++ b/scripts/helm/app/chalice.yaml @@ -40,9 +40,6 @@ env: sessions_region: us-east-1 put_S3_TTL: '20' sourcemaps_bucket: sourcemaps - sourcemaps_bucket_key: minios3AccessKeyS3cr3t - sourcemaps_bucket_secret: m1n10s3CretK3yPassw0rd - sourcemaps_bucket_region: us-east-1 js_cache_bucket: sessions-assets async_Token: '' EMAIL_HOST: '' @@ -56,7 +53,7 @@ env: EMAIL_FROM: OpenReplay SITE_URL: '' announcement_url: '' - jwt_secret: SET A RANDOM STRING HERE + jwt_secret: "SetARandomStringHere" jwt_algorithm: HS512 jwt_exp_delta_seconds: '2592000' # Override with your https://domain_name diff --git a/scripts/helm/app/http.yaml b/scripts/helm/app/http.yaml index d2788970c..82342c171 100644 --- a/scripts/helm/app/http.yaml +++ b/scripts/helm/app/http.yaml @@ -22,9 +22,9 @@ resources: memory: 128Mi env: - ASSETS_ORIGIN: /asayer-sessions-assets # TODO: full path (with the minio prefix) + ASSETS_ORIGIN: /sessions-assets # TODO: full path (with the minio prefix) TOKEN_SECRET: secret_token_string # TODO: generate on buld - S3_BUCKET_IMAGES_IOS: asayer-sessions-mobile-assets + S3_BUCKET_IMAGES_IOS: sessions-mobile-assets AWS_ACCESS_KEY_ID: "minios3AccessKeyS3cr3t" AWS_SECRET_ACCESS_KEY: "m1n10s3CretK3yPassw0rd" AWS_REGION: us-east-1 diff --git a/scripts/helm/app/issues.md b/scripts/helm/app/issues.md deleted file mode 100644 index 06a6cb91f..000000000 --- a/scripts/helm/app/issues.md +++ /dev/null @@ -1,76 +0,0 @@ -i [X] alert: - - [X] postgresql app db not found - public.alerts relation doesn't exist -- [X] cache: - - [X] connecting kafka with ssl:// -- [X] events: - - [X] postgresql app db not found - ``` - ERROR: relation "integrations" does not exist (SQLSTATE 42P01) - ``` -- [X] failover: asayer no error logs - - [X] Redis error: NOAUTH Authentication required. - redis cluster should not have password - - [X] Redis has cluster support disabled -- [X] redis-asayer: - - [X] /root/workers/redis/main.go:29: Redis error: no pools available - - [X] /root/workers/pg/main.go:49: Redis error: no cluster slots assigned -- [X] ws-asayer: - - [X] Redis has cluster support disabled -- [X] ender: - - [X] /root/pkg/kafka/consumer.go:95: Consumer error: Subscribed topic not available: ^(raw)$: Broker: Unknown topic or partition - - [X] kafka ssl -- [X] preprocessor: - - [X] kafka ssl -- [X] clickhouse-asayer: - - [X] Table default.sessions doesn't exist. 
-- [ ] puppeteer: - - [ ] Image not found - ``` - repository 998611063711.dkr.ecr.eu-central-1.amazonaws.com/puppeteer-jasmine not found: name unknown: The repository with name 'puppeteer-jasmine' does not exist in the registry with id '998611063711 - Back-off pulling image "998611063711.dkr.ecr.eu-central-1.amazonaws.com/puppeteer-jasmine:latest" - ``` -- [o] negative: - - [X] Clickhouse prepare error: code: 60, message: Table default.negatives_buffer doesn't exist. - - [ ] kafka ssl issue -- [o] metadata: - - [X] code: 60, message: Table default.sessions_metadata doesn't exist. - - [ ] /root/workers/metadata/main.go:96: Consumer Commit error: Local: No offset stored -- [ ] http: - - [ ] /root/pkg/env/worker_id.go:8: Get : unsupported protocol scheme "" -- [o] chalice: - - [X] No code to start - - [X] first install deps - - [X] then install chalice - - [X] sqs without creds - - [ ] do we need dead-runs as aws put failed in deadruns Q - - [ ] do we have to limit for parallel runs / the retries ? - -## Talk with Mehdi and Sacha -- [X] Do we need new app or old -- [X] in new we don't need redis. so what should we do ? - -# 3 new workers - -This is not in prod -kafka-staging take the new by compare with prod - -1. ender sasha -2. pg_stateless sasha -3. http sasha -4. changed preprocessing: david -5. ios proxy: taha - -Application loadbalancer - -domain: ingest.asayer.io - -ingress with ssl termination. - ios proxy ( in ecs ) - oauth - ws - api - http ( sasha ) - -ws lb with ssl: - ingress diff --git a/scripts/helm/app/openreplay/templates/deployment.yaml b/scripts/helm/app/openreplay/templates/deployment.yaml index 12e90714a..a2259a852 100644 --- a/scripts/helm/app/openreplay/templates/deployment.yaml +++ b/scripts/helm/app/openreplay/templates/deployment.yaml @@ -14,8 +14,9 @@ spec: {{- include "openreplay.selectorLabels" . | nindent 6 }} template: metadata: - {{- with .Values.podAnnotations }} annotations: + openreplayRolloutID: {{ randAlphaNum 5 | quote }} # Restart nginx after every deployment + {{- with .Values.podAnnotations }} {{- toYaml . | nindent 8 }} {{- end }} labels: diff --git a/scripts/helm/app/openreplay/values.yaml b/scripts/helm/app/openreplay/values.yaml index e93d4d44d..498f88801 100644 --- a/scripts/helm/app/openreplay/values.yaml +++ b/scripts/helm/app/openreplay/values.yaml @@ -1,4 +1,4 @@ -# Default values for openreplay. +# Default values for OpenReplay. # This is a YAML-formatted file. # Declare variables to be passed into your templates. 
diff --git a/scripts/helm/app/storage.yaml b/scripts/helm/app/storage.yaml index 18890847a..aebc2f3e8 100644 --- a/scripts/helm/app/storage.yaml +++ b/scripts/helm/app/storage.yaml @@ -34,10 +34,10 @@ env: AWS_ENDPOINT: http://minio.db.svc.cluster.local:9000 AWS_ACCESS_KEY_ID: "minios3AccessKeyS3cr3t" AWS_SECRET_ACCESS_KEY: "m1n10s3CretK3yPassw0rd" - AWS_REGION_WEB: eu-central-1 - AWS_REGION_IOS: eu-central-1 - S3_BUCKET_WEB: asayer-mobs - S3_BUCKET_IOS: asayer-mobs + AWS_REGION_WEB: us-east-1 + AWS_REGION_IOS: us-east-1 + S3_BUCKET_WEB: mobs + S3_BUCKET_IOS: mobs # REDIS_STRING: redis-master.db.svc.cluster.local:6379 KAFKA_SERVERS: kafka.db.svc.cluster.local:9092 diff --git a/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/scripts/helm/db/init_dbs/postgresql/init_schema.sql index ed1449309..83afd3ba0 100644 --- a/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -1,8 +1,9 @@ BEGIN; +-- --- public.sql --- CREATE EXTENSION IF NOT EXISTS pg_trgm; CREATE EXTENSION IF NOT EXISTS pgcrypto; - +-- --- accounts.sql --- CREATE OR REPLACE FUNCTION generate_api_key(length integer) RETURNS text AS $$ @@ -33,7 +34,7 @@ CREATE TABLE public.tenants created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), edition varchar(3) NOT NULL, version_number text NOT NULL, - licence text NULL, + license text NULL, opt_out bool NOT NULL DEFAULT FALSE, t_projects integer NOT NULL DEFAULT 1, t_sessions bigint NOT NULL DEFAULT 0, @@ -128,7 +129,6 @@ CREATE TABLE basic_authentication token_requested_at timestamp without time zone NULL DEFAULT NULL, changed_at timestamp, UNIQUE (user_id) - -- CHECK ((token IS NULL and token_requested_at IS NULL) or (token IS NOT NULL and token_requested_at IS NOT NULL)) ); CREATE TYPE oauth_provider AS ENUM ('jira', 'github'); @@ -138,32 +138,32 @@ CREATE TABLE oauth_authentication provider oauth_provider NOT NULL, provider_user_id text NOT NULL, token text NOT NULL, - UNIQUE (provider, provider_user_id) + UNIQUE (user_id, provider) ); - +-- --- projects.sql --- CREATE TABLE projects ( project_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20), + project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20), name text NOT NULL, active boolean NOT NULL, - sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100), - created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), - deleted_at timestamp without time zone NULL DEFAULT NULL, - max_session_duration integer NOT NULL DEFAULT 7200000, - metadata_1 text DEFAULT NULL, - metadata_2 text DEFAULT NULL, - metadata_3 text DEFAULT NULL, - metadata_4 text DEFAULT NULL, - metadata_5 text DEFAULT NULL, - metadata_6 text DEFAULT NULL, - metadata_7 text DEFAULT NULL, - metadata_8 text DEFAULT NULL, - metadata_9 text DEFAULT NULL, - metadata_10 text DEFAULT NULL, - gdpr jsonb NOT NULL DEFAULT '{ + sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100), + created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), + deleted_at timestamp without time zone NULL DEFAULT NULL, + max_session_duration integer NOT NULL DEFAULT 7200000, + metadata_1 text DEFAULT NULL, + metadata_2 text DEFAULT NULL, + metadata_3 text DEFAULT NULL, + metadata_4 text DEFAULT NULL, + metadata_5 text DEFAULT NULL, + metadata_6 text DEFAULT NULL, + metadata_7 text DEFAULT NULL, + 
metadata_8 text DEFAULT NULL, + metadata_9 text DEFAULT NULL, + metadata_10 text DEFAULT NULL, + gdpr jsonb NOT NULL DEFAULT '{ "maskEmails": true, "sampleRate": 33, "maskNumbers": false, @@ -185,6 +185,70 @@ CREATE TRIGGER on_insert_or_update FOR EACH ROW EXECUTE PROCEDURE notify_project(); +-- --- alerts.sql --- + +CREATE TYPE alert_detection_method AS ENUM ('threshold', 'change'); + +CREATE TABLE alerts +( + alert_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + name text NOT NULL, + description text NULL DEFAULT NULL, + active boolean NOT NULL DEFAULT TRUE, + detection_method alert_detection_method NOT NULL, + query jsonb NOT NULL, + deleted_at timestamp NULL DEFAULT NULL, + created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), + options jsonb NOT NULL DEFAULT '{ + "renotifyInterval": 1440 + }'::jsonb +); + + +CREATE OR REPLACE FUNCTION notify_alert() RETURNS trigger AS +$$ +DECLARE + clone jsonb; +BEGIN + clone = to_jsonb(NEW); + clone = jsonb_set(clone, '{created_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.created_at) * 1000 AS BIGINT))); + IF NEW.deleted_at NOTNULL THEN + clone = jsonb_set(clone, '{deleted_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.deleted_at) * 1000 AS BIGINT))); + END IF; + PERFORM pg_notify('alert', clone::text); + RETURN NEW; +END ; +$$ LANGUAGE plpgsql; + + +CREATE TRIGGER on_insert_or_update_or_delete + AFTER INSERT OR UPDATE OR DELETE + ON alerts + FOR EACH ROW +EXECUTE PROCEDURE notify_alert(); + +-- --- webhooks.sql --- + +create type webhook_type as enum ('webhook', 'slack', 'email'); + +create table webhooks +( + webhook_id integer generated by default as identity + constraint webhooks_pkey + primary key, + endpoint text not null, + created_at timestamp default timezone('utc'::text, now()) not null, + deleted_at timestamp, + auth_header text, + type webhook_type not null, + index integer default 0 not null, + name varchar(100) +); + + +-- --- notifications.sql --- + CREATE TABLE notifications ( notification_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, @@ -209,6 +273,23 @@ CREATE TABLE user_viewed_notifications constraint user_viewed_notifications_pkey primary key (user_id, notification_id) ); +-- --- funnels.sql --- + +CREATE TABLE funnels +( + funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + name text not null, + filter jsonb not null, + created_at timestamp default timezone('utc'::text, now()) not null, + deleted_at timestamp, + is_public boolean NOT NULL DEFAULT False +); + +CREATE INDEX ON public.funnels (user_id, is_public); + +-- --- announcements.sql --- create type announcement_type as enum ('notification', 'alert'); @@ -226,92 +307,7 @@ create table announcements type announcement_type default 'notification'::announcement_type not null ); -CREATE TYPE error_source AS ENUM ('js_exception', 'bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic'); -CREATE TYPE error_status AS ENUM ('unresolved', 'resolved', 'ignored'); -CREATE TABLE errors -( - error_id text NOT NULL PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - source error_source NOT NULL, - name text DEFAULT NULL, - message text NOT NULL, - payload jsonb NOT NULL, - status error_status NOT NULL DEFAULT 'unresolved', - 
parent_error_id text DEFAULT NULL REFERENCES errors (error_id) ON DELETE SET NULL, - stacktrace jsonb, --to save the stacktrace and not query S3 another time - stacktrace_parsed_at timestamp -); -CREATE INDEX ON errors (project_id, source); -CREATE INDEX errors_message_gin_idx ON public.errors USING GIN (message gin_trgm_ops); -CREATE INDEX errors_name_gin_idx ON public.errors USING GIN (name gin_trgm_ops); -CREATE INDEX errors_project_id_idx ON public.errors (project_id); -CREATE INDEX errors_project_id_status_idx ON public.errors (project_id, status); - -CREATE TABLE user_favorite_errors -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, error_id) -); - -CREATE TABLE user_viewed_errors -( - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, - PRIMARY KEY (user_id, error_id) -); -CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id); -CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id); - - -CREATE TYPE issue_type AS ENUM ( - 'click_rage', - 'dead_click', - 'excessive_scrolling', - 'bad_request', - 'missing_resource', - 'memory', - 'cpu', - 'slow_resource', - 'slow_page_load', - 'crash', - 'ml_cpu', - 'ml_memory', - 'ml_dead_click', - 'ml_click_rage', - 'ml_mouse_thrashing', - 'ml_excessive_scrolling', - 'ml_slow_resources', - 'custom', - 'js_exception' - ); - -CREATE TABLE issues -( - issue_id text NOT NULL PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - type issue_type NOT NULL, - context_string text NOT NULL, - context jsonb DEFAULT NULL -); -CREATE INDEX ON issues (issue_id, type); -CREATE INDEX issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops); - -create type webhook_type as enum ('webhook', 'slack', 'email'); - -create table webhooks -( - webhook_id integer generated by default as identity - constraint webhooks_pkey - primary key, - endpoint text not null, - created_at timestamp default timezone('utc'::text, now()) not null, - deleted_at timestamp, - auth_header text, - type webhook_type not null, - index integer default 0 not null, - name varchar(100) -); +-- --- integrations.sql --- CREATE TYPE integration_provider AS ENUM ('bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic', 'elasticsearch'); --, 'jira', 'github'); CREATE TABLE integrations @@ -355,74 +351,82 @@ create table jira_cloud url text ); -CREATE TABLE funnels +-- --- issues.sql --- + +CREATE TYPE issue_type AS ENUM ( + 'click_rage', + 'dead_click', + 'excessive_scrolling', + 'bad_request', + 'missing_resource', + 'memory', + 'cpu', + 'slow_resource', + 'slow_page_load', + 'crash', + 'ml_cpu', + 'ml_memory', + 'ml_dead_click', + 'ml_click_rage', + 'ml_mouse_thrashing', + 'ml_excessive_scrolling', + 'ml_slow_resources', + 'custom', + 'js_exception' + ); + +CREATE TABLE issues ( - funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - name text not null, - filter jsonb not null, - created_at timestamp default timezone('utc'::text, now()) not null, - deleted_at timestamp, - is_public boolean NOT NULL DEFAULT False + issue_id text NOT NULL PRIMARY KEY, + 
project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + type issue_type NOT NULL, + context_string text NOT NULL, + context jsonb DEFAULT NULL +); +CREATE INDEX ON issues (issue_id, type); +CREATE INDEX issues_context_string_gin_idx ON public.issues USING GIN (context_string gin_trgm_ops); + +-- --- errors.sql --- + +CREATE TYPE error_source AS ENUM ('js_exception', 'bugsnag', 'cloudwatch', 'datadog', 'newrelic', 'rollbar', 'sentry', 'stackdriver', 'sumologic'); +CREATE TYPE error_status AS ENUM ('unresolved', 'resolved', 'ignored'); +CREATE TABLE errors +( + error_id text NOT NULL PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + source error_source NOT NULL, + name text DEFAULT NULL, + message text NOT NULL, + payload jsonb NOT NULL, + status error_status NOT NULL DEFAULT 'unresolved', + parent_error_id text DEFAULT NULL REFERENCES errors (error_id) ON DELETE SET NULL, + stacktrace jsonb, --to save the stacktrace and not query S3 another time + stacktrace_parsed_at timestamp +); +CREATE INDEX ON errors (project_id, source); +CREATE INDEX errors_message_gin_idx ON public.errors USING GIN (message gin_trgm_ops); +CREATE INDEX errors_name_gin_idx ON public.errors USING GIN (name gin_trgm_ops); +CREATE INDEX errors_project_id_idx ON public.errors (project_id); +CREATE INDEX errors_project_id_status_idx ON public.errors (project_id, status); + +CREATE TABLE user_favorite_errors +( + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, error_id) ); -CREATE INDEX ON public.funnels (user_id, is_public); - -CREATE TYPE alert_detection_method AS ENUM ('threshold', 'change'); - -CREATE TABLE alerts +CREATE TABLE user_viewed_errors ( - alert_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - name text NOT NULL, - description text NULL DEFAULT NULL, - active boolean NOT NULL DEFAULT TRUE, - detection_method alert_detection_method NOT NULL, - query jsonb NOT NULL, - deleted_at timestamp NULL DEFAULT NULL, - created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), - options jsonb NOT NULL DEFAULT '{ - "renotifyInterval": 1440 - }'::jsonb + user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + PRIMARY KEY (user_id, error_id) ); +CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id); +CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id); -CREATE OR REPLACE FUNCTION notify_alert() RETURNS trigger AS -$$ -DECLARE - clone jsonb; -BEGIN - clone = to_jsonb(NEW); - clone = jsonb_set(clone, '{created_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.created_at) * 1000 AS BIGINT))); - IF NEW.deleted_at NOTNULL THEN - clone = jsonb_set(clone, '{deleted_at}', to_jsonb(CAST(EXTRACT(epoch FROM NEW.deleted_at) * 1000 AS BIGINT))); - END IF; - PERFORM pg_notify('alert', clone::text); - RETURN NEW; -END ; -$$ LANGUAGE plpgsql; - - -CREATE TRIGGER on_insert_or_update_or_delete - AFTER INSERT OR UPDATE OR DELETE - ON alerts - FOR EACH ROW -EXECUTE PROCEDURE notify_alert(); - -CREATE TABLE autocomplete -( - value text NOT NULL, - type text NOT NULL, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE -); - -CREATE unique index autocomplete_unique ON autocomplete 
(project_id, value, type); -CREATE index autocomplete_project_id_idx ON autocomplete (project_id); -CREATE INDEX autocomplete_type_idx ON public.autocomplete (type); -CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops); - - +-- --- sessions.sql --- CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other'); CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS'); CREATE TYPE platform AS ENUM ('web','ios','android'); @@ -433,7 +437,7 @@ CREATE TABLE sessions project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, tracker_version text NOT NULL, start_ts bigint NOT NULL, - duration integer DEFAULT NULL, + duration integer NULL, rev_id text DEFAULT NULL, platform platform NOT NULL DEFAULT 'web', is_snippet boolean NOT NULL DEFAULT FALSE, @@ -507,6 +511,8 @@ CREATE INDEX sessions_user_anonymous_id_gin_idx ON public.sessions USING GIN (us CREATE INDEX sessions_user_country_gin_idx ON public.sessions (project_id, user_country); CREATE INDEX ON sessions (project_id, user_country); CREATE INDEX ON sessions (project_id, user_browser); +CREATE INDEX sessions_start_ts_idx ON public.sessions (start_ts) WHERE duration > 0; +CREATE INDEX sessions_project_id_idx ON public.sessions (project_id) WHERE duration > 0; ALTER TABLE public.sessions ADD CONSTRAINT web_browser_constraint CHECK ( (sessions.platform = 'web' AND sessions.user_browser NOTNULL) OR @@ -536,6 +542,73 @@ CREATE TABLE user_favorite_sessions ); +-- --- assignments.sql --- + +create table assigned_sessions +( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + issue_id text NOT NULL, + provider oauth_provider NOT NULL, + created_by integer NOT NULL, + created_at timestamp default timezone('utc'::text, now()) NOT NULL, + provider_data jsonb default '{}'::jsonb NOT NULL +); + +-- --- events_common.sql --- + +CREATE SCHEMA events_common; + +CREATE TYPE events_common.custom_level AS ENUM ('info','error'); + +CREATE TABLE events_common.customs +( + session_id bigint NOT NULL REFERENCES 
sessions (session_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + seq_index integer NOT NULL, + name text NOT NULL, + payload jsonb NOT NULL, + level events_common.custom_level NOT NULL DEFAULT 'info', + PRIMARY KEY (session_id, timestamp, seq_index) +); +CREATE INDEX ON events_common.customs (name); +CREATE INDEX customs_name_gin_idx ON events_common.customs USING GIN (name gin_trgm_ops); +CREATE INDEX ON events_common.customs (timestamp); + + +CREATE TABLE events_common.issues +( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + seq_index integer NOT NULL, + issue_id text NOT NULL REFERENCES issues (issue_id) ON DELETE CASCADE, + payload jsonb DEFAULT NULL, + PRIMARY KEY (session_id, timestamp, seq_index) +); + + +CREATE TABLE events_common.requests +( + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + seq_index integer NOT NULL, + url text NOT NULL, + duration integer NOT NULL, + success boolean NOT NULL, + PRIMARY KEY (session_id, timestamp, seq_index) +); +CREATE INDEX ON events_common.requests (url); +CREATE INDEX ON events_common.requests (duration); +CREATE INDEX requests_url_gin_idx ON events_common.requests USING GIN (url gin_trgm_ops); +CREATE INDEX ON events_common.requests (timestamp); +CREATE INDEX requests_url_gin_idx2 ON events_common.requests USING GIN (RIGHT(url, length(url) - (CASE + WHEN url LIKE 'http://%' + THEN 7 + WHEN url LIKE 'https://%' + THEN 8 + ELSE 0 END)) + gin_trgm_ops); + +-- --- events.sql --- CREATE SCHEMA events; CREATE TABLE events.pages @@ -558,6 +631,7 @@ CREATE TABLE events.pages time_to_interactive integer DEFAULT NULL, response_time bigint DEFAULT NULL, response_end bigint DEFAULT NULL, + ttfb integer DEFAULT NULL, PRIMARY KEY (session_id, message_id) ); CREATE INDEX ON events.pages (session_id); @@ -577,6 +651,11 @@ CREATE INDEX pages_base_referrer_gin_idx2 ON events.pages USING GIN (RIGHT(base_ gin_trgm_ops); CREATE INDEX ON events.pages (response_time); CREATE INDEX ON events.pages (response_end); +CREATE INDEX pages_path_gin_idx ON events.pages USING GIN (path gin_trgm_ops); +CREATE INDEX pages_path_idx ON events.pages (path); +CREATE INDEX pages_visually_complete_idx ON events.pages (visually_complete) WHERE visually_complete > 0; +CREATE INDEX pages_dom_building_time_idx ON events.pages (dom_building_time) WHERE dom_building_time > 0; +CREATE INDEX pages_load_time_idx ON events.pages (load_time) WHERE load_time > 0; CREATE TABLE events.clicks @@ -643,85 +722,6 @@ CREATE INDEX ON events.state_actions (name); CREATE INDEX state_actions_name_gin_idx ON events.state_actions USING GIN (name gin_trgm_ops); CREATE INDEX ON events.state_actions (timestamp); - - -CREATE OR REPLACE FUNCTION events.funnel(steps integer[], m integer) RETURNS boolean AS -$$ -DECLARE - step integer; - c integer := 0; -BEGIN - FOREACH step IN ARRAY steps - LOOP - IF step + c = 0 THEN - IF c = 0 THEN - RETURN false; - END IF; - c := 0; - CONTINUE; - END IF; - IF c + 1 = step THEN - c := step; - END IF; - END LOOP; - RETURN c = m; -END; -$$ LANGUAGE plpgsql IMMUTABLE; - - - -CREATE SCHEMA events_common; - -CREATE TYPE events_common.custom_level AS ENUM ('info','error'); - -CREATE TABLE events_common.customs -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - seq_index integer NOT NULL, - name text NOT NULL, - payload jsonb NOT NULL, - level events_common.custom_level NOT NULL DEFAULT 
'info', - PRIMARY KEY (session_id, timestamp, seq_index) -); -CREATE INDEX ON events_common.customs (name); -CREATE INDEX customs_name_gin_idx ON events_common.customs USING GIN (name gin_trgm_ops); -CREATE INDEX ON events_common.customs (timestamp); - - -CREATE TABLE events_common.issues -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - seq_index integer NOT NULL, - issue_id text NOT NULL REFERENCES issues (issue_id) ON DELETE CASCADE, - payload jsonb DEFAULT NULL, - PRIMARY KEY (session_id, timestamp, seq_index) -); - - -CREATE TABLE events_common.requests -( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - timestamp bigint NOT NULL, - seq_index integer NOT NULL, - url text NOT NULL, - duration integer NOT NULL, - success boolean NOT NULL, - PRIMARY KEY (session_id, timestamp, seq_index) -); -CREATE INDEX ON events_common.requests (url); -CREATE INDEX ON events_common.requests (duration); -CREATE INDEX requests_url_gin_idx ON events_common.requests USING GIN (url gin_trgm_ops); -CREATE INDEX ON events_common.requests (timestamp); -CREATE INDEX requests_url_gin_idx2 ON events_common.requests USING GIN (RIGHT(url, length(url) - (CASE - WHEN url LIKE 'http://%' - THEN 7 - WHEN url LIKE 'https://%' - THEN 8 - ELSE 0 END)) - gin_trgm_ops); - CREATE TYPE events.resource_type AS ENUM ('other', 'script', 'stylesheet', 'fetch', 'img', 'media'); CREATE TYPE events.resource_method AS ENUM ('GET' , 'HEAD' , 'POST' , 'PUT' , 'DELETE' , 'CONNECT' , 'OPTIONS' , 'TRACE' , 'PATCH' ); CREATE TABLE events.resources @@ -779,16 +779,42 @@ CREATE TABLE events.performance ); -ALTER TABLE events.pages - ADD COLUMN ttfb integer DEFAULT NULL; -CREATE INDEX pages_path_gin_idx ON events.pages USING GIN (path gin_trgm_ops); -CREATE INDEX pages_path_idx ON events.pages (path); -CREATE INDEX pages_visually_complete_idx ON events.pages (visually_complete) WHERE visually_complete > 0; -CREATE INDEX pages_dom_building_time_idx ON events.pages (dom_building_time) WHERE dom_building_time > 0; -CREATE INDEX pages_load_time_idx ON events.pages (load_time) WHERE load_time > 0; - -CREATE INDEX sessions_start_ts_idx ON public.sessions (start_ts) WHERE duration > 0; -CREATE INDEX sessions_project_id_idx ON public.sessions (project_id) WHERE duration > 0; +CREATE OR REPLACE FUNCTION events.funnel(steps integer[], m integer) RETURNS boolean AS +$$ +DECLARE + step integer; + c integer := 0; +BEGIN + FOREACH step IN ARRAY steps + LOOP + IF step + c = 0 THEN + IF c = 0 THEN + RETURN false; + END IF; + c := 0; + CONTINUE; + END IF; + IF c + 1 = step THEN + c := step; + END IF; + END LOOP; + RETURN c = m; +END; +$$ LANGUAGE plpgsql IMMUTABLE; -COMMIT; \ No newline at end of file +-- --- autocomplete.sql --- + +CREATE TABLE autocomplete +( + value text NOT NULL, + type text NOT NULL, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE +); + +CREATE unique index autocomplete_unique ON autocomplete (project_id, value, type); +CREATE index autocomplete_project_id_idx ON autocomplete (project_id); +CREATE INDEX autocomplete_type_idx ON public.autocomplete (type); +CREATE INDEX autocomplete_value_gin_idx ON public.autocomplete USING GIN (value gin_trgm_ops); + +COMMIT; diff --git a/scripts/helm/db/sqs/.helmignore b/scripts/helm/db/sqs/.helmignore deleted file mode 100644 index 0e8a0eb36..000000000 --- a/scripts/helm/db/sqs/.helmignore +++ /dev/null @@ -1,23 +0,0 @@ -# Patterns to ignore when building packages. 
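-- Editor's note (illustrative, not part of the diff): a minimal sketch of how
-- the events.funnel() helper defined in the schema above behaves once it is
-- loaded. It walks the array of step numbers, advancing a counter only when
-- the next expected step appears, and a step equal to the negated counter
-- resets progress; it returns true only when all m steps were seen in order.
-- Sample values are hypothetical:
SELECT events.funnel(ARRAY [1, 2, 3], 3);     -- true: steps 1..3 appear in order
SELECT events.funnel(ARRAY [1, 3], 3);        -- false: step 2 never appears, so the counter stalls at 1
SELECT events.funnel(ARRAY [1, -1, 1, 2], 2); -- true: -1 resets progress, then steps 1..2 complete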
-# This supports shell glob matching, relative path matching, and -# negation (prefixed with !). Only one pattern per line. -.DS_Store -# Common VCS dirs -.git/ -.gitignore -.bzr/ -.bzrignore -.hg/ -.hgignore -.svn/ -# Common backup files -*.swp -*.bak -*.tmp -*.orig -*~ -# Various IDEs -.project -.idea/ -*.tmproj -.vscode/ diff --git a/scripts/helm/db/sqs/Chart.yaml b/scripts/helm/db/sqs/Chart.yaml deleted file mode 100644 index df40d044a..000000000 --- a/scripts/helm/db/sqs/Chart.yaml +++ /dev/null @@ -1,23 +0,0 @@ -apiVersion: v2 -name: sqs -description: A Helm chart for Kubernetes - -# A chart can be either an 'application' or a 'library' chart. -# -# Application charts are a collection of templates that can be packaged into versioned archives -# to be deployed. -# -# Library charts provide useful utilities or functions for the chart developer. They're included as -# a dependency of application charts to inject those utilities and functions into the rendering -# pipeline. Library charts do not define any templates and therefore cannot be deployed. -type: application - -# This is the chart version. This version number should be incremented each time you make changes -# to the chart and its templates, including the app version. -# Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.1.0 - -# This is the version number of the application being deployed. This version number should be -# incremented each time you make changes to the application. Versions are not expected to -# follow Semantic Versioning. They should reflect the version the application is using. -appVersion: 1.16.0 diff --git a/scripts/helm/db/sqs/templates/NOTES.txt b/scripts/helm/db/sqs/templates/NOTES.txt deleted file mode 100644 index 1933314a0..000000000 --- a/scripts/helm/db/sqs/templates/NOTES.txt +++ /dev/null @@ -1,22 +0,0 @@ -1. Get the application URL by running these commands: -{{- if .Values.ingress.enabled }} -{{- range $host := .Values.ingress.hosts }} - {{- range .paths }} - http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ . }} - {{- end }} -{{- end }} -{{- else if contains "NodePort" .Values.service.type }} - export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "sqs.fullname" . }}) - export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}") - echo http://$NODE_IP:$NODE_PORT -{{- else if contains "LoadBalancer" .Values.service.type }} - NOTE: It may take a few minutes for the LoadBalancer IP to be available. - You can watch the status of by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "sqs.fullname" . }}' - export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "sqs.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}") - echo http://$SERVICE_IP:{{ .Values.service.port }} -{{- else if contains "ClusterIP" .Values.service.type }} - export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "sqs.name" . 
}},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}") - export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}") - echo "Visit http://127.0.0.1:9325 to use your application" - kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 9325:$CONTAINER_PORT -{{- end }} diff --git a/scripts/helm/db/sqs/templates/_helpers.tpl b/scripts/helm/db/sqs/templates/_helpers.tpl deleted file mode 100644 index 518fd7cc2..000000000 --- a/scripts/helm/db/sqs/templates/_helpers.tpl +++ /dev/null @@ -1,62 +0,0 @@ -{{/* -Expand the name of the chart. -*/}} -{{- define "sqs.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} -{{- end }} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. -*/}} -{{- define "sqs.fullname" -}} -{{- if .Values.fullnameOverride }} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} -{{- else }} -{{- $name := default .Chart.Name .Values.nameOverride }} -{{- if contains $name .Release.Name }} -{{- .Release.Name | trunc 63 | trimSuffix "-" }} -{{- else }} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} -{{- end }} -{{- end }} -{{- end }} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "sqs.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} -{{- end }} - -{{/* -Common labels -*/}} -{{- define "sqs.labels" -}} -helm.sh/chart: {{ include "sqs.chart" . }} -{{ include "sqs.selectorLabels" . }} -{{- if .Chart.AppVersion }} -app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} -{{- end }} -app.kubernetes.io/managed-by: {{ .Release.Service }} -{{- end }} - -{{/* -Selector labels -*/}} -{{- define "sqs.selectorLabels" -}} -app.kubernetes.io/name: {{ include "sqs.name" . }} -app.kubernetes.io/instance: {{ .Release.Name }} -{{- end }} - -{{/* -Create the name of the service account to use -*/}} -{{- define "sqs.serviceAccountName" -}} -{{- if .Values.serviceAccount.create }} -{{- default (include "sqs.fullname" .) .Values.serviceAccount.name }} -{{- else }} -{{- default "default" .Values.serviceAccount.name }} -{{- end }} -{{- end }} diff --git a/scripts/helm/db/sqs/templates/configmap.yaml b/scripts/helm/db/sqs/templates/configmap.yaml deleted file mode 100644 index aa6c9f956..000000000 --- a/scripts/helm/db/sqs/templates/configmap.yaml +++ /dev/null @@ -1,28 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: {{ include "sqs.fullname" . }} - labels: - {{- include "sqs.labels" . 
| nindent 4 }} -data: - elasticmq.conf: |- - include classpath("application.conf") - akka.http.server.request-timeout = 40 s - - node-address { - protocol = http - host = "*" - port = 9324 - context-path = "" - } - - rest-sqs { - enabled = true - bind-port = 9324 - bind-hostname = "0.0.0.0" - // Possible values: relaxed, strict - sqs-limits = strict - } -{{if .Values.queueConfig }} -{{ .Values.queueConfig | trim | nindent 4 }} -{{ end }} diff --git a/scripts/helm/db/sqs/templates/deployment.yaml b/scripts/helm/db/sqs/templates/deployment.yaml deleted file mode 100644 index 62712031f..000000000 --- a/scripts/helm/db/sqs/templates/deployment.yaml +++ /dev/null @@ -1,64 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ include "sqs.fullname" . }} - labels: - {{- include "sqs.labels" . | nindent 4 }} -spec: - {{- if not .Values.autoscaling.enabled }} - replicas: {{ .Values.replicaCount }} - {{- end }} - selector: - matchLabels: - {{- include "sqs.selectorLabels" . | nindent 6 }} - template: - metadata: - {{- with .Values.podAnnotations }} - annotations: - {{- toYaml . | nindent 8 }} - {{- end }} - labels: - {{- include "sqs.selectorLabels" . | nindent 8 }} - spec: - {{- with .Values.imagePullSecrets }} - imagePullSecrets: - {{- toYaml . | nindent 8 }} - {{- end }} - serviceAccountName: {{ include "sqs.serviceAccountName" . }} - securityContext: - {{- toYaml .Values.podSecurityContext | nindent 8 }} - containers: - - name: {{ .Chart.Name }} - securityContext: - {{- toYaml .Values.securityContext | nindent 12 }} - image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" - imagePullPolicy: {{ .Values.image.pullPolicy }} - ports: - - name: http - containerPort: 9325 - protocol: TCP - - name: sqs - containerPort: 9324 - protocol: TCP - resources: - {{- toYaml .Values.resources | nindent 12 }} - volumeMounts: - - name: elasticmq - mountPath: /opt/elasticmq.conf - subPath: elasticmq.conf - volumes: - - name: elasticmq - configMap: - name: {{ include "sqs.fullname" . }} - {{- with .Values.nodeSelector }} - nodeSelector: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.affinity }} - affinity: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.tolerations }} - tolerations: - {{- toYaml . | nindent 8 }} - {{- end }} diff --git a/scripts/helm/db/sqs/templates/hpa.yaml b/scripts/helm/db/sqs/templates/hpa.yaml deleted file mode 100644 index db0747bcf..000000000 --- a/scripts/helm/db/sqs/templates/hpa.yaml +++ /dev/null @@ -1,28 +0,0 @@ -{{- if .Values.autoscaling.enabled }} -apiVersion: autoscaling/v2beta1 -kind: HorizontalPodAutoscaler -metadata: - name: {{ include "sqs.fullname" . }} - labels: - {{- include "sqs.labels" . | nindent 4 }} -spec: - scaleTargetRef: - apiVersion: apps/v1 - kind: Deployment - name: {{ include "sqs.fullname" . 
}} - minReplicas: {{ .Values.autoscaling.minReplicas }} - maxReplicas: {{ .Values.autoscaling.maxReplicas }} - metrics: - {{- if .Values.autoscaling.targetCPUUtilizationPercentage }} - - type: Resource - resource: - name: cpu - targetAverageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }} - {{- end }} - {{- if .Values.autoscaling.targetMemoryUtilizationPercentage }} - - type: Resource - resource: - name: memory - targetAverageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }} - {{- end }} -{{- end }} diff --git a/scripts/helm/db/sqs/templates/ingress.yaml b/scripts/helm/db/sqs/templates/ingress.yaml deleted file mode 100644 index b2dc375fb..000000000 --- a/scripts/helm/db/sqs/templates/ingress.yaml +++ /dev/null @@ -1,41 +0,0 @@ -{{- if .Values.ingress.enabled -}} -{{- $fullName := include "sqs.fullname" . -}} -{{- $svcPort := .Values.service.port -}} -{{- if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}} -apiVersion: networking.k8s.io/v1beta1 -{{- else -}} -apiVersion: extensions/v1beta1 -{{- end }} -kind: Ingress -metadata: - name: {{ $fullName }} - labels: - {{- include "sqs.labels" . | nindent 4 }} - {{- with .Values.ingress.annotations }} - annotations: - {{- toYaml . | nindent 4 }} - {{- end }} -spec: - {{- if .Values.ingress.tls }} - tls: - {{- range .Values.ingress.tls }} - - hosts: - {{- range .hosts }} - - {{ . | quote }} - {{- end }} - secretName: {{ .secretName }} - {{- end }} - {{- end }} - rules: - {{- range .Values.ingress.hosts }} - - host: {{ .host | quote }} - http: - paths: - {{- range .paths }} - - path: {{ . }} - backend: - serviceName: {{ $fullName }} - servicePort: {{ $svcPort }} - {{- end }} - {{- end }} - {{- end }} diff --git a/scripts/helm/db/sqs/templates/service.yaml b/scripts/helm/db/sqs/templates/service.yaml deleted file mode 100644 index fa6b14238..000000000 --- a/scripts/helm/db/sqs/templates/service.yaml +++ /dev/null @@ -1,19 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: {{ include "sqs.fullname" . }} - labels: - {{- include "sqs.labels" . | nindent 4 }} -spec: - type: {{ .Values.service.type }} - ports: - - port: {{ .Values.service.http.port }} - targetPort: http - protocol: TCP - name: http - - port: {{ .Values.service.sqs.port }} - targetPort: sqs - protocol: TCP - name: sqs - selector: - {{- include "sqs.selectorLabels" . | nindent 4 }} diff --git a/scripts/helm/db/sqs/templates/serviceaccount.yaml b/scripts/helm/db/sqs/templates/serviceaccount.yaml deleted file mode 100644 index a2989f188..000000000 --- a/scripts/helm/db/sqs/templates/serviceaccount.yaml +++ /dev/null @@ -1,12 +0,0 @@ -{{- if .Values.serviceAccount.create -}} -apiVersion: v1 -kind: ServiceAccount -metadata: - name: {{ include "sqs.serviceAccountName" . }} - labels: - {{- include "sqs.labels" . | nindent 4 }} - {{- with .Values.serviceAccount.annotations }} - annotations: - {{- toYaml . | nindent 4 }} - {{- end }} -{{- end }} diff --git a/scripts/helm/db/sqs/values.yaml b/scripts/helm/db/sqs/values.yaml deleted file mode 100644 index 5634a5494..000000000 --- a/scripts/helm/db/sqs/values.yaml +++ /dev/null @@ -1,111 +0,0 @@ -# Default values for sqs. -# This is a YAML-formatted file. -# Declare variables to be passed into your templates. - -replicaCount: 1 - -image: - repository: roribio16/alpine-sqs - pullPolicy: IfNotPresent - # Overrides the image tag whose default is the chart appVersion. 
- tag: "latest" - -imagePullSecrets: [] -nameOverride: "" -fullnameOverride: "" - -serviceAccount: - # Specifies whether a service account should be created - create: true - # Annotations to add to the service account - annotations: {} - # The name of the service account to use. - # If not set and create is true, a name is generated using the fullname template - name: "" - -podAnnotations: {} - -podSecurityContext: {} - # fsGroup: 2000 - -securityContext: {} - # capabilities: - # drop: - # - ALL - # readOnlyRootFilesystem: true - # runAsNonRoot: true - # runAsUser: 1000 - -service: - type: ClusterIP - sqs: - port: 9324 - http: - port: 9325 - -ingress: - enabled: false - annotations: {} - # kubernetes.io/ingress.class: nginx - # kubernetes.io/tls-acme: "true" - hosts: - - host: chart-example.local - paths: [] - tls: [] - # - secretName: chart-example-tls - # hosts: - # - chart-example.local - -resources: - # We usually recommend not to specify default resources and to leave this as a conscious - # choice for the user. This also increases chances charts run on environments with little - # resources, such as Minikube. If you do want to specify resources, uncomment the following - # lines, adjust them as necessary, and remove the curly braces after 'resources:'. - limits: - cpu: 1 - memory: 1Gi - requests: - cpu: 100m - memory: 128Mi - -autoscaling: - enabled: false - minReplicas: 1 - maxReplicas: 100 - targetCPUUtilizationPercentage: 80 - # targetMemoryUtilizationPercentage: 80 - -nodeSelector: {} - -tolerations: [] - -affinity: {} - -# Creating the initial queue -# Ref: https://github.com/softwaremill/elasticmq#automatically-creating-queues-on-startup -queueConfig: |- - queues { - scheduled-runs { - defaultVisibilityTimeout = 10 seconds - delay = 5 seconds - receiveMessageWait = 0 seconds - deadLettersQueue { - name = "dead-runs" - maxReceiveCount = 1000 - } - } - ondemand-runs { - defaultVisibilityTimeout = 10 seconds - delay = 5 seconds - receiveMessageWait = 0 seconds - deadLettersQueue { - name = "dead-runs" - maxReceiveCount = 1000 - } - } - dead-runs { - defaultVisibilityTimeout = 10 seconds - delay = 5 seconds - receiveMessageWait = 0 seconds - } - } diff --git a/scripts/helm/install.sh b/scripts/helm/install.sh index fc50c519b..a3dfcc4c2 100755 --- a/scripts/helm/install.sh +++ b/scripts/helm/install.sh @@ -2,6 +2,22 @@ set -o errtrace +# Check for a valid domain_name +domain_name=`grep domain_name vars.yaml | grep -v "example" | cut -d " " -f2 | cut -d '"' -f2` +# Ref: https://stackoverflow.com/questions/15268987/bash-based-regex-domain-name-validation +[[ $(echo $domain_name | grep -P '(?=^.{5,254}$)(^(?:(?!\d+\.)[a-zA-Z0-9_\-]{1,63}\.?)+(?:[a-zA-Z]{2,})$)') ]] || { + echo "OpenReplay Needs a valid domain name for captured sessions to replay. For example, openreplay.mycompany.com" + echo "Please enter your domain name" + read domain_name + [[ -z domain_name ]] && { + echo "OpenReplay won't work without domain name. Exiting..." 
+ exit 1 + } || { + sed -i "s#domain_name.*#domain_name: \"${domain_name}\" #g" vars.yaml + } +} + + # Installing k3s curl -sL https://get.k3s.io | sudo K3S_KUBECONFIG_MODE="644" INSTALL_K3S_VERSION='v1.19.5+k3s2' INSTALL_K3S_EXEC="--no-deploy=traefik" sh - mkdir ~/.kube diff --git a/scripts/helm/kube-install.sh b/scripts/helm/kube-install.sh index e3905c2d4..5483c4420 100755 --- a/scripts/helm/kube-install.sh +++ b/scripts/helm/kube-install.sh @@ -1,6 +1,6 @@ #!/bin/bash -set -xo errtrace +set -o errtrace # color schemes # Ansi color code variables @@ -22,7 +22,7 @@ echo -e ${reset} ## installing kubectl which kubectl &> /dev/null || { - echo "kubectl not installed. installing..." + echo "kubectl not installed. Installing it..." sudo curl -SsL https://dl.k8s.io/release/v1.20.0/bin/linux/amd64/kubectl -o /usr/local/bin/kubectl ; sudo chmod +x /usr/local/bin/kubectl } @@ -34,7 +34,7 @@ which stern &> /dev/null || { ## installing k9s which k9s &> /dev/null || { - echo "k9s not installed. installing..." + echo "k9s not installed. Installing it..." sudo curl -SsL https://github.com/derailed/k9s/releases/download/v0.24.2/k9s_Linux_x86_64.tar.gz -o /tmp/k9s.tar.gz cd /tmp tar -xf k9s.tar.gz @@ -44,13 +44,13 @@ which k9s &> /dev/null || { } which ansible &> /dev/null || { - echo "ansible not installed. Installing..." + echo "ansible not installed. Installing it..." which pip || (sudo apt update && sudo apt install python3-pip -y) sudo pip3 install ansible==2.10.0 } which docker &> /dev/null || { - echo "docker is not installed. Installing..." + echo "docker is not installed. Installing it..." user=`whoami` sudo apt install docker.io -y sudo usermod -aG docker $user @@ -59,7 +59,7 @@ which docker &> /dev/null || { ## installing helm which helm &> /dev/null if [[ $? -ne 0 ]]; then - echo "helm not installed. installing..." + echo "helm not installed. Installing it..." curl -sSL https://get.helm.sh/helm-v3.4.2-linux-amd64.tar.gz -o /tmp/helm.tar.gz tar -xf /tmp/helm.tar.gz chmod +x linux-amd64/helm @@ -77,30 +77,31 @@ fi # make all stderr red color()(set -o pipefail;"$@" 2>&1>&3|sed $'s,.*,\e[31m&\e[m,'>&2)3>&1 -usage() -{ +usage() { echo -e ${bold}${yellow} ''' -This script will install and configure openreplay apps and databases on the kubernetes cluster, +This script will install and configure OpenReplay apps and databases on the kubernetes cluster, which is accessed with the ${HOME}/.kube/config or $KUBECONFIG env variable.
''' -cat << EOF -▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄ -█░▄▄▀█░▄▄█░▄▄▀█░██░█░▄▄█░▄▄▀██ -█░▀▀░█▄▄▀█░▀▀░█░▀▀░█░▄▄█░▀▀▄██ -█▄██▄█▄▄▄█▄██▄█▀▀▀▄█▄▄▄█▄█▄▄██ -▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀ +cat <<"EOF" + ___ ____ _ + / _ \ _ __ ___ _ __ | _ \ ___ _ __ | | __ _ _ _ +| | | | '_ \ / _ \ '_ \| |_) / _ \ '_ \| |/ _` | | | | +| |_| | |_) | __/ | | | _ < __/ |_) | | (_| | |_| | + \___/| .__/ \___|_| |_|_| \_\___| .__/|_|\__,_|\__, | + |_| |_| |___/ + EOF echo -e "${green}Usage: openreplay-cli [ -h | --help ] [ -v | --verbose ] [ -a | --app APP_NAME ] to install/reinstall specific application [ -t | --type small|medium|ideal ]" echo -e "${reset}${blue}type defines the resource limits applied for the installation: - small: 4core 8G machine + small: 2core 8G machine medium: 4core 16G machine ideal: 8core 32G machine apps can specifically be installed/reinstalled: - alerts assets auth chalice ender http integrations ios-proxy metadata negative pg-stateless pg preprocessing redis sink storage frontend + alerts assets chalice ender http integrations ios-proxy pg redis sink storage frontend ${reset}" echo type value: $installation_type exit 0 @@ -122,8 +123,10 @@ type() { function app(){ case $1 in nginx) - [[ NGINX_REDIRECT_HTTPS -eq 0 ]] && { - sed -i "/return 301/d" nginx-ingress/nginx-ingress/templates/configmap.yaml + # Resetting the redirection rule + sed -i 's/.* return 301 .*/ # return 301 https:\/\/$host$request_uri;/g' nginx-ingress/nginx-ingress/templates/configmap.yaml + [[ NGINX_REDIRECT_HTTPS -eq 1 ]] && { + sed -i "s/# return 301/return 301/g" nginx-ingress/nginx-ingress/templates/configmap.yaml } ansible-playbook -c local setup.yaml -e @vars.yaml -e scale=$installation_type --tags nginx -v exit 0 @@ -170,3 +173,7 @@ done { ansible-playbook -c local setup.yaml -e @vars.yaml -e scale=$installation_type --skip-tags pre-check -v } || exit $? + + + + diff --git a/scripts/helm/nginx-ingress/nginx-ingress/README.md b/scripts/helm/nginx-ingress/nginx-ingress/README.md index a61fe2bc6..76c878b5a 100644 --- a/scripts/helm/nginx-ingress/nginx-ingress/README.md +++ b/scripts/helm/nginx-ingress/nginx-ingress/README.md @@ -1,13 +1,13 @@ ## Description -This is the frontend of the openreplay web app to internet. +This is the internet-facing frontend of the OpenReplay web app.
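# Editor's note (illustrative, not part of the diff): the net effect of the
# redirect toggle in app() above, assuming the stock line shipped in
# nginx-ingress/nginx-ingress/templates/configmap.yaml:
#
#   default (HTTP only)      ->   # return 301 https://$host$request_uri;
#   NGINX_REDIRECT_HTTPS=1   ->   return 301 https://$host$request_uri;
#
# Serving plain HTTP is now the default and the 301-to-HTTPS redirect is
# opt-in, which matches the configmap change further below.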
-## Path information +## Endpoints -/ws -> websocket -/streaming -> ios-proxy -/api -> chalice -/http -> http -/ -> frontend (in minio) -/assets -> asayer-sessions-assets bucket in minio -/s3 -> minio api endpoint +- /streaming -> ios-proxy +- /api -> chalice +- /http -> http +- / -> frontend (in minio) +- /assets -> sessions-assets bucket in minio +- /minio -> minio api endpoint +- /ingest -> events ingestor diff --git a/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml b/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml index d47e47255..d02cc26b1 100644 --- a/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml +++ b/scripts/helm/nginx-ingress/nginx-ingress/templates/configmap.yaml @@ -20,6 +20,8 @@ data: proxy_set_header Connection ""; chunked_transfer_encoding off; + client_max_body_size 50M; + proxy_pass http://minio.db.svc.cluster.local:9000; } @@ -35,6 +37,9 @@ data: proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection "Upgrade"; + proxy_set_header X-Forwarded-For $real_ip; + proxy_set_header X-Forwarded-Host $real_ip; + proxy_set_header X-Real-IP $real_ip; proxy_set_header Host $host; proxy_pass http://http-openreplay.app.svc.cluster.local; } @@ -102,6 +107,13 @@ data: ; sites.conf: |- + # We need the real client IP for the geo flags shown in replay. + # Some LBs forward the real IP in x-forwarded-for, + # so that header takes priority. + map $http_x_forwarded_for $real_ip { + ~^(\d+\.\d+\.\d+\.\d+) $1; + default $remote_addr; + } map $http_upgrade $connection_upgrade { default upgrade; '' close; @@ -110,8 +122,8 @@ data: listen 80 default_server; listen [::]:80 default_server; # server_name _; - return 301 https://$host$request_uri; - # include /etc/nginx/conf.d/location.list; + # return 301 https://$host$request_uri; + include /etc/nginx/conf.d/location.list; } server { listen 443 ssl; diff --git a/scripts/helm/nginx-ingress/nginx-ingress/templates/deployment.yaml b/scripts/helm/nginx-ingress/nginx-ingress/templates/deployment.yaml index c9d9d78e5..9cc018dc1 100644 --- a/scripts/helm/nginx-ingress/nginx-ingress/templates/deployment.yaml +++ b/scripts/helm/nginx-ingress/nginx-ingress/templates/deployment.yaml @@ -13,8 +13,9 @@ spec: {{- include "nginx.selectorLabels" . | nindent 6 }} template: metadata: - {{- with .Values.podAnnotations }} annotations: + nginxRolloutID: {{ randAlphaNum 5 | quote }} # Restart nginx after every deployment + {{- with .Values.podAnnotations }} {{- toYaml . | nindent 8 }} {{- end }} labels: diff --git a/scripts/helm/nginx-ingress/nginx-ingress/templates/service.yaml b/scripts/helm/nginx-ingress/nginx-ingress/templates/service.yaml index 38dc08846..38912bf78 100644 --- a/scripts/helm/nginx-ingress/nginx-ingress/templates/service.yaml +++ b/scripts/helm/nginx-ingress/nginx-ingress/templates/service.yaml @@ -6,6 +6,8 @@ metadata: {{- include "nginx.labels" .
| nindent 4 }} spec: type: {{ .Values.service.type }} + # Make sure to get client ip + externalTrafficPolicy: Local ports: {{- range .Values.service.ports }} - port: {{ .port }} diff --git a/scripts/helm/openreplay-cli b/scripts/helm/openreplay-cli index 05a0b55ed..19392c1ce 100755 --- a/scripts/helm/openreplay-cli +++ b/scripts/helm/openreplay-cli @@ -1,6 +1,6 @@ #!/bin/bash -## This script is a helper for openreplay management +## This script is a helper for managing your OpenReplay instance set -eE -o pipefail # same as: `set -o errexit -o errtrace` # Trapping the error @@ -37,6 +37,16 @@ CWD=$pwd usage() { clear +cat <<"EOF" + ___ ____ _ + / _ \ _ __ ___ _ __ | _ \ ___ _ __ | | __ _ _ _ +| | | | '_ \ / _ \ '_ \| |_) / _ \ '_ \| |/ _` | | | | +| |_| | |_) | __/ | | | _ < __/ |_) | | (_| | |_| | + \___/| .__/ \___|_| |_|_| \_\___| .__/|_|\__,_|\__, | + |_| |_| |___/ + +EOF + echo -e "${green}Usage: openreplay-cli [ -h | --help ] [ -d | --status ] [ -v | --verbose ] @@ -48,7 +58,7 @@ clear echo -e "${reset}${blue}services: ${services[*]}${reset}" exit 0 } -services=( alert auth cache chalice clickhouse ender events failover filesink filestorage http integrations ios-proxy metadata negative pg-stateless pg preprocessing redis ws ) +services=( alerts assets chalice clickhouse ender sink storage http integrations ios-proxy db pg redis ) check() { if ! command -v kubectl &> /dev/null @@ -72,14 +82,14 @@ stop() { start() { if [[ $1 == "all" ]]; then - cd ./helm/app + cd ./app for apps in $(ls *.yaml);do app=$(echo $apps | cut -d '.' -f1) helm upgrade --install -n app $app openreplay -f $app.yaml done cd $CWD fi - helm upgrade --install -n app $1 ./helm/app/openreplay -f ./helm/app/openreplay/$1.yaml + helm upgrade --install -n app $1 ./app/openreplay -f ./app/$1.yaml } @@ -95,7 +105,7 @@ install() { } upgrade() { - sed -i "s/tag:.*/ tag: 'latest'/g" helm/app/$1.yaml + sed -i "s/tag:.*/ tag: 'latest'/g" ./app/$1.yaml } logs() { @@ -109,7 +119,7 @@ status() { [[ $# -eq 0 ]] && usage && exit 1 -PARSED_ARGUMENTS=$(color getopt -a -n openreplay-cli -o vhds:S:l:r:i: --long verbose,help,status,start:,stop:,restart:,install: -- "$@") +PARSED_ARGUMENTS=$(color getopt -a -n openreplay-cli -o vhds:S:l:r:i: --long verbose,help,status,start:,stop:,logs:,restart:,install: -- "$@") VALID_ARGUMENTS=$? 
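# Editor's note (illustrative, not part of the diff): the getopt call above now
# lists logs: among the long options, so --logs <service> is parsed instead of
# being rejected as unrecognized. A quick sanity check with GNU getopt:
#
#   getopt -a -n openreplay-cli -o vhds:S:l:r:i: \
#          --long verbose,help,status,start:,stop:,logs:,restart:,install: \
#          -- --logs chalice
#
# prints " --logs 'chalice' --", confirming the option round-trips.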
if [[ "$VALID_ARGUMENTS" != "0" ]]; then usage diff --git a/scripts/helm/roles/openreplay/defaults/main.yml b/scripts/helm/roles/openreplay/defaults/main.yml index 4927d5350..f7948e53c 100644 --- a/scripts/helm/roles/openreplay/defaults/main.yml +++ b/scripts/helm/roles/openreplay/defaults/main.yml @@ -6,5 +6,4 @@ db_list: - "nfs-server-provisioner" - "postgresql" - "redis" - - "sqs" enterprise_edition: false diff --git a/scripts/helm/roles/openreplay/tasks/install-apps.yaml b/scripts/helm/roles/openreplay/tasks/install-apps.yaml index f442065ac..3e511ac19 100644 --- a/scripts/helm/roles/openreplay/tasks/install-apps.yaml +++ b/scripts/helm/roles/openreplay/tasks/install-apps.yaml @@ -9,7 +9,7 @@ executable: /bin/bash when: app_name|length > 0 tags: app -- name: Installing openreplay core applications +- name: Installing OpenReplay core applications shell: | override='' [[ -f /tmp/'{{ item|basename }}' ]] && override='-f /tmp/{{ item|basename }}' || true diff --git a/scripts/helm/roles/openreplay/tasks/main.yml b/scripts/helm/roles/openreplay/tasks/main.yml index ab4a7cd60..66d31cf4a 100644 --- a/scripts/helm/roles/openreplay/tasks/main.yml +++ b/scripts/helm/roles/openreplay/tasks/main.yml @@ -14,11 +14,11 @@ shell: | kubectl delete -n app secret aws-registry || true kubectl create secret -n app docker-registry aws-registry \ - --docker-server="{{ ecr_docker_registry_server }}" \ - --docker-username="{{ ecr_docker_username }}" \ - --docker-password="{{ ecr_docker_password }}" \ + --docker-server="{{ docker_registry_url }}" \ + --docker-username="{{ docker_registry_username }}" \ + --docker-password="{{ docker_registry_password }}" \ --docker-email=no@email.local - when: ecr_docker_username|length != 0 and ecr_docker_password|length != 0 + when: docker_registry_username|length != 0 and docker_registry_password|length != 0 # Creating helm override files. - name: Creating override files diff --git a/scripts/helm/roles/openreplay/tasks/pre-check.yaml b/scripts/helm/roles/openreplay/tasks/pre-check.yaml index 4363b9c34..60801192f 100644 --- a/scripts/helm/roles/openreplay/tasks/pre-check.yaml +++ b/scripts/helm/roles/openreplay/tasks/pre-check.yaml @@ -4,11 +4,11 @@ block: - name: Checking mandatory variables fail: - msg: "Didn't find openreplay docker credentials." - when: kubeconfig_path|length == 0 or ecr_docker_registry_server|length == 0 + msg: "Didn't find OpenReplay docker credentials." 
+ when: kubeconfig_path|length == 0 or docker_registry_url|length == 0 - name: Generating minio access key block: - - name: Generaing minio access key + - name: Generating minio access key set_fact: minio_access_key_generated: "{{ lookup('password', '/dev/null length=30 chars=ascii_letters') }}" - name: Updating vars.yaml @@ -16,13 +16,13 @@ regexp: '^minio_access_key' line: 'minio_access_key: "{{ minio_access_key_generated }}"' path: vars.yaml - - name: Generaing minio access key + - name: Generating minio access key set_fact: minio_access_key: "{{ minio_access_key_generated }}" when: minio_access_key|length == 0 - - name: Generaing minio secret key + - name: Generating minio secret key block: - - name: Generaing minio access key + - name: Generating minio secret key set_fact: minio_secret_key_generated: "{{ lookup('password', '/dev/null length=30 chars=ascii_letters') }}" - name: Updating vars.yaml @@ -30,19 +30,33 @@ regexp: '^minio_secret_key' line: 'minio_secret_key: "{{minio_secret_key_generated}}"' path: vars.yaml - - name: Generaing minio secret key + - name: Generating minio secret key set_fact: minio_secret_key: "{{ minio_secret_key_generated }}" when: minio_secret_key|length == 0 + - name: Generating jwt secret key + block: + - name: Generating jwt secret key + set_fact: + jwt_secret_key_generated: "{{ lookup('password', '/dev/null length=30 chars=ascii_letters') }}" + - name: Updating vars.yaml + lineinfile: + regexp: '^jwt_secret_key' + line: 'jwt_secret_key: "{{jwt_secret_key_generated}}"' + path: vars.yaml + - name: Generating jwt secret key + set_fact: + jwt_secret_key: "{{ jwt_secret_key_generated }}" + when: jwt_secret_key|length == 0 rescue: - name: Caught error debug: msg: - - Below variabls are mandatory. Please make sure it's updated in vars.yaml + - Below variables are mandatory.
Please make sure they are updated in vars.yaml - kubeconfig_path - - ecr_docker_username - - ecr_docker_password - - ecr_docker_registry_server + - docker_registry_username + - docker_registry_password + - docker_registry_url failed_when: true tags: pre-check - name: Creating Nginx SSL certificate diff --git a/scripts/helm/roles/openreplay/templates/alert.yaml b/scripts/helm/roles/openreplay/templates/alert.yaml index 0200a406a..1ba439a63 100644 --- a/scripts/helm/roles/openreplay/templates/alert.yaml +++ b/scripts/helm/roles/openreplay/templates/alert.yaml @@ -4,9 +4,9 @@ image: tag: {{ image_tag }} {% endif %} -{% if not (ecr_docker_username is defined and ecr_docker_username and ecr_docker_password is defined and ecr_docker_password) %} +{% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] {% endif %} -{% if not (ecr_docker_username is defined and ecr_docker_username and ecr_docker_password is defined and ecr_docker_password) %} +{% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] {% endif %} diff --git a/scripts/helm/roles/openreplay/templates/assets.yaml b/scripts/helm/roles/openreplay/templates/assets.yaml index d7be9fa9d..1f21147bb 100644 --- a/scripts/helm/roles/openreplay/templates/assets.yaml +++ b/scripts/helm/roles/openreplay/templates/assets.yaml @@ -7,6 +7,6 @@ env: AWS_ACCESS_KEY_ID: "{{ minio_access_key }}" AWS_SECRET_ACCESS_KEY: "{{ minio_secret_key }}" -{% if not (ecr_docker_username is defined and ecr_docker_username and ecr_docker_password is defined and ecr_docker_password) %} +{% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] {% endif %} diff --git a/scripts/helm/roles/openreplay/templates/chalice.yaml b/scripts/helm/roles/openreplay/templates/chalice.yaml index 90b6de579..28325eb64 100644 --- a/scripts/helm/roles/openreplay/templates/chalice.yaml +++ b/scripts/helm/roles/openreplay/templates/chalice.yaml @@ -4,7 +4,7 @@ image: tag: {{ image_tag }} {% endif %} -{% if not (ecr_docker_username is defined and ecr_docker_username and ecr_docker_password is defined and ecr_docker_password) %} +{% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] {% endif %} env: @@ -13,3 +13,4 @@ env: sourcemaps_bucket_key: "{{ minio_access_key }}" sourcemaps_bucket_secret: "{{ minio_secret_key }}" S3_HOST: "https://{{ domain_name }}" + jwt_secret: "{{ jwt_secret_key }}" diff --git a/scripts/helm/roles/openreplay/templates/db.yaml b/scripts/helm/roles/openreplay/templates/db.yaml index 0200a406a..1ba439a63 100644 --- a/scripts/helm/roles/openreplay/templates/db.yaml +++ b/scripts/helm/roles/openreplay/templates/db.yaml @@ -4,9 +4,9 @@ image: tag: {{ image_tag }} {% endif %} -{% if not (ecr_docker_username is defined and ecr_docker_username and ecr_docker_password is defined and ecr_docker_password) %} +{% if not
(docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] {% endif %} diff --git a/scripts/helm/roles/openreplay/templates/ender.yaml b/scripts/helm/roles/openreplay/templates/ender.yaml index 560483e94..2d51506ea 100644 --- a/scripts/helm/roles/openreplay/templates/ender.yaml +++ b/scripts/helm/roles/openreplay/templates/ender.yaml @@ -4,6 +4,6 @@ image: tag: {{ image_tag }} {% endif %} -{% if not (ecr_docker_username is defined and ecr_docker_username and ecr_docker_password is defined and ecr_docker_password) %} +{% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] {% endif %} diff --git a/scripts/helm/roles/openreplay/templates/http.yaml b/scripts/helm/roles/openreplay/templates/http.yaml index d7be9fa9d..1f21147bb 100644 --- a/scripts/helm/roles/openreplay/templates/http.yaml +++ b/scripts/helm/roles/openreplay/templates/http.yaml @@ -7,6 +7,6 @@ env: AWS_ACCESS_KEY_ID: "{{ minio_access_key }}" AWS_SECRET_ACCESS_KEY: "{{ minio_secret_key }}" -{% if not (ecr_docker_username is defined and ecr_docker_username and ecr_docker_password is defined and ecr_docker_password) %} +{% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] {% endif %} diff --git a/scripts/helm/roles/openreplay/templates/integrations.yaml b/scripts/helm/roles/openreplay/templates/integrations.yaml index 560483e94..2d51506ea 100644 --- a/scripts/helm/roles/openreplay/templates/integrations.yaml +++ b/scripts/helm/roles/openreplay/templates/integrations.yaml @@ -4,6 +4,6 @@ image: tag: {{ image_tag }} {% endif %} -{% if not (ecr_docker_username is defined and ecr_docker_username and ecr_docker_password is defined and ecr_docker_password) %} +{% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] {% endif %} diff --git a/scripts/helm/roles/openreplay/templates/sink.yaml b/scripts/helm/roles/openreplay/templates/sink.yaml index 560483e94..2d51506ea 100644 --- a/scripts/helm/roles/openreplay/templates/sink.yaml +++ b/scripts/helm/roles/openreplay/templates/sink.yaml @@ -4,6 +4,6 @@ image: tag: {{ image_tag }} {% endif %} -{% if not (ecr_docker_username is defined and ecr_docker_username and ecr_docker_password is defined and ecr_docker_password) %} +{% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] {% endif %} diff --git a/scripts/helm/roles/openreplay/templates/storage.yaml b/scripts/helm/roles/openreplay/templates/storage.yaml index d7be9fa9d..1f21147bb 100644 --- a/scripts/helm/roles/openreplay/templates/storage.yaml +++ b/scripts/helm/roles/openreplay/templates/storage.yaml @@ -7,6 +7,6 @@ env: AWS_ACCESS_KEY_ID: "{{ minio_access_key }}" AWS_SECRET_ACCESS_KEY: "{{ minio_secret_key }}" -{% if not (ecr_docker_username is defined and ecr_docker_username and ecr_docker_password is defined and ecr_docker_password) %} +{% if not (docker_registry_username is defined and docker_registry_username and docker_registry_password is defined and docker_registry_password) %} imagePullSecrets: [] {% endif %} diff --git a/scripts/helm/vars.yaml b/scripts/helm/vars.yaml index 
1bdd7a6bf..bdade016c 100644 --- a/scripts/helm/vars.yaml +++ b/scripts/helm/vars.yaml @@ -7,22 +7,22 @@ # Give absolute file path. # Use following command to get the full file path # `readlink -f ` -kubeconfig_path: "" +kubeconfig_path: /home/rajeshr/.kube/config ################### ## Optional Fields. ################### # If you have a private registry, please update the details here. -ecr_docker_username: "" -ecr_docker_password: "" -ecr_docker_registry_server: "rg.fr-par.scw.cloud/foss" -image_tag: v1.0.0 +docker_registry_username: "" +docker_registry_password: "" +docker_registry_url: "rg.fr-par.scw.cloud/foss" +image_tag: "latest" # This is an optional field. If you want to use proper ssl, then it's mandatory -# Using which domain name, you'll be accessing openreplay -# for exmample: domain_name: "openreplay.mycorp.org" -domain_name: "" +# The domain name you'll be using to access OpenReplay +# for example: domain_name: "test.com" +domain_name: "" # Nginx ssl certificates. # in cert format @@ -39,16 +39,21 @@ domain_name: "" nginx_ssl_cert_file_path: "" nginx_ssl_key_file_path: "" +# This key is used to create passwords for chalice API requests. +# Create a strong password. +# By default, a key will be generated and the value here will be updated. +jwt_secret_key: "" + # Enable monitoring # If set, monitoring stack will be installed # including, prometheus, grafana and other core components, -# to scrape the metrics. But this will cost, additional resources(cpu and memory). +# to scrape the metrics. But this will cost additional resources (cpu and memory). # Monitoring won't be installed on base installation. enable_monitoring: "false" # Random password for minio, # If not defined, will generate at runtime. -# Use following command to generate passwordwill give +# Use the following command to generate a password # `openssl rand -base64 30` minio_access_key: "" minio_secret_key: "" diff --git a/sourcemap-uploader/cli.js b/sourcemap-uploader/cli.js index f7b124117..c644369f3 100755 --- a/sourcemap-uploader/cli.js +++ b/sourcemap-uploader/cli.js @@ -18,10 +18,13 @@ parser.addArgument(['-p', '-i', '--project-key'], { // -i is deprecated help: 'Project Key', required: true, }); - parser.addArgument(['-s', '--server'], { help: 'OpenReplay API server URL for upload', }); +parser.addArgument(['-l', '--log'], { + help: 'Log requests information', + action: 'storeTrue', +}); const subparsers = parser.addSubparsers({ title: 'commands', @@ -50,7 +53,9 @@ dir.addArgument(['-u', '--js-dir-url'], { // TODO: exclude in dir -const { command, api_key, project_key, server, ...args } = parser.parseArgs(); +const { command, api_key, project_key, server, log, ...args } = parser.parseArgs(); + +global.LOG = !!log; try { global.SERVER = new URL(server || "https://api.openreplay.com"); diff --git a/sourcemap-uploader/lib/readDir.js b/sourcemap-uploader/lib/readDir.js index 56a51a72b..501a2949f 100644 --- a/sourcemap-uploader/lib/readDir.js +++ b/sourcemap-uploader/lib/readDir.js @@ -3,7 +3,9 @@ const readFile = require('./readFile'); module.exports = (sourcemap_dir_path, js_dir_url) => { sourcemap_dir_path = (sourcemap_dir_path + '/').replace(/\/+/g, '/'); - js_dir_url = (js_dir_url + '/').replace(/\/+/g, '/'); + if (js_dir_url[ js_dir_url.length - 1 ] !== '/') { // replace would break the URL scheme + js_dir_url += '/'; + } return glob(sourcemap_dir_path + '**/*.map').then(sourcemap_file_paths => Promise.all( sourcemap_file_paths.map(sourcemap_file_path => diff --git a/sourcemap-uploader/lib/uploadSourcemaps.js
b/sourcemap-uploader/lib/uploadSourcemaps.js index a39ce5e4d..f0c3171fd 100644 --- a/sourcemap-uploader/lib/uploadSourcemaps.js +++ b/sourcemap-uploader/lib/uploadSourcemaps.js @@ -7,14 +7,21 @@ const getUploadURLs = (api_key, project_key, js_file_urls) => } const pathPrefix = (global.SERVER.pathname + "/").replace(/\/+/g, '/'); + const options = { + method: 'PUT', + hostname: global.SERVER.host, + path: pathPrefix + `${project_key}/sourcemaps/`, + headers: { Authorization: api_key, 'Content-Type': 'application/json' }, + } + if (global.LOG) { + console.log("Request: ", options, "\nFiles: ", js_file_urls); + } const req = https.request( - { - method: 'PUT', - hostname: global.SERVER.host, - path: pathPrefix + `${project_key}/sourcemaps/`, - headers: { Authorization: api_key, 'Content-Type': 'application/json' }, - }, + options, res => { + if (global.LOG) { + console.log("Response Code: ", res.statusCode, "\nMessage: ", res.statusMessage); + } if (res.statusCode === 403) { reject("Authorisation rejected. Please, check your API_KEY and/or PROJECT_KEY.") return @@ -24,7 +31,12 @@ const getUploadURLs = (api_key, project_key, js_file_urls) => } let data = ''; res.on('data', s => (data += s)); - res.on('end', () => resolve(JSON.parse(data).data)); + res.on('end', () => { + if (global.LOG) { + console.log("Server Response: ", data) + } + resolve(JSON.parse(data).data) + }); }, ); req.on('error', reject); @@ -46,8 +58,12 @@ const uploadSourcemap = (upload_url, body) => }, res => { if (res.statusCode !== 200) { + if (global.LOG) { + console.log("Response Code: ", res.statusCode, "\nMessage: ", res.statusMessage); + } + reject("Unable to upload. Please, contact OpenReplay support."); - return; + return; // TODO: report per-file errors. } resolve(); //res.on('end', resolve); diff --git a/sourcemap-uploader/package.json b/sourcemap-uploader/package.json index 2ffe6f5b7..8f0070408 100644 --- a/sourcemap-uploader/package.json +++ b/sourcemap-uploader/package.json @@ -1,6 +1,6 @@ { "name": "@openreplay/sourcemap-uploader", - "version": "3.0.1", + "version": "3.0.2", "description": "NPM module to upload your JS sourcemaps files to OpenReplay", "bin": "cli.js", "main": "index.js", diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index 1ea5ac2bb..00bf42046 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "3.0.2", + "version": "3.0.3", "keywords": [ "logging", "replay" diff --git a/tracker/tracker/src/main/app/index.ts b/tracker/tracker/src/main/app/index.ts index 91dc20322..3fe006ce8 100644 --- a/tracker/tracker/src/main/app/index.ts +++ b/tracker/tracker/src/main/app/index.ts @@ -174,7 +174,24 @@ export default class App { _start(reset: boolean): void { // TODO: return a promise instead of onStart handling if (!this.isActive) { this.isActive = true; + if (!this.worker) { + throw new Error("Stranger things: no worker found"); + } + + let pageNo: number = 0; + const pageNoStr = sessionStorage.getItem(this.options.session_pageno_key); + if (pageNoStr != null) { + pageNo = parseInt(pageNoStr); + pageNo++; + } + sessionStorage.setItem(this.options.session_pageno_key, pageNo.toString()); const startTimestamp = timestamp(); + + this.worker.postMessage({ ingestPoint: this.options.ingestPoint, pageNo, startTimestamp }); // brings delay of 10th ms? 
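// Editor's note (illustrative, not part of the diff): session startup is now a
// two-phase handshake with the webworker. Batch metadata is posted before the
// network round-trip, and the token follows once POST /v1/web/start succeeds:
//
//   worker.postMessage({ ingestPoint, pageNo, startTimestamp }); // phase 1: batching starts
//   // ... window.fetch(ingestPoint + '/v1/web/start', ...) ...
//   worker.postMessage({ token });                               // phase 2: sending unblocked
//
// Until phase 2, the worker buffers messages: its send() guard (see the
// webworker diff below) returns early while token === "".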
+ this.observer.observe(); + this.startCallbacks.forEach((cb) => cb()); + this.ticker.start(); + window.fetch(this.options.ingestPoint + '/v1/web/start', { method: 'POST', headers: { @@ -196,7 +213,7 @@ export default class App { .then(r => { if (r.status === 200) { return r.json() - } else { // TODO: handle canceling + } else { // TODO: handle canceling && 403 throw new Error("Server error"); } }) @@ -206,26 +223,14 @@ export default class App { typeof userUUID !== 'string') { throw new Error("Incorrect server response"); } - if (!this.worker) { - throw new Error("Stranger things: worker is not started"); - } sessionStorage.setItem(this.options.session_token_key, token); localStorage.setItem(this.options.local_uuid_key, userUUID); - - let pageNo: number = 0; - const pageNoStr = sessionStorage.getItem(this.options.session_pageno_key); - if (pageNoStr != null) { - pageNo = parseInt(pageNoStr); - pageNo++; + if (!this.worker) { + throw new Error("Stranger things: no worker found after start request"); } - sessionStorage.setItem(this.options.session_pageno_key, pageNo.toString()); - - this.worker.postMessage({ ingestPoint: this.options.ingestPoint, token, pageNo, startTimestamp }); - this.observer.observe(); - this.startCallbacks.forEach((cb) => cb()); - this.ticker.start(); - log("OpenReplay tracking started."); + this.worker.postMessage({ token }); + log("OpenReplay tracking started."); if (typeof this.options.onStart === 'function') { this.options.onStart({ sessionToken: token, userUUID, sessionID: token /* back compat (deprecated) */ }); } @@ -254,7 +259,7 @@ export default class App { if (this.isActive) { try { if (this.worker) { - this.worker.postMessage(null); + this.worker.postMessage("stop"); } this.observer.disconnect(); this.nodes.clear(); diff --git a/tracker/tracker/src/main/index.ts b/tracker/tracker/src/main/index.ts index def27c55a..d6c8481df 100644 --- a/tracker/tracker/src/main/index.ts +++ b/tracker/tracker/src/main/index.ts @@ -85,6 +85,8 @@ export default class API { ?
null : new App(options.projectKey, options.sessionToken, options); if (this.app !== null) { + Viewport(this.app); + CSSRules(this.app); Connection(this.app); Console(this.app, options); Exception(this.app, options); @@ -94,9 +96,7 @@ export default class API { Timing(this.app, options); Performance(this.app); Scroll(this.app); - Viewport(this.app); Longtasks(this.app); - CSSRules(this.app); (window as any).__OPENREPLAY__ = (window as any).__OPENREPLAY__ || this; } else { console.log("OpenReplay: browser doesn't support API required for tracking.") } diff --git a/tracker/tracker/src/webworker/index.ts b/tracker/tracker/src/webworker/index.ts index 2e9d3b0e0..f61b7bca5 100644 --- a/tracker/tracker/src/webworker/index.ts +++ b/tracker/tracker/src/webworker/index.ts @@ -1,4 +1,4 @@ -import { classes, BatchMeta, Timestamp, SetPageVisibility } from '../messages'; +import { classes, BatchMeta, Timestamp, SetPageVisibility, CreateDocument } from '../messages'; import Message from '../messages/message'; import Writer from '../messages/writer'; @@ -12,8 +12,11 @@ let ingestPoint: string = ""; let token: string = ""; let pageNo: number = 0; let timestamp: number = 0; +let timeAdjustment: number = 0; let nextIndex: number = 0; -let isEmpty: boolean = true; +// TODO: clear logic: isEmpty here means presence of BatchMeta but absence of other messages +// BatchWriter should be abstracted +let isEmpty: boolean = true; function writeBatchMeta(): boolean { // TODO: move to encoder return new BatchMeta(pageNo, nextIndex, timestamp).encode(writer) @@ -67,7 +70,7 @@ function sendBatch(batch: Uint8Array):void { } function send(): void { - if (isEmpty || ingestPoint === "") { + if (isEmpty || token === "" || ingestPoint === "") { return; } const batch = writer.flush(); @@ -82,29 +85,45 @@ function send(): void { } function reset() { + ingestPoint = "" + token = "" clearInterval(sendIntervalID); writer.reset(); } let restartTimeoutID: ReturnType<typeof setTimeout>; +function hasTimestamp(msg: any): msg is { timestamp: number } { + return typeof msg === 'object' && typeof msg.timestamp === 'number'; +} + self.onmessage = ({ data }: MessageEvent) => { if (data === null) { send(); return; } - if (!Array.isArray(data)) { + if (data === "stop") { + send(); reset(); - ingestPoint = data.ingestPoint; - token = data.token; - pageNo = data.pageNo; - timestamp = data.startTimestamp; - writeBatchMeta(); - sendIntervalID = setInterval(send, SEND_INTERVAL); + return; + } + if (!Array.isArray(data)) { + ingestPoint = data.ingestPoint || ingestPoint; + token = data.token || token; + pageNo = data.pageNo || pageNo; + timestamp = data.startTimestamp || timestamp; + timeAdjustment = data.timeAdjustment || timeAdjustment; + if (writer.isEmpty()) { + writeBatchMeta(); + } + if (sendIntervalID == null) { + sendIntervalID = setInterval(send, SEND_INTERVAL); + } return; } data.forEach((data: any) => { const message: Message = new (classes.get(data._id))(); + Object.assign(message, data); if (message instanceof Timestamp) { timestamp = (message).timestamp; @@ -116,7 +135,6 @@ self.onmessage = ({ data }: MessageEvent) => { } } - Object.assign(message, data); writer.checkpoint(); nextIndex++; if (message.encode(writer)) {
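// Editor's note: a condensed sketch (not the actual source) of the webworker's
// onmessage contract after this diff, summarizing the four cases the handler
// above distinguishes; bodies are simplified.
//
//   self.onmessage = ({ data }) => {
//     if (data === null) { send(); return; }            // flush the current batch
//     if (data === "stop") { send(); reset(); return; } // final flush, then clear state
//     if (!Array.isArray(data)) {
//       // partial meta update: merge ingestPoint/token/pageNo/startTimestamp,
//       // write BatchMeta once, and start the send interval if needed
//       return;
//     }
//     // otherwise: re-hydrate each serialized message and encode it into the batch
//   };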