v1.7.0 enhanced (#606)

* feat(assist): changed Dockerfile

* feat(assist): changed Dockerfile

* feat(assist): changed Dockerfile

* feat(assist): changed lock file

* feat(assist): changed Dockerfile

* feat(chalice): return role name after update user

* feat(chalice): changed sessions search

* feat(chalice): changed sessions search

* feat(chalice): changed Dockerfile
feat(chalice): changed entrypoint
feat(alerts): changed Dockerfile
feat(alerts): changed entrypoint

* feat(assist): handle null uws payload

* feat(crons): fixed coroutine

* feat(chalice): optimize get projects
This commit is contained in:
Kraiem Taha Yassine 2022-07-13 22:40:29 +02:00 committed by GitHub
parent 8281ddcc6c
commit d74a82383d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
15 changed files with 518 additions and 373 deletions

View file

@@ -1,23 +1,15 @@
FROM python:3.10-slim
FROM python:3.10-alpine
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
RUN apk add --no-cache nodejs npm tini
ARG envarg
# Add Tini
# Startup daemon
ENV TINI_VERSION=v0.19.0 \
SOURCE_MAP_VERSION=0.7.4 \
ENV SOURCE_MAP_VERSION=0.7.4 \
APP_NAME=chalice \
ENTERPRISE_BUILD=${envarg}
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
ADD https://unpkg.com/source-map@${SOURCE_MAP_VERSION}/lib/mappings.wasm /mappings.wasm
RUN chmod +x /tini
# Installing Nodejs
RUN apt update && apt install -y curl && \
curl -fsSL https://deb.nodesource.com/setup_18.x | bash - && \
apt install -y nodejs && \
apt remove --purge -y curl && \
rm -rf /var/lib/apt/lists/*
ADD https://unpkg.com/source-map@${SOURCE_MAP_VERSION}/lib/mappings.wasm /mappings.wasm
WORKDIR /work_tmp
COPY requirements.txt /work_tmp/requirements.txt
@@ -29,5 +21,5 @@ WORKDIR /work
COPY . .
RUN mv env.default .env && mv /work_tmp/node_modules sourcemap-reader/.
ENTRYPOINT ["/tini", "--"]
ENTRYPOINT ["/sbin/tini", "--"]
CMD ./entrypoint.sh

View file

@@ -1,16 +1,12 @@
FROM python:3.10-slim
FROM python:3.10-alpine
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
ENV APP_NAME alerts
ENV pg_minconn 2
ENV pg_maxconn 10
# Add Tini
# Startup daemon
ENV TINI_VERSION v0.19.0
RUN apk add --no-cache tini
ARG envarg
ENV ENTERPRISE_BUILD ${envarg}
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
RUN chmod +x /tini
ENV APP_NAME=alerts \
pg_minconn=2 \
pg_maxconn=10 \
ENTERPRISE_BUILD=${envarg}
COPY requirements.txt /work_tmp/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /work_tmp/requirements.txt
@@ -19,5 +15,5 @@ WORKDIR /work
COPY . .
RUN mv env.default .env && mv app_alerts.py app.py && mv entrypoint_alerts.sh entrypoint.sh
ENTRYPOINT ["/tini", "--"]
ENTRYPOINT ["/sbin/tini", "--"]
CMD ./entrypoint.sh

View file

@@ -863,12 +863,12 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)}
if f.type == schemas.FetchFilterType._url:
event_where.append(
_multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k_f})s", f.value,
value_key=e_k_f))
_multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k_f})s::text",
f.value, value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._status_code:
event_where.append(
_multiple_conditions(f"main.status_code {f.operator} %({e_k_f})s", f.value,
_multiple_conditions(f"main.status_code {f.operator} %({e_k_f})s::integer", f.value,
value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._method:
@@ -877,15 +877,15 @@ def search_query_parts(data, error_status, errors_only, favorite_only, issue, pr
apply = True
elif f.type == schemas.FetchFilterType._duration:
event_where.append(
_multiple_conditions(f"main.duration {f.operator} %({e_k_f})s", f.value, value_key=e_k_f))
_multiple_conditions(f"main.duration {f.operator} %({e_k_f})s::integer", f.value, value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._request_body:
event_where.append(
_multiple_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
_multiple_conditions(f"main.request_body {op} %({e_k_f})s::text", f.value, value_key=e_k_f))
apply = True
elif f.type == schemas.FetchFilterType._response_body:
event_where.append(
_multiple_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f))
_multiple_conditions(f"main.response_body {op} %({e_k_f})s::text", f.value, value_key=e_k_f))
apply = True
else:
print(f"undefined FETCH filter: {f.type}")

View file

@@ -1,4 +1,4 @@
#!/bin/bash
#!/bin/sh
cd sourcemap-reader
nohup npm start &> /tmp/sourcemap-reader.log &
cd ..

View file

@@ -1,3 +1,3 @@
#!/bin/bash
#!/bin/sh
uvicorn app:app --host 0.0.0.0 --reload

View file

@@ -1,19 +1,28 @@
print("============= CRONS =============")
import sys
import asyncio
from routers.crons import core_dynamic_crons
def process(action):
{
def default_action(action):
async def _func():
print(f"{action} not found in crons-definitions")
return _func
async def process(action):
await {
"TELEMETRY": core_dynamic_crons.telemetry_cron,
"JOB": core_dynamic_crons.run_scheduled_jobs,
"REPORT": core_dynamic_crons.weekly_report2
}.get(action.upper(), lambda: print(f"{action} not found in crons-definitions"))()
}.get(action.upper(), default_action(action))()
if __name__ == '__main__':
if len(sys.argv) < 2 or len(sys.argv[1]) < 1:
print("please provide actions as argument")
else:
process(sys.argv[1])
print(f"action: {sys.argv[1]}")
asyncio.run(process(sys.argv[1]))

View file

@@ -52,14 +52,23 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
AND users.tenant_id = %(tenant_id)s
AND (roles.all_projects OR roles_projects.project_id = s.project_id)
) AS role_project ON (TRUE)"""
pre_select = ""
if recorded:
pre_select = """WITH recorded_p AS (SELECT DISTINCT projects.project_id
FROM projects INNER JOIN sessions USING (project_id)
WHERE tenant_id =%(tenant_id)s
AND deleted_at IS NULL
AND duration > 0)"""
cur.execute(
cur.mogrify(f"""\
{pre_select}
SELECT
s.project_id, s.name, s.project_key, s.save_request_payloads
{',s.gdpr' if gdpr else ''}
{',COALESCE((SELECT TRUE FROM public.sessions WHERE sessions.project_id = s.project_id LIMIT 1), FALSE) AS recorded' if recorded else ''}
{',EXISTS(SELECT 1 FROM recorded_p WHERE recorded_p.project_id = s.project_id) AS recorded' if recorded else ''}
{',stack_integrations.count>0 AS stack_integrations' if stack_integrations else ''}
FROM public.projects AS s
{'LEFT JOIN recorded_p USING (project_id)' if recorded else ''}
{'LEFT JOIN LATERAL (SELECT COUNT(*) AS count FROM public.integrations WHERE s.project_id = integrations.project_id LIMIT 1) AS stack_integrations ON TRUE' if stack_integrations else ''}
{role_query if user_id is not None else ""}
WHERE s.tenant_id =%(tenant_id)s
@@ -76,7 +85,6 @@ def get_projects(tenant_id, recording_state=False, gdpr=None, recorded=False, st
WHERE sessions.start_ts >= %(startDate)s AND sessions.start_ts <= %(endDate)s
GROUP BY project_id;""",
{"startDate": TimeUTC.now(delta_days=-3), "endDate": TimeUTC.now(delta_days=1)})
cur.execute(query=query)
status = cur.fetchall()
for r in rows:

View file

@@ -168,7 +168,11 @@ def update(tenant_id, user_id, changes):
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
users.role_id;""",
users.role_id,
(SELECT roles.name
FROM roles
WHERE roles.tenant_id=%(tenant_id)s
AND roles.role_id=users.role_id) AS role_name;""",
{"tenant_id": tenant_id, "user_id": user_id, **changes})
)
if len(sub_query_bauth) > 0:
@@ -187,7 +191,11 @@ def update(tenant_id, user_id, changes):
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
users.role_id;""",
users.role_id,
(SELECT roles.name
FROM roles
WHERE roles.tenant_id=%(tenant_id)s
AND roles.role_id=users.role_id) AS role_name;""",
{"tenant_id": tenant_id, "user_id": user_id, **changes})
)

View file

@@ -56,6 +56,7 @@ ALTER TABLE IF EXISTS events.resources
PRIMARY KEY (session_id, message_id, timestamp);
COMMIT;
CREATE INDEX CONCURRENTLY IF NOT EXISTS projects_tenant_id_idx ON public.projects (tenant_id);
CREATE INDEX CONCURRENTLY IF NOT EXISTS projects_project_id_deleted_at_n_idx ON public.projects (project_id) WHERE deleted_at IS NULL;
ALTER TYPE metric_type ADD VALUE IF NOT EXISTS 'funnel';

View file

@@ -257,6 +257,7 @@ $$
);
CREATE INDEX IF NOT EXISTS projects_tenant_id_idx ON public.projects (tenant_id);
CREATE INDEX IF NOT EXISTS projects_project_key_idx ON public.projects (project_key);
CREATE INDEX IF NOT EXISTS projects_project_id_deleted_at_n_idx ON public.projects (project_id) WHERE deleted_at IS NULL;
DROP TRIGGER IF EXISTS on_insert_or_update ON projects;

View file

@@ -10,7 +10,7 @@ build.sh
servers/peerjs-server.js
servers/sourcemaps-handler.js
servers/sourcemaps-server.js
/Dockerfile
/utils/geoIP.js
/utils/HeapSnapshot.js
/utils/helper.js
/utils/assistHelper.js

15
ee/utilities/Dockerfile Normal file
View file

@@ -0,0 +1,15 @@
FROM node:18-alpine
LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
RUN apk add --no-cache tini git libc6-compat && ln -s /lib/libc.musl-x86_64.so.1 /lib/ld-linux-x86-64.so.2
ARG envarg
ENV ENTERPRISE_BUILD=${envarg} \
MAXMINDDB_FILE=/root/geoip.mmdb
WORKDIR /work
ADD https://static.openreplay.com/geoip/GeoLite2-Country.mmdb $MAXMINDDB_FILE
COPY package.json .
COPY package-lock.json .
RUN npm install
COPY . .
ENTRYPOINT ["/sbin/tini", "--"]
CMD npm start

View file

@@ -1,6 +1,7 @@
rm -rf ./utils/geoIP.js
rm -rf ./utils/HeapSnapshot.js
rm -rf ./utils/helper.js
rm -rf ./utils/assistHelper.js
rm -rf servers/peerjs-server.js
rm -rf servers/sourcemaps-handler.js

File diff suppressed because it is too large Load diff

View file

@@ -70,7 +70,10 @@ const extractPayloadFromRequest = async function (req, res) {
filters.filter.userID = [req.getQuery("userId")];
}
if (!filters.query.value) {
let body = await getBodyFromUWSResponse(res);
let body = {};
if (req.getMethod() !== 'get') {
body = await getBodyFromUWSResponse(res);
}
filters = {
...filters,
"sort": {