Patch/api v1.12.0 (#1301)

* fix(chalice): changed base image to support SSO/xmlsec

* fix(chalice): fixed exp search null metadata
Kraiem Taha Yassine 2023-06-01 15:50:22 +02:00 committed by GitHub
parent 4b8cf9742c
commit ab000751d2
2 changed files with 7 additions and 10 deletions

File 1: Dockerfile (chalice)

@@ -1,8 +1,6 @@
-#FROM python:3.11-alpine
-FROM python:3.11-alpine3.17
+FROM python:3.11-slim
 LABEL Maintainer="KRAIEM Taha Yassine<tahayk2@gmail.com>"
-#RUN apk add --no-cache build-base libressl libffi-dev libressl-dev libxslt-dev libxml2-dev xmlsec-dev xmlsec tini
-RUN apk add --no-cache build-base libressl libffi-dev libressl-dev libxslt-dev libxml2-dev xmlsec-dev xmlsec tini
+RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc tini && rm -rf /var/lib/apt/lists/*
 ARG envarg
 ENV SOURCE_MAP_VERSION=0.7.4 \
@@ -18,8 +16,7 @@ RUN pip install --no-cache-dir --upgrade -r requirements.txt
 COPY . .
 RUN mv env.default .env
-#RUN adduser -u 1001 openreplay -D
-RUN adduser -u 1001 openreplay -D
+RUN useradd -mu 1001 openreplay
 USER 1001
 ENTRYPOINT ["/sbin/tini", "--"]
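The new apt-get line installs what pip needs to compile the xmlsec C extension on a Debian slim base (pkg-config, the libxmlsec1 headers, and gcc), which per the commit message is what the move away from Alpine is about. As a hedged smoke test, not part of this patch and assuming the SSO path uses the python xmlsec binding on top of lxml, something like this could be run inside the built image:

# Confirms the compiled xmlsec binding loads against the system libxmlsec1
# installed by the new apt-get layer.
import lxml.etree as etree  # xmlsec operates on lxml element trees
import xmlsec               # C extension built against libxmlsec1-dev at pip-install time

root = etree.Element("root")
# Building a signature template exercises the native library end to end.
sig = xmlsec.template.create(root, xmlsec.Transform.EXCL_C14N, xmlsec.Transform.RSA_SHA256)
print("xmlsec binding OK:", sig is not None)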

File 2: sessions search module (chalice)

@@ -1,4 +1,4 @@
-import json
+import ast
 from typing import List, Union
 import schemas
@@ -158,7 +158,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
     meta_keys = metadata.get(project_id=project_id)
     meta_map = ",map(%s) AS 'metadata'" \
-               % ','.join([f"'{m['key']}',metadata_{m['index']}" for m in meta_keys])
+               % ','.join([f"'{m['key']}',coalesce(metadata_{m['index']},'None')" for m in meta_keys])
     main_query = cur.mogrify(f"""SELECT COUNT(*) AS count,
                                     COALESCE(JSONB_AGG(users_sessions)
                                              FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions
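The only change in this hunk is that each metadata column is wrapped in coalesce(..., 'None') before it is fed to map(). A hedged illustration of the fragment this builds (the key names and indexes below are hypothetical):

# Hypothetical meta_keys; only the coalesce wrapper differs between old and new.
meta_keys = [{"key": "plan", "index": 1}, {"key": "version", "index": 2}]

old_map = ",map(%s) AS 'metadata'" % ','.join([f"'{m['key']}',metadata_{m['index']}" for m in meta_keys])
new_map = ",map(%s) AS 'metadata'" % ','.join([f"'{m['key']}',coalesce(metadata_{m['index']},'None')" for m in meta_keys])

print(old_map)  # ,map('plan',metadata_1,'version',metadata_2) AS 'metadata'
print(new_map)  # ,map('plan',coalesce(metadata_1,'None'),'version',coalesce(metadata_2,'None')) AS 'metadata'

With the coalesce in place, a NULL metadata column reaches the client as the literal string 'None', which keeps the stringified map parsable; the commit message ties the original failure to exactly this null-metadata case.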
@@ -238,10 +238,10 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_
         for i, s in enumerate(sessions):
             sessions[i] = {**s.pop("last_session")[0], **s}
             sessions[i].pop("rn")
-            sessions[i]["metadata"] = json.loads(sessions[i]["metadata"].replace("'", '"'))
+            sessions[i]["metadata"] = ast.literal_eval(sessions[i]["metadata"])
     else:
         for i in range(len(sessions)):
-            sessions[i]["metadata"] = json.loads(sessions[i]["metadata"].replace("'", '"'))
+            sessions[i]["metadata"] = ast.literal_eval(sessions[i]["metadata"])
             sessions[i] = schemas_ee.SessionModel.parse_obj(helper.dict_to_camel_case(sessions[i]))
     # if not data.group_by_user and data.sort is not None and data.sort != "session_id":
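The parsing switch is what makes those values survive on the Python side. A minimal sketch of the difference, assuming the driver returns the map column as a Python-literal-style string (the exact wire format is not visible in this diff):

import ast
import json

# Assumed shape of the stringified map column; a NULL metadata value could surface
# as a bare None token, which the old quote-swapping approach cannot digest.
raw = "{'plan':'enterprise','tracking':None}"

try:
    json.loads(raw.replace("'", '"'))   # old path
except json.JSONDecodeError as e:
    print("json.loads rejects the bare None:", e)

print(ast.literal_eval(raw))            # new path: {'plan': 'enterprise', 'tracking': None}

ast.literal_eval only accepts Python literals, so it avoids executing arbitrary input while tolerating None and quote characters inside values that the replace("'", '"') trick used to mangle.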