Merge pull request #145 from openreplay/dev

hotfix assist v1.3.0
This commit is contained in:
Shekar Siri 2021-08-13 04:13:42 +05:30 committed by GitHub
commit 42b50bf915
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
21 changed files with 198 additions and 74 deletions

View file

@ -0,0 +1,67 @@
{
"version": "2.0",
"app_name": "parrot",
"environment_variables": {
},
"stages": {
"default-foss": {
"api_gateway_stage": "default-fos",
"manage_iam_role": false,
"iam_role_arn": "",
"autogen_policy": true,
"environment_variables": {
"isFOS": "true",
"isEE": "false",
"stage": "default-foss",
"jwt_issuer": "openreplay-default-foss",
"sentryURL": "",
"pg_host": "postgresql.db.svc.cluster.local",
"pg_port": "5432",
"pg_dbname": "postgres",
"pg_user": "postgres",
"pg_password": "asayerPostgres",
"alert_ntf": "http://127.0.0.1:8000/async/alerts/notifications/%s",
"email_signup": "http://127.0.0.1:8000/async/email_signup/%s",
"email_funnel": "http://127.0.0.1:8000/async/funnel/%s",
"email_basic": "http://127.0.0.1:8000/async/basic/%s",
"assign_link": "http://127.0.0.1:8000/async/email_assignment",
"captcha_server": "",
"captcha_key": "",
"sessions_bucket": "mobs",
"sessions_region": "us-east-1",
"put_S3_TTL": "20",
"sourcemaps_reader": "http://0.0.0.0:9000/sourcemaps",
"sourcemaps_bucket": "sourcemaps",
"js_cache_bucket": "sessions-assets",
"peers": "http://0.0.0.0:9000/assist/peers",
"async_Token": "",
"EMAIL_HOST": "",
"EMAIL_PORT": "587",
"EMAIL_USER": "",
"EMAIL_PASSWORD": "",
"EMAIL_USE_TLS": "true",
"EMAIL_USE_SSL": "false",
"EMAIL_SSL_KEY": "",
"EMAIL_SSL_CERT": "",
"EMAIL_FROM": "OpenReplay<do-not-reply@openreplay.com>",
"SITE_URL": "",
"announcement_url": "",
"jwt_secret": "",
"jwt_algorithm": "HS512",
"jwt_exp_delta_seconds": "2592000",
"S3_HOST": "",
"S3_KEY": "",
"S3_SECRET": "",
"invitation_link": "/api/users/invitation?token=%s",
"change_password_link": "/reset-password?invitation=%s&&pass=%s",
"version_number": "1.2.0"
},
"lambda_timeout": 150,
"lambda_memory_size": 400,
"subnet_ids": [
],
"security_group_ids": [
]
}
}
}

View file

@ -64,4 +64,4 @@
]
}
}
}
}

View file

@ -13,4 +13,4 @@ ENV ENTERPRISE_BUILD ${envarg}
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
RUN chmod +x /tini
ENTRYPOINT ["/tini", "--"]
CMD ./entrypoint.sh
CMD ./entrypoint.sh

27
api/Dockerfile.bundle Normal file
View file

@ -0,0 +1,27 @@
# Bundle image: runs the Python (Chalice) API together with the Node.js
# "utilities" assist service inside a single container, supervised by tini.
FROM python:3.6-slim
LABEL Maintainer="Rajesh Rajendran<rjshrjndrn@gmail.com>"
WORKDIR /work
COPY . .
# NOTE(review): Docker COPY cannot reference paths outside the build context,
# so `../utilities` fails with "forbidden path outside the build context" when
# the context is the api/ directory. Verify how this image is built (e.g.
# `docker build -f api/Dockerfile.bundle .` from the repo root — in which case
# this should likely be `COPY utilities ./utilities`).
COPY ../utilities ./utilities
# Swap the stock entrypoint/config for the bundle-specific variants.
RUN rm entrypoint.sh && rm .chalice/config.json
RUN mv entrypoint.bundle.sh entrypoint.sh && mv .chalice/config.bundle.json .chalice/config.json
# Vendor the Python dependencies next to the app (Chalice packaging layout),
# and pin the Chalice CLI used by entrypoint.sh.
RUN pip install -r requirements.txt -t ./vendor --upgrade
RUN pip install chalice==1.22.2
# Installing Nodejs
# curl is only needed to fetch the NodeSource setup script, so it is purged
# again in the same layer; npm dependencies for the utilities service are
# installed here at build time.
RUN apt update && apt install -y curl && \
curl -fsSL https://deb.nodesource.com/setup_12.x | bash - && \
apt install -y nodejs && \
apt remove --purge -y curl && \
rm -rf /var/lib/apt/lists/* && \
cd utilities && \
npm install
# Add Tini
# Startup daemon
ENV TINI_VERSION v0.19.0
# envarg selects the Chalice stage (FOSS vs EE) consumed by entrypoint.sh.
ARG envarg
ENV ENTERPRISE_BUILD ${envarg}
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini
RUN chmod +x /tini
# tini reaps the backgrounded npm process spawned by entrypoint.sh.
ENTRYPOINT ["/tini", "--"]
CMD ./entrypoint.sh

View file

@ -884,6 +884,15 @@ def sessions_live(projectId, context):
return {'data': data}
@app.route('/{projectId}/assist/sessions', methods=['POST'])
def sessions_live_search(projectId, context):
    """Search the currently-live (assist) sessions of a project.

    The POST body may carry a ``filters`` list, which is forwarded to
    ``assist.get_live_sessions``; a missing/empty body means "no filters".
    """
    body = app.current_request.json_body
    if body is None:
        body = {}
    live = assist.get_live_sessions(projectId, filters=body.get("filters"))
    return {'data': live}
@app.route('/{projectId}/heatmaps/url', methods=['POST'])
def get_heatmaps_by_url(projectId, context):
data = app.current_request.json_body

View file

@ -1,5 +1,5 @@
from chalicelib.utils import pg_client, helper
from chalicelib.core import projects
from chalicelib.core import projects, sessions, sessions_metas
import requests
from chalicelib.utils.helper import environ
@ -19,7 +19,7 @@ SESSION_PROJECTION_COLS = """s.project_id,
"""
def get_live_sessions(project_id):
def get_live_sessions(project_id, filters=None):
project_key = projects.get_project_key(project_id)
connected_peers = requests.get(environ["peers"] + f"/{project_key}")
if connected_peers.status_code != 200:
@ -31,13 +31,31 @@ def get_live_sessions(project_id):
if len(connected_peers) == 0:
return []
connected_peers = tuple(connected_peers)
extra_constraints = ["project_id = %(project_id)s", "session_id IN %(connected_peers)s"]
extra_params = {}
if filters is not None:
for i, f in enumerate(filters):
if not isinstance(f.get("value"), list):
f["value"] = [f.get("value")]
if len(f["value"]) == 0 or f["value"][0] is None:
continue
filter_type = f["type"].upper()
f["value"] = sessions.__get_sql_value_multiple(f["value"])
if filter_type == sessions_metas.meta_type.USERID:
op = sessions.__get_sql_operator(f["operator"])
extra_constraints.append(f"user_id {op} %(value_{i})s")
extra_params[f"value_{i}"] = helper.string_to_sql_like_with_op(f["value"][0], op)
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
SELECT {SESSION_PROJECTION_COLS}, %(project_key)s||'-'|| session_id AS peer_id
FROM public.sessions AS s
WHERE s.project_id = %(project_id)s
AND session_id IN %(connected_peers)s;""",
{"project_id": project_id, "connected_peers": connected_peers, "project_key": project_key})
WHERE {" AND ".join(extra_constraints)}
LIMIT 500;""",
{"project_id": project_id,
"connected_peers": connected_peers,
"project_key": project_key,
**extra_params})
cur.execute(query)
results = cur.fetchall()
return helper.list_to_camel_case(results)
@ -52,4 +70,4 @@ def is_live(project_id, session_id, project_key=None):
print(connected_peers.text)
return False
connected_peers = connected_peers.json().get("data", [])
return session_id in connected_peers
return str(session_id) in connected_peers

View file

@ -99,7 +99,8 @@ def get_by_id2_pg(project_id, session_id, user_id, full_data=False, include_fav_
data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data)
data['issues'] = issues.get_by_session_id(session_id=session_id)
data['live'] = assist.is_live(project_id=project_id, session_id=session_id,
data['live'] = assist.is_live(project_id=project_id,
session_id=session_id,
project_key=data["projectKey"])
return data

6
api/entrypoint.bundle.sh Executable file
View file

@ -0,0 +1,6 @@
#!/bin/bash
# Bundle entrypoint: start the Node.js "utilities" (assist) service in the
# background, then run the Chalice API in the foreground (kept alive by tini).
# Fix: the original `cd utilities` was unchecked (ShellCheck SC2164) — if the
# directory is missing, `npm start` would silently run in the wrong directory.
cd utilities || exit 1
nohup npm start &> /tmp/utilities.log &   # logs land in /tmp/utilities.log
cd ..
python env_handler.py
# ENTERPRISE_BUILD is set at image build time and selects the Chalice stage.
chalice local --no-autoreload --host 0.0.0.0 --stage ${ENTERPRISE_BUILD}

View file

@ -25,6 +25,7 @@ ENV TZ=UTC \
MAXMINDDB_FILE=/root/geoip.mmdb \
UAPARSER_FILE=/root/regexes.yaml \
HTTP_PORT=80 \
BEACON_SIZE_LIMIT=1000000 \
KAFKA_USE_SSL=true \
REDIS_STREAMS_MAX_LEN=3000 \
TOPIC_RAW=raw \
@ -42,7 +43,7 @@ ENV TZ=UTC \
AWS_REGION_ASSETS=eu-central-1 \
CACHE_ASSETS=false \
ASSETS_SIZE_LIMIT=6291456 \
FS_CLEAN_HRS=240
FS_CLEAN_HRS=72
ARG SERVICE_NAME

View file

@ -26,6 +26,7 @@ ENV TZ=UTC \
MAXMINDDB_FILE=/root/geoip.mmdb \
UAPARSER_FILE=/root/regexes.yaml \
HTTP_PORT=80 \
BEACON_SIZE_LIMIT=1000000 \
KAFKA_USE_SSL=true \
REDIS_STREAMS_MAX_LEN=3000 \
TOPIC_RAW=raw \

View file

@ -43,6 +43,9 @@ func (d *deadClickDetector) HandleMessage(msg Message, messageID uint64, timesta
case *CreateDocument:
d.inputIDSet = nil
case *MouseClick:
if m.Label == "" {
return nil
}
i = d.HandleReaction(timestamp)
if d.inputIDSet[m.ID] { // ignore if input
return i

View file

@ -7,7 +7,7 @@ import (
func sendAssetForCache(sessionID uint64, baseURL string, relativeURL string) {
if fullURL, cacheable := assets.GetFullCachableURL(baseURL, relativeURL); cacheable {
producer.Produce(topicTrigger, sessionID, messages.Encode(&messages.AssetCache{
producer.Produce(TOPIC_TRIGGER, sessionID, messages.Encode(&messages.AssetCache{
URL: fullURL,
}))
}
@ -20,7 +20,7 @@ func sendAssetsForCacheFromCSS(sessionID uint64, baseURL string, css string) {
}
func handleURL(sessionID uint64, baseURL string, url string) string {
if cacheAssets {
if CACHE_ASSESTS {
rewrittenURL, isCachable := rewriter.RewriteURL(sessionID, baseURL, url)
if isCachable {
sendAssetForCache(sessionID, baseURL, url)
@ -31,7 +31,7 @@ func handleURL(sessionID uint64, baseURL string, url string) string {
}
func handleCSS(sessionID uint64, baseURL string, css string) string {
if cacheAssets {
if CACHE_ASSESTS {
sendAssetsForCacheFromCSS(sessionID, baseURL, css)
return rewriter.RewriteCSS(sessionID, baseURL, css)
}

View file

@ -19,7 +19,6 @@ import (
)
const JSON_SIZE_LIMIT int64 = 1e3 // 1Kb
const BATCH_SIZE_LIMIT int64 = 1e6 // 1Mb
func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) {
type request struct {
@ -90,7 +89,7 @@ func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) {
tokenData = &token.TokenData{sessionID, expTime.UnixNano() / 1e6}
country := geoIP.ExtractISOCodeFromHTTPRequest(r)
producer.Produce(topicRaw, tokenData.ID, Encode(&SessionStart{
producer.Produce(TOPIC_RAW, tokenData.ID, Encode(&SessionStart{
Timestamp: req.Timestamp,
ProjectID: uint64(p.ProjectID),
TrackerVersion: req.TrackerVersion,
@ -120,7 +119,7 @@ func startSessionHandlerWeb(w http.ResponseWriter, r *http.Request) {
}
func pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64) {
body := http.MaxBytesReader(w, r.Body, BATCH_SIZE_LIMIT)
body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT)
//defer body.Close()
var reader io.ReadCloser
switch r.Header.Get("Content-Encoding") {
@ -139,7 +138,7 @@ func pushMessages(w http.ResponseWriter, r *http.Request, sessionID uint64) {
responseWithError(w, http.StatusInternalServerError, err) // TODO: send error here only on staging
return
}
producer.Produce(topicRaw, sessionID, buf) // What if not able to send?
producer.Produce(TOPIC_RAW, sessionID, buf) // What if not able to send?
w.WriteHeader(http.StatusOK)
}
@ -158,7 +157,7 @@ func pushMessagesSeparatelyHandler(w http.ResponseWriter, r *http.Request) {
responseWithError(w, http.StatusUnauthorized, err)
return
}
body := http.MaxBytesReader(w, r.Body, BATCH_SIZE_LIMIT)
body := http.MaxBytesReader(w, r.Body, BEACON_SIZE_LIMIT)
//defer body.Close()
buf, err := ioutil.ReadAll(body)
if err != nil {
@ -234,8 +233,8 @@ func pushMessagesSeparatelyHandler(w http.ResponseWriter, r *http.Request) {
responseWithError(w, http.StatusForbidden, err)
return
}
producer.Produce(topicRaw, sessionData.ID, rewritenBuf)
//producer.Produce(topicAnalytics, sessionData.ID, WriteBatch(analyticsMessages))
producer.Produce(TOPIC_RAW, sessionData.ID, rewritenBuf)
//producer.Produce(TOPIC_ANALYTICS, sessionData.ID, WriteBatch(analyticsMessages))
//duration := time.Now().Sub(startTime)
//log.Printf("Sended batch within %v nsec; %v nsek/byte", duration.Nanoseconds(), duration.Nanoseconds()/int64(len(buf)))
w.WriteHeader(http.StatusOK)

View file

@ -31,20 +31,22 @@ var uaParser *uaparser.UAParser
var geoIP *geoip.GeoIP
var tokenizer *token.Tokenizer
var s3 *storage.S3
var topicRaw string
var topicTrigger string
var topicAnalytics string
var TOPIC_RAW string
var TOPIC_TRIGGER string
var TOPIC_ANALYTICS string
// var kafkaTopicEvents string
var cacheAssets bool
var CACHE_ASSESTS bool
var BEACON_SIZE_LIMIT int64
func main() {
log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile)
producer = queue.NewProducer()
defer producer.Close(15000)
topicRaw = env.String("TOPIC_RAW")
topicTrigger = env.String("TOPIC_TRIGGER")
topicAnalytics = env.String("TOPIC_ANALYTICS")
TOPIC_RAW = env.String("TOPIC_RAW")
TOPIC_TRIGGER = env.String("TOPIC_TRIGGER")
TOPIC_ANALYTICS = env.String("TOPIC_ANALYTICS")
rewriter = assets.NewRewriter(env.String("ASSETS_ORIGIN"))
pgconn = cache.NewPGCache(postgres.NewConn(env.String("POSTGRES_STRING")), 1000 * 60 * 20)
defer pgconn.Close()
@ -53,7 +55,8 @@ func main() {
uaParser = uaparser.NewUAParser(env.String("UAPARSER_FILE"))
geoIP = geoip.NewGeoIP(env.String("MAXMINDDB_FILE"))
flaker = flakeid.NewFlaker(env.WorkerID())
cacheAssets = env.Bool("CACHE_ASSETS")
CACHE_ASSESTS = env.Bool("CACHE_ASSETS")
BEACON_SIZE_LIMIT = int64(env.Uint64("BEACON_SIZE_LIMIT"))
HTTP_PORT := env.String("HTTP_PORT")

View file

@ -1,5 +1,5 @@
import React, { useEffect, useState } from 'react';
import { fetchLiveList } from 'Duck/sessions';
import React, { useEffect } from 'react';
import { fetchList } from 'Duck/sessions';
import { connect } from 'react-redux';
import { NoContent, Loader } from 'UI';
import { List, Map } from 'immutable';
@ -7,45 +7,31 @@ import SessionItem from 'Shared/SessionItem';
interface Props {
loading: Boolean,
list?: List<any>,
fetchLiveList: () => void,
list?: List<any>,
fetchList: (params) => void,
filters: List<any>
}
function LiveSessionList(props: Props) {
const { loading, list, filters } = props;
const [userId, setUserId] = useState(undefined)
const { loading, list, filters } = props;
useEffect(() => {
props.fetchLiveList();
useEffect(() => {
props.fetchList(filters.toJS());
}, [])
useEffect(() => {
if (filters) {
const userIdFilter = filters.filter(i => i.key === 'USERID').first()
if (userIdFilter)
setUserId(userIdFilter.value[0])
else
setUserId(undefined)
}
}, [filters])
return (
<div>
<NoContent
title={"No live sessions!"}
// subtext="Please try changing your search parameters."
title={"No live sessions!"}
image={<img src="/img/live-sessions.png" style={{ width: '70%', marginBottom: '30px' }}/>}
show={ !loading && list && list.size === 0}
>
<Loader loading={ loading }>
{list && (userId ? list.filter(i => i.userId === userId) : list).map(session => (
{list && list.map(session => (
<SessionItem
key={ session.sessionId }
session={ session }
live
// hasUserFilter={hasUserFilter}
live
/>
))}
</Loader>
@ -57,5 +43,5 @@ function LiveSessionList(props: Props) {
export default connect(state => ({
list: state.getIn(['sessions', 'liveSessions']),
loading: state.getIn([ 'sessions', 'loading' ]),
filters: state.getIn([ 'filters', 'appliedFilter', 'filters' ]),
}), { fetchLiveList })(LiveSessionList)
filters: state.getIn([ 'filters', 'appliedFilter' ]),
}), { fetchList })(LiveSessionList)

View file

@ -118,7 +118,7 @@ class Sites extends React.PureComponent {
const { modalContent, showTrackingCode } = this.state;
const isAdmin = user.admin || user.superAdmin;
const canAddSites = isAdmin && account.limits.projects && account.limits.projects.remaining !== 0;
const canDeleteSites = sites.size > 1;
const canDeleteSites = sites.size > 1 && isAdmin;
return (
<Loader loading={ loading }>
@ -189,15 +189,15 @@ class Sites extends React.PureComponent {
</div>
<div className={ stl.actions }>
<button
className="hidden cursor-pointer"
disabled={ !isAdmin || !canDeleteSites }
className={cn({'hidden' : !canDeleteSites})}
disabled={ !canDeleteSites }
onClick={ () => canDeleteSites && this.remove(_site) }
>
<Icon name="trash" size="16" color="teal" />
</button>
<button
className="hidden"
disabled={ !isAdmin || !canDeleteSites }
className={cn({'hidden' : !canDeleteSites})}
disabled={ !canDeleteSites }
onClick={ () => canDeleteSites && this.edit(_site) }
data-clickable
>

View file

@ -7,7 +7,7 @@
.main {
max-height: 100%;
display: flex;
min-height: calc(100vh - 51px);
min-height: calc(100vh - 81px);
& .tabMenu {
width: 240px;

View file

@ -21,8 +21,7 @@ interface Props {
function PageInsightsPanel({
filters, fetchInsights, events = [], insights, urlOptions, host, loading = true
}: Props) {
const [insightsFilters, setInsightsFilters] = useState(filters)
console.log('host', host)
const [insightsFilters, setInsightsFilters] = useState(filters)
const onDateChange = (e) => {
const { startDate, endDate, rangeValue } = e;

View file

@ -11,10 +11,7 @@ export default ({
empty = false,
image = null
}) => (!show ? children :
<div className={ `${ styles.wrapper } ${ size && styles[ size ] }` }>
{
image && image
}
<div className={ `${ styles.wrapper } ${ size && styles[ size ] }` }>
{
icon && <div className={ empty ? styles.emptyIcon : styles.icon } />
}
@ -23,5 +20,8 @@ export default ({
subtext &&
<div className={ styles.subtext }>{ subtext }</div>
}
{
image && <div className="mt-4 flex justify-center">{ image } </div>
}
</div>
);

View file

@ -173,12 +173,11 @@ const reducer = (state = initialState, action = {}) => {
}
}
})
})
console.log('visitedEvents', visitedEvents)
})
return state.set('current', current.merge(session))
.set('eventsIndex', matching)
.set('visitedEvents', visitedEvents)
.set('host', visitedEvents[0].host);
.set('host', visitedEvents[0] && visitedEvents[0].host);
}
case FETCH_FAVORITE_LIST.SUCCESS:
return state
@ -255,13 +254,18 @@ function init(session) {
}
}
export function fetchList(params = {}, clear = false) {
return {
export const fetchList = (params = {}, clear = false) => (dispatch, getState) => {
const activeTab = getState().getIn([ 'sessions', 'activeTab' ]);
return dispatch(activeTab && activeTab.type === 'live' ? {
types: FETCH_LIVE_LIST.toArray(),
call: client => client.post('/assist/sessions', params),
} : {
types: FETCH_LIST.toArray(),
call: client => client.post('/sessions/search2', params),
clear,
params: cleanParams(params),
};
})
}
export function fetchErrorStackList(sessionId, errorId) {

View file

@ -125,9 +125,9 @@ export default class AssistManager {
this.md.setMessagesLoading(false);
}
if (status === ConnectionStatus.Connected) {
// this.md.display(true);
this.md.display(true);
} else {
// this.md.display(false);
this.md.display(false);
}
update({ peerConnectionStatus: status });
}