feat(backend): enable Kerberos authentication for Kafka communication… (#807)

* feat(backend): enable Kerberos authentication for Kafka communication between services in the Enterprise Edition

* feat(backend): put default value for KAFKA_USE_KERBEROS

* feat(backend): add Kerberos auth for Kafka - add comments for the env vars that configure the Kerberos auth
Authored by Dayan Graham on 2022-11-22 12:10:01 +01:00, committed by GitHub
parent e67c3ec876
commit 5d37d2da1e
4 changed files with 49 additions and 6 deletions

@@ -1,6 +1,6 @@
FROM golang:1.18-alpine3.15 AS prepare
RUN apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash
RUN apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash librdkafka-dev cyrus-sasl cyrus-sasl-gssapiv2 krb5
WORKDIR /root
@@ -15,11 +15,11 @@ COPY pkg pkg
COPY internal internal
ARG SERVICE_NAME
RUN CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o service -tags musl openreplay/backend/cmd/$SERVICE_NAME
RUN CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o service -tags dynamic openreplay/backend/cmd/$SERVICE_NAME
FROM alpine AS entrypoint
RUN apk add --no-cache ca-certificates
RUN apk add --no-cache ca-certificates librdkafka-dev cyrus-sasl cyrus-sasl-gssapiv2 krb5
RUN adduser -u 1001 openreplay -D
ENV TZ=UTC \
@@ -29,6 +29,18 @@ ENV TZ=UTC \
UAPARSER_FILE=/home/openreplay/regexes.yaml \
HTTP_PORT=8080 \
KAFKA_USE_SSL=true \
# KAFKA_USE_KERBEROS should be set to true if you wish to use Kerberos auth for Kafka
KAFKA_USE_KERBEROS=false \
# KERBEROS_SERVICE_NAME is the primary name of the Brokers configured in the Broker JAAS file
KERBEROS_SERVICE_NAME="" \
# KERBEROS_PRINCIPAL is this client's principal name
KERBEROS_PRINCIPAL="" \
# KERBEROS_KEYTAB_LOCATION is the absolute path to the keytab to be used for authentication
KERBEROS_KEYTAB_LOCATION="" \
# KAFKA_SSL_KEY is the absolute path to the client's private key (PEM) used for SSL authentication
KAFKA_SSL_KEY="" \
# KAFKA_SSL_CERT is the absolute path to the client's certificate (PEM) used for SSL authentication
KAFKA_SSL_CERT="" \
KAFKA_MAX_POLL_INTERVAL_MS=400000 \
REDIS_STREAMS_MAX_LEN=10000 \
TOPIC_RAW_WEB=raw \
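The ENV block above only declares defaults; nothing in the Dockerfile checks that the Kerberos settings are consistent. As a minimal sketch, not part of this commit, a service could validate the variables at startup before any Kafka client is built (the validateKerberosEnv helper below is hypothetical):

// Hypothetical startup check, not part of this commit: verify the Kerberos
// env vars declared in the ENV block above before any Kafka client is built.
package main

import (
	"fmt"
	"os"
)

func validateKerberosEnv() error {
	if os.Getenv("KAFKA_USE_KERBEROS") != "true" {
		return nil // Kerberos disabled, nothing to validate
	}
	for _, name := range []string{"KERBEROS_SERVICE_NAME", "KERBEROS_PRINCIPAL", "KERBEROS_KEYTAB_LOCATION"} {
		if os.Getenv(name) == "" {
			return fmt.Errorf("%s must be set when KAFKA_USE_KERBEROS=true", name)
		}
	}
	// The keytab has to be readable inside the container at the configured path.
	if _, err := os.Stat(os.Getenv("KERBEROS_KEYTAB_LOCATION")); err != nil {
		return fmt.Errorf("keytab not readable: %w", err)
	}
	return nil
}

func main() {
	if err := validateKerberosEnv(); err != nil {
		fmt.Println("kerberos configuration error:", err)
		os.Exit(1)
	}
}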

@@ -1,6 +1,6 @@
FROM golang:1.18-alpine3.15 AS prepare
RUN apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash
RUN apk add --no-cache git openssh openssl-dev pkgconf gcc g++ make libc-dev bash librdkafka-dev cyrus-sasl-gssapi cyrus-sasl-devel
WORKDIR /root
@@ -14,11 +14,11 @@ COPY cmd cmd
COPY pkg pkg
COPY internal internal
RUN for name in assets db ender http integrations sink storage;do CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o bin/$name -tags musl openreplay/backend/cmd/$name; done
RUN for name in assets db ender http integrations sink storage;do CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o bin/$name -tags dynamic openreplay/backend/cmd/$name; done
FROM alpine AS entrypoint
#FROM pygmy/alpine-tini:latest
RUN apk add --no-cache ca-certificates
RUN apk add --no-cache ca-certificates librdkafka-dev cyrus-sasl-gssapi cyrus-sasl-devel pkgconf
ENV TZ=UTC \
FS_ULIMIT=1000 \
@@ -28,6 +28,18 @@ ENV TZ=UTC \
HTTP_PORT=80 \
BEACON_SIZE_LIMIT=7000000 \
KAFKA_USE_SSL=true \
# KAFKA_USE_KERBEROS should be set to true if you wish to use Kerberos auth for Kafka
KAFKA_USE_KERBEROS=false \
# KERBEROS_SERVICE_NAME is the primary name of the Brokers configured in the Broker JAAS file
KERBEROS_SERVICE_NAME="" \
# KERBEROS_PRINCIPAL is this client's principal name
KERBEROS_PRINCIPAL="" \
# KERBEROS_KEYTAB_LOCATION is the absolute path to the keytab to be used for authentication
KERBEROS_KEYTAB_LOCATION="" \
# KAFKA_SSL_KEY is the absolute path to the client's private key (PEM) used for SSL authentication
KAFKA_SSL_KEY="" \
# KAFKA_SSL_CERT is the absolute path to the client's certificate (PEM) used for SSL authentication
KAFKA_SSL_CERT="" \
KAFKA_MAX_POLL_INTERVAL_MS=400000 \
REDIS_STREAMS_MAX_LEN=3000 \
TOPIC_RAW_WEB=raw \

@@ -47,6 +47,16 @@ func NewConsumer(
kafkaConfig.SetKey("ssl.key.location", os.Getenv("KAFKA_SSL_KEY"))
kafkaConfig.SetKey("ssl.certificate.location", os.Getenv("KAFKA_SSL_CERT"))
}
// Apply Kerberos configuration
if env.Bool("KAFKA_USE_KERBEROS") {
kafkaConfig.SetKey("security.protocol", "sasl_plaintext")
kafkaConfig.SetKey("sasl.mechanisms", "GSSAPI")
kafkaConfig.SetKey("sasl.kerberos.service.name", os.Getenv("KERBEROS_SERVICE_NAME"))
kafkaConfig.SetKey("sasl.kerberos.principal", os.Getenv("KERBEROS_PRINCIPAL"))
kafkaConfig.SetKey("sasl.kerberos.keytab", os.Getenv("KERBEROS_KEYTAB_LOCATION"))
}
c, err := kafka.NewConsumer(kafkaConfig)
if err != nil {
log.Fatalln(err)
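The consumer gates the new block on env.Bool("KAFKA_USE_KERBEROS"). That helper is not shown in this diff; a minimal sketch of what such a helper might look like, assuming it simply parses a boolean environment variable and treats unset or malformed values as false (the real pkg/env implementation may differ):

// Sketch only: the real pkg/env implementation may differ.
package env

import (
	"os"
	"strconv"
)

// Bool reads an environment variable and interprets it as a boolean,
// returning false when the variable is unset or cannot be parsed.
func Bool(name string) bool {
	value, err := strconv.ParseBool(os.Getenv(name))
	if err != nil {
		return false
	}
	return value
}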

@@ -30,6 +30,15 @@ func NewProducer(messageSizeLimit int, useBatch bool) *Producer {
kafkaConfig.SetKey("ssl.key.location", os.Getenv("KAFKA_SSL_KEY"))
kafkaConfig.SetKey("ssl.certificate.location", os.Getenv("KAFKA_SSL_CERT"))
}
// Apply Kerberos configuration
if env.Bool("KAFKA_USE_KERBEROS") {
kafkaConfig.SetKey("security.protocol", "sasl_plaintext")
kafkaConfig.SetKey("sasl.mechanisms", "GSSAPI")
kafkaConfig.SetKey("sasl.kerberos.service.name", os.Getenv("KERBEROS_SERVICE_NAME"))
kafkaConfig.SetKey("sasl.kerberos.principal", os.Getenv("KERBEROS_PRINCIPAL"))
kafkaConfig.SetKey("sasl.kerberos.keytab", os.Getenv("KERBEROS_KEYTAB_LOCATION"))
}
producer, err := kafka.NewProducer(kafkaConfig)
if err != nil {
log.Fatalln(err)
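The Kerberos block added to NewProducer is identical to the one in NewConsumer. As a sketch of a possible follow-up, not part of this commit, the five SetKey calls could live in one shared helper so the two stay in sync, assuming the confluent-kafka-go client that the existing kafkaConfig.SetKey calls appear to use; note that librdkafka expects security.protocol=sasl_ssl, not sasl_plaintext, when GSSAPI is combined with TLS:

// Hypothetical shared helper, not in this commit: apply the Kerberos
// settings from the environment to a librdkafka ConfigMap.
package queue

import (
	"os"

	"github.com/confluentinc/confluent-kafka-go/kafka"
)

func applyKerberosConfig(kafkaConfig *kafka.ConfigMap) {
	// Mirrors the commit; switch to "sasl_ssl" to combine GSSAPI with TLS.
	kafkaConfig.SetKey("security.protocol", "sasl_plaintext")
	kafkaConfig.SetKey("sasl.mechanisms", "GSSAPI")
	kafkaConfig.SetKey("sasl.kerberos.service.name", os.Getenv("KERBEROS_SERVICE_NAME"))
	kafkaConfig.SetKey("sasl.kerberos.principal", os.Getenv("KERBEROS_PRINCIPAL"))
	kafkaConfig.SetKey("sasl.kerberos.keytab", os.Getenv("KERBEROS_KEYTAB_LOCATION"))
}

Both NewConsumer and NewProducer could then call applyKerberosConfig(kafkaConfig) inside their env.Bool("KAFKA_USE_KERBEROS") branches.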