mirror of
https://github.com/openobserve/goflow2.git
synced 2025-11-02 13:03:18 +00:00
Add SASL/SCRAM support to Kafka transport. (#121)
This pull request adds the optional -transport.kafka.sasl flag, which tells the Kafka transport which SASL mechanism (including SCRAM) to use for authentication. This allows goflow2 to work with AWS Managed Streaming for Kafka (MSK), which does not support SASL/PLAIN. Co-authored-by: Brian Landers <brian.landers@meraki.net> Co-authored-by: lspgn <lspgn@users.noreply.github.com>
This commit is contained in:
@@ -20,7 +20,8 @@ import (
|
||||
|
||||
type KafkaDriver struct {
|
||||
kafkaTLS bool
|
||||
kafkaSASL bool
|
||||
kafkaSASL string
|
||||
kafkaSCRAM string
|
||||
kafkaTopic string
|
||||
kafkaSrv string
|
||||
kafkaBrk string
|
||||
@@ -30,8 +31,8 @@ type KafkaDriver struct {
|
||||
|
||||
kafkaLogErrors bool
|
||||
|
||||
kafkaHashing bool
|
||||
kafkaVersion string
|
||||
kafkaHashing bool
|
||||
kafkaVersion string
|
||||
kafkaCompressionCodec string
|
||||
|
||||
producer sarama.AsyncProducer
|
||||
@@ -39,20 +40,44 @@ type KafkaDriver struct {
|
||||
q chan bool
|
||||
}
|
||||
|
||||
type KafkaSASLAlgorithm string
|
||||
|
||||
const (
|
||||
KAFKA_SASL_NONE KafkaSASLAlgorithm = "none"
|
||||
KAFKA_SASL_PLAIN KafkaSASLAlgorithm = "plain"
|
||||
KAFKA_SASL_SCRAM_SHA256 KafkaSASLAlgorithm = "scram-sha256"
|
||||
KAFKA_SASL_SCRAM_SHA512 KafkaSASLAlgorithm = "scram-sha512"
|
||||
)
|
||||
|
||||
var (
|
||||
compressionCodecs = map[string]sarama.CompressionCodec{
|
||||
strings.ToLower(sarama.CompressionNone.String()): sarama.CompressionNone,
|
||||
strings.ToLower(sarama.CompressionGZIP.String()): sarama.CompressionGZIP,
|
||||
strings.ToLower(sarama.CompressionNone.String()): sarama.CompressionNone,
|
||||
strings.ToLower(sarama.CompressionGZIP.String()): sarama.CompressionGZIP,
|
||||
strings.ToLower(sarama.CompressionSnappy.String()): sarama.CompressionSnappy,
|
||||
strings.ToLower(sarama.CompressionLZ4.String()): sarama.CompressionLZ4,
|
||||
strings.ToLower(sarama.CompressionZSTD.String()): sarama.CompressionZSTD,
|
||||
strings.ToLower(sarama.CompressionLZ4.String()): sarama.CompressionLZ4,
|
||||
strings.ToLower(sarama.CompressionZSTD.String()): sarama.CompressionZSTD,
|
||||
}
|
||||
|
||||
saslAlgorithms = map[KafkaSASLAlgorithm]bool{
|
||||
KAFKA_SASL_PLAIN: true,
|
||||
KAFKA_SASL_SCRAM_SHA256: true,
|
||||
KAFKA_SASL_SCRAM_SHA512: true,
|
||||
}
|
||||
saslAlgorithmsList = []string{
|
||||
string(KAFKA_SASL_NONE),
|
||||
string(KAFKA_SASL_PLAIN),
|
||||
string(KAFKA_SASL_SCRAM_SHA256),
|
||||
string(KAFKA_SASL_SCRAM_SHA512),
|
||||
}
|
||||
)
|
||||
|
||||
func (d *KafkaDriver) Prepare() error {
|
||||
flag.BoolVar(&d.kafkaTLS, "transport.kafka.tls", false, "Use TLS to connect to Kafka")
|
||||
flag.StringVar(&d.kafkaSASL, "transport.kafka.sasl", "none",
|
||||
fmt.Sprintf(
|
||||
"Use SASL to connect to Kafka, available settings: %s (TLS is recommended and the environment variables KAFKA_SASL_USER and KAFKA_SASL_PASS need to be set)",
|
||||
strings.Join(saslAlgorithmsList, ", ")))
|
||||
|
||||
flag.BoolVar(&d.kafkaSASL, "transport.kafka.sasl", false, "Use SASL/PLAIN data to connect to Kafka (TLS is recommended and the environment variables KAFKA_SASL_USER and KAFKA_SASL_PASS need to be set)")
|
||||
flag.StringVar(&d.kafkaTopic, "transport.kafka.topic", "flow-messages", "Kafka topic to produce to")
|
||||
flag.StringVar(&d.kafkaSrv, "transport.kafka.srv", "", "SRV record containing a list of Kafka brokers (or use brokers)")
|
||||
flag.StringVar(&d.kafkaBrk, "transport.kafka.brokers", "127.0.0.1:9092,[::1]:9092", "Kafka brokers list separated by commas")
|
||||
@@ -86,14 +111,14 @@ func (d *KafkaDriver) Init(context.Context) error {
|
||||
|
||||
if d.kafkaCompressionCodec != "" {
|
||||
/*
|
||||
// when upgrading sarama, replace with:
|
||||
// note: if the library adds more codecs, they will be supported natively
|
||||
var cc *sarama.CompressionCodec
|
||||
// when upgrading sarama, replace with:
|
||||
// note: if the library adds more codecs, they will be supported natively
|
||||
var cc *sarama.CompressionCodec
|
||||
|
||||
if err := cc.UnmarshalText([]byte(d.kafkaCompressionCodec)); err != nil {
|
||||
return err
|
||||
}
|
||||
kafkaConfig.Producer.Compression = *cc
|
||||
if err := cc.UnmarshalText([]byte(d.kafkaCompressionCodec)); err != nil {
|
||||
return err
|
||||
}
|
||||
kafkaConfig.Producer.Compression = *cc
|
||||
*/
|
||||
|
||||
if cc, ok := compressionCodecs[strings.ToLower(d.kafkaCompressionCodec)]; !ok {
|
||||
@@ -102,7 +127,7 @@ func (d *KafkaDriver) Init(context.Context) error {
|
||||
kafkaConfig.Producer.Compression = cc
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if d.kafkaTLS {
|
||||
rootCAs, err := x509.SystemCertPool()
|
||||
if err != nil {
|
||||
@@ -116,17 +141,34 @@ func (d *KafkaDriver) Init(context.Context) error {
|
||||
kafkaConfig.Producer.Partitioner = sarama.NewHashPartitioner
|
||||
}
|
||||
|
||||
if d.kafkaSASL {
|
||||
if !d.kafkaTLS /*&& log != nil*/ {
|
||||
log.Warn("Using SASL without TLS will transmit the authentication in plaintext!")
|
||||
kafkaSASL := KafkaSASLAlgorithm(d.kafkaSASL)
|
||||
if d.kafkaSASL != "" && kafkaSASL != KAFKA_SASL_NONE {
|
||||
_, ok := saslAlgorithms[KafkaSASLAlgorithm(strings.ToLower(d.kafkaSASL))]
|
||||
if !ok {
|
||||
return errors.New("SASL algorithm does not exist")
|
||||
}
|
||||
|
||||
kafkaConfig.Net.SASL.Enable = true
|
||||
kafkaConfig.Net.SASL.User = os.Getenv("KAFKA_SASL_USER")
|
||||
kafkaConfig.Net.SASL.Password = os.Getenv("KAFKA_SASL_PASS")
|
||||
if kafkaConfig.Net.SASL.User == "" && kafkaConfig.Net.SASL.Password == "" {
|
||||
return errors.New("Kafka SASL config from environment was unsuccessful. KAFKA_SASL_USER and KAFKA_SASL_PASS need to be set.")
|
||||
} else /*if log != nil*/ {
|
||||
log.Infof("Authenticating as user '%s'...", kafkaConfig.Net.SASL.User)
|
||||
}
|
||||
|
||||
if kafkaSASL == KAFKA_SASL_SCRAM_SHA256 || kafkaSASL == KAFKA_SASL_SCRAM_SHA512 {
|
||||
kafkaConfig.Net.SASL.Handshake = true
|
||||
|
||||
if kafkaSASL == KAFKA_SASL_SCRAM_SHA512 {
|
||||
kafkaConfig.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient {
|
||||
return &XDGSCRAMClient{HashGeneratorFcn: SHA512}
|
||||
}
|
||||
kafkaConfig.Net.SASL.Mechanism = sarama.SASLTypeSCRAMSHA512
|
||||
} else if kafkaSASL == KAFKA_SASL_SCRAM_SHA256 {
|
||||
kafkaConfig.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient {
|
||||
return &XDGSCRAMClient{HashGeneratorFcn: SHA256}
|
||||
}
|
||||
kafkaConfig.Net.SASL.Mechanism = sarama.SASLTypeSCRAMSHA256
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
39
transport/kafka/scram_client.go
Normal file
39
transport/kafka/scram_client.go
Normal file
@@ -0,0 +1,39 @@
|
||||
package kafka

// SCRAM client shim for sarama, adapted from
// https://github.com/Shopify/sarama/blob/main/examples/sasl_scram_client/scram_client.go

import (
	"crypto/sha256"
	"crypto/sha512"

	"github.com/xdg-go/scram"
)

// Hash generator functions for the two supported SCRAM mechanisms.
var (
	SHA256 scram.HashGeneratorFcn = sha256.New
	SHA512 scram.HashGeneratorFcn = sha512.New
)

// XDGSCRAMClient adapts the xdg-go/scram client to sarama's SCRAMClient
// interface (Begin/Step/Done).
type XDGSCRAMClient struct {
	*scram.Client
	*scram.ClientConversation
	scram.HashGeneratorFcn
}

// Begin creates the underlying SCRAM client for the given credentials and
// starts a new authentication conversation with it.
func (x *XDGSCRAMClient) Begin(userName, password, authzID string) (err error) {
	x.Client, err = x.HashGeneratorFcn.NewClient(userName, password, authzID)
	if err != nil {
		return err
	}
	x.ClientConversation = x.Client.NewConversation()
	return nil
}

// Step feeds the broker's challenge to the conversation and returns the
// client's next message in the exchange.
func (x *XDGSCRAMClient) Step(challenge string) (response string, err error) {
	return x.ClientConversation.Step(challenge)
}

// Done reports whether the SCRAM conversation has completed.
func (x *XDGSCRAMClient) Done() bool {
	return x.ClientConversation.Done()
}
|
||||
Reference in New Issue
Block a user