Commit

NETOBSERV-397 Kafka TLS (#256)
* Kafka TLS

- Add API / config
- Read certificates / prepare TLS config for HTTP transport

* Allow insecure + ca cert

* Add TLS to kafka transformer (ingester)

* Remove currently unused server tls config

* Add TLS config tests

* Use explicit defaults from DefaultDialer
jotak authored Jul 20, 2022
1 parent 8469fe0 commit 3dbc423
Showing 10 changed files with 364 additions and 29 deletions.
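
In pipeline-configuration terms, the new "tls" block hangs off the existing kafka stage parameters for both ingest and encode, using the field names introduced in pkg/api below. A minimal YAML sketch (broker addresses, topics, and certificate paths are placeholders, not values from this commit; the stage layout is inferred from the struct tags and the pipeline_builder_test assertion below):

parameters:
  - name: ingest
    ingest:
      type: kafka
      kafka:
        brokers: ["kafka.example:9093"]
        topic: netflows
        groupid: my-group
        decoder:
          type: json
        tls:
          insecureSkipVerify: false
          caCertPath: /var/run/secrets/kafka-ca/ca.crt
  - name: encode
    encode:
      type: kafka
      kafka:
        address: kafka.example:9093
        topic: netflows-out
        tls:
          caCertPath: /var/run/secrets/kafka-ca/ca.crt
          # userCertPath and userKeyPath enable mutual TLS; Build()
          # rejects one without the other (see pkg/api/tls.go below)
          userCertPath: /var/run/secrets/kafka-user/user.crt
          userKeyPath: /var/run/secrets/kafka-user/user.key
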
15 changes: 8 additions & 7 deletions pkg/api/encode_kafka.go
@@ -18,13 +18,14 @@
package api

type EncodeKafka struct {
- Address string `yaml:"address" json:"address" doc:"address of kafka server"`
- Topic string `yaml:"topic" json:"topic" doc:"kafka topic to write to"`
- Balancer string `yaml:"balancer,omitempty" json:"balancer,omitempty" enum:"KafkaEncodeBalancerEnum" doc:"one of the following:"`
- WriteTimeout int64 `yaml:"writeTimeout,omitempty" json:"writeTimeout,omitempty" doc:"timeout (in seconds) for write operation performed by the Writer"`
- ReadTimeout int64 `yaml:"readTimeout,omitempty" json:"readTimeout,omitempty" doc:"timeout (in seconds) for read operation performed by the Writer"`
- BatchBytes int64 `yaml:"batchBytes,omitempty" json:"batchBytes,omitempty" doc:"limit the maximum size of a request in bytes before being sent to a partition"`
- BatchSize int `yaml:"batchSize,omitempty" json:"batchSize,omitempty" doc:"limit on how many messages will be buffered before being sent to a partition"`
+ Address string `yaml:"address" json:"address" doc:"address of kafka server"`
+ Topic string `yaml:"topic" json:"topic" doc:"kafka topic to write to"`
+ Balancer string `yaml:"balancer,omitempty" json:"balancer,omitempty" enum:"KafkaEncodeBalancerEnum" doc:"one of the following:"`
+ WriteTimeout int64 `yaml:"writeTimeout,omitempty" json:"writeTimeout,omitempty" doc:"timeout (in seconds) for write operation performed by the Writer"`
+ ReadTimeout int64 `yaml:"readTimeout,omitempty" json:"readTimeout,omitempty" doc:"timeout (in seconds) for read operation performed by the Writer"`
+ BatchBytes int64 `yaml:"batchBytes,omitempty" json:"batchBytes,omitempty" doc:"limit the maximum size of a request in bytes before being sent to a partition"`
+ BatchSize int `yaml:"batchSize,omitempty" json:"batchSize,omitempty" doc:"limit on how many messages will be buffered before being sent to a partition"`
+ TLS *ClientTLS `yaml:"tls" json:"tls" doc:"TLS client configuration (optional)"`
}

type KafkaEncodeBalancerEnum struct {
19 changes: 10 additions & 9 deletions pkg/api/ingest_kafka.go
@@ -18,13 +18,14 @@
package api

type IngestKafka struct {
- Brokers []string `yaml:"brokers,omitempty" json:"brokers,omitempty" doc:"list of kafka broker addresses"`
- Topic string `yaml:"topic,omitempty" json:"topic,omitempty" doc:"kafka topic to listen on"`
- GroupId string `yaml:"groupid,omitempty" json:"groupid,omitempty" doc:"separate groupid for each consumer on specified topic"`
- GroupBalancers []string `yaml:"groupBalancers,omitempty" json:"groupBalancers,omitempty" doc:"list of balancing strategies (range, roundRobin, rackAffinity)"`
- StartOffset string `yaml:"startOffset,omitempty" json:"startOffset,omitempty" doc:"FirstOffset (least recent - default) or LastOffset (most recent) offset available for a partition"`
- BatchReadTimeout int64 `yaml:"batchReadTimeout,omitempty" json:"batchReadTimeout,omitempty" doc:"how often (in milliseconds) to process input"`
- Decoder Decoder `yaml:"decoder,omitempty" json:"decoder" doc:"decoder to use (E.g. json or protobuf)"`
- BatchMaxLen int `yaml:"batchMaxLen,omitempty" json:"batchMaxLen,omitempty" doc:"the number of accumulated flows before being forwarded for processing"`
- CommitInterval int64 `yaml:"commitInterval,omitempty" json:"commitInterval,omitempty" doc:"the interval (in milliseconds) at which offsets are committed to the broker. If 0, commits will be handled synchronously."`
+ Brokers []string `yaml:"brokers,omitempty" json:"brokers,omitempty" doc:"list of kafka broker addresses"`
+ Topic string `yaml:"topic,omitempty" json:"topic,omitempty" doc:"kafka topic to listen on"`
+ GroupId string `yaml:"groupid,omitempty" json:"groupid,omitempty" doc:"separate groupid for each consumer on specified topic"`
+ GroupBalancers []string `yaml:"groupBalancers,omitempty" json:"groupBalancers,omitempty" doc:"list of balancing strategies (range, roundRobin, rackAffinity)"`
+ StartOffset string `yaml:"startOffset,omitempty" json:"startOffset,omitempty" doc:"FirstOffset (least recent - default) or LastOffset (most recent) offset available for a partition"`
+ BatchReadTimeout int64 `yaml:"batchReadTimeout,omitempty" json:"batchReadTimeout,omitempty" doc:"how often (in milliseconds) to process input"`
+ Decoder Decoder `yaml:"decoder,omitempty" json:"decoder" doc:"decoder to use (E.g. json or protobuf)"`
+ BatchMaxLen int `yaml:"batchMaxLen,omitempty" json:"batchMaxLen,omitempty" doc:"the number of accumulated flows before being forwarded for processing"`
+ CommitInterval int64 `yaml:"commitInterval,omitempty" json:"commitInterval,omitempty" doc:"the interval (in milliseconds) at which offsets are committed to the broker. If 0, commits will be handled synchronously."`
+ TLS *ClientTLS `yaml:"tls" json:"tls" doc:"TLS client configuration (optional)"`
}
66 changes: 66 additions & 0 deletions pkg/api/tls.go
@@ -0,0 +1,66 @@
+ /*
+ * Copyright (C) 2022 IBM, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+ package api
+
+ import (
+ "crypto/tls"
+ "crypto/x509"
+ "errors"
+ "io/ioutil"
+ )
+
+ type ClientTLS struct {
+ InsecureSkipVerify bool `yaml:"insecureSkipVerify,omitempty" json:"insecureSkipVerify,omitempty" doc:"skip client verifying the server's certificate chain and host name"`
+ CACertPath string `yaml:"caCertPath,omitempty" json:"caCertPath,omitempty" doc:"path to the CA certificate"`
+ UserCertPath string `yaml:"userCertPath,omitempty" json:"userCertPath,omitempty" doc:"path to the user certificate"`
+ UserKeyPath string `yaml:"userKeyPath,omitempty" json:"userKeyPath,omitempty" doc:"path to the user private key"`
+ }
+
+ func (c *ClientTLS) Build() (*tls.Config, error) {
+ tlsConfig := &tls.Config{
+ InsecureSkipVerify: c.InsecureSkipVerify,
+ }
+ if c.CACertPath != "" {
+ caCert, err := ioutil.ReadFile(c.CACertPath)
+ if err != nil {
+ return nil, err
+ }
+ tlsConfig.RootCAs = x509.NewCertPool()
+ tlsConfig.RootCAs.AppendCertsFromPEM(caCert)
+
+ if c.UserCertPath != "" && c.UserKeyPath != "" {
+ userCert, err := ioutil.ReadFile(c.UserCertPath)
+ if err != nil {
+ return nil, err
+ }
+ userKey, err := ioutil.ReadFile(c.UserKeyPath)
+ if err != nil {
+ return nil, err
+ }
+ pair, err := tls.X509KeyPair([]byte(userCert), []byte(userKey))
+ if err != nil {
+ return nil, err
+ }
+ tlsConfig.Certificates = []tls.Certificate{pair}
+ } else if c.UserCertPath != "" || c.UserKeyPath != "" {
+ return nil, errors.New("userCertPath and userKeyPath must be both present or both absent.")
+ }
+ return tlsConfig, nil
+ }
+ return nil, nil
+ }
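
Two behaviors of Build are worth noting for callers: with an empty caCertPath it returns (nil, nil), even when insecureSkipVerify is set, so downstream transports stay in plaintext mode; and a user certificate without its key (or vice versa) is rejected. A minimal caller sketch, assuming a placeholder broker address and certificate path (neither is part of this commit):

package main

import (
	"log"

	"github.com/netobserv/flowlogs-pipeline/pkg/api"
	kafkago "github.com/segmentio/kafka-go"
)

func main() {
	cfg := api.ClientTLS{CACertPath: "/etc/certs/ca.crt"} // placeholder path
	tlsConfig, err := cfg.Build() // returns (nil, nil) when CACertPath is empty
	if err != nil {
		log.Fatal(err)
	}
	// A nil *tls.Config leaves the transport in plaintext mode, which is
	// exactly what the empty-TLS tests below assert; a non-nil one
	// enables server verification against the given CA.
	writer := &kafkago.Writer{
		Addr:      kafkago.TCP("kafka.example:9093"), // placeholder broker
		Topic:     "flows",
		Transport: &kafkago.Transport{TLS: tlsConfig},
	}
	defer writer.Close()
	// writer.WriteMessages(ctx, msgs...) now negotiates TLS when configured.
}
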
6 changes: 5 additions & 1 deletion pkg/config/pipeline_builder_test.go
@@ -98,6 +98,10 @@ func TestKafkaPromPipeline(t *testing.T) {
Topic: "netflows",
GroupId: "my-group",
Decoder: api.Decoder{Type: "json"},
+ TLS: &api.ClientTLS{
+ InsecureSkipVerify: true,
+ CACertPath: "/ca.crt",
+ },
})
pl = pl.TransformFilter("filter", api.TransformFilter{
Rules: []api.TransformFilterRule{{
@@ -137,7 +141,7 @@

b, err = json.Marshal(params[0])
require.NoError(t, err)
- require.Equal(t, `{"name":"ingest","ingest":{"type":"kafka","kafka":{"brokers":["http://kafka"],"topic":"netflows","groupid":"my-group","decoder":{"type":"json"}}}}`, string(b))
+ require.Equal(t, `{"name":"ingest","ingest":{"type":"kafka","kafka":{"brokers":["http://kafka"],"topic":"netflows","groupid":"my-group","decoder":{"type":"json"},"tls":{"insecureSkipVerify":true,"caCertPath":"/ca.crt"}}}}`, string(b))

b, err = json.Marshal(params[1])
require.NoError(t, err)
36 changes: 24 additions & 12 deletions pkg/pipeline/encode/encode_kafka.go
@@ -23,6 +23,7 @@ import (

"github.com/netobserv/flowlogs-pipeline/pkg/api"
"github.com/netobserv/flowlogs-pipeline/pkg/config"
"github.com/segmentio/kafka-go"
kafkago "github.com/segmentio/kafka-go"
log "github.com/sirupsen/logrus"
"golang.org/x/net/context"
@@ -66,13 +67,13 @@ func (r *encodeKafka) Encode(in []config.GenericMap) {
// NewEncodeKafka create a new writer to kafka
func NewEncodeKafka(params config.StageParam) (Encoder, error) {
log.Debugf("entering NewEncodeKafka")
- jsonEncodeKafka := api.EncodeKafka{}
+ config := api.EncodeKafka{}
if params.Encode != nil && params.Encode.Kafka != nil {
- jsonEncodeKafka = *params.Encode.Kafka
+ config = *params.Encode.Kafka
}

var balancer kafkago.Balancer
- switch jsonEncodeKafka.Balancer {
+ switch config.Balancer {
case api.KafkaEncodeBalancerName("RoundRobin"):
balancer = &kafkago.RoundRobin{}
case api.KafkaEncodeBalancerName("LeastBytes"):
@@ -88,31 +89,42 @@ func NewEncodeKafka(params config.StageParam) (Encoder, error) {
}

readTimeoutSecs := defaultReadTimeoutSeconds
- if jsonEncodeKafka.ReadTimeout != 0 {
- readTimeoutSecs = jsonEncodeKafka.ReadTimeout
+ if config.ReadTimeout != 0 {
+ readTimeoutSecs = config.ReadTimeout
}

writeTimeoutSecs := defaultWriteTimeoutSeconds
- if jsonEncodeKafka.WriteTimeout != 0 {
- writeTimeoutSecs = jsonEncodeKafka.WriteTimeout
+ if config.WriteTimeout != 0 {
+ writeTimeoutSecs = config.WriteTimeout
}

+ transport := kafka.Transport{}
+ if config.TLS != nil {
+ log.Infof("Using TLS configuration: %v", config.TLS)
+ tlsConfig, err := config.TLS.Build()
+ if err != nil {
+ return nil, err
+ }
+ transport.TLS = tlsConfig
+ }
+
// connect to the kafka server
kafkaWriter := kafkago.Writer{
- Addr: kafkago.TCP(jsonEncodeKafka.Address),
- Topic: jsonEncodeKafka.Topic,
+ Addr: kafkago.TCP(config.Address),
+ Topic: config.Topic,
Balancer: balancer,
ReadTimeout: time.Duration(readTimeoutSecs) * time.Second,
WriteTimeout: time.Duration(writeTimeoutSecs) * time.Second,
- BatchSize: jsonEncodeKafka.BatchSize,
- BatchBytes: jsonEncodeKafka.BatchBytes,
+ BatchSize: config.BatchSize,
+ BatchBytes: config.BatchBytes,
// Temporary fix may be we should implement a batching systems
// https://github.com/segmentio/kafka-go/issues/326#issuecomment-519375403
BatchTimeout: time.Nanosecond,
+ Transport: &transport,
}

return &encodeKafka{
- kafkaParams: jsonEncodeKafka,
+ kafkaParams: config,
kafkaWriter: &kafkaWriter,
}, nil
}
58 changes: 58 additions & 0 deletions pkg/pipeline/encode/encode_kafka_test.go
@@ -21,6 +21,7 @@ import (
"encoding/json"
"testing"

"github.com/netobserv/flowlogs-pipeline/pkg/api"
"github.com/netobserv/flowlogs-pipeline/pkg/config"
"github.com/netobserv/flowlogs-pipeline/pkg/test"
kafkago "github.com/segmentio/kafka-go"
@@ -90,3 +91,60 @@ func Test_EncodeKafka(t *testing.T) {
}
require.Equal(t, expectedOutput, receivedData[0])
}

+ func Test_TLSConfigEmpty(t *testing.T) {
+ pipeline := config.NewCollectorPipeline("ingest", api.IngestCollector{})
+ pipeline.EncodeKafka("encode-kafka", api.EncodeKafka{
+ Address: "any",
+ Topic: "topic",
+ })
+ newEncode, err := NewEncodeKafka(pipeline.GetStageParams()[1])
+ require.NoError(t, err)
+ tlsConfig := newEncode.(*encodeKafka).kafkaWriter.(*kafkago.Writer).Transport.(*kafkago.Transport).TLS
+ require.Nil(t, tlsConfig)
+ }
+
+ func Test_TLSConfigCA(t *testing.T) {
+ ca, cleanup := test.CreateCACert(t)
+ defer cleanup()
+ pipeline := config.NewCollectorPipeline("ingest", api.IngestCollector{})
+ pipeline.EncodeKafka("encode-kafka", api.EncodeKafka{
+ Address: "any",
+ Topic: "topic",
+ TLS: &api.ClientTLS{
+ CACertPath: ca,
+ },
+ })
+ newEncode, err := NewEncodeKafka(pipeline.GetStageParams()[1])
+ require.NoError(t, err)
+ tlsConfig := newEncode.(*encodeKafka).kafkaWriter.(*kafkago.Writer).Transport.(*kafkago.Transport).TLS
+
+ require.Empty(t, tlsConfig.Certificates)
+ require.NotNil(t, tlsConfig.RootCAs)
+ require.Len(t, tlsConfig.RootCAs.Subjects(), 1)
+ }
+
+ func Test_MutualTLSConfig(t *testing.T) {
+ ca, user, userKey, cleanup := test.CreateAllCerts(t)
+ defer cleanup()
+ pipeline := config.NewCollectorPipeline("ingest", api.IngestCollector{})
+ pipeline.EncodeKafka("encode-kafka", api.EncodeKafka{
+ Address: "any",
+ Topic: "topic",
+ TLS: &api.ClientTLS{
+ CACertPath: ca,
+ UserCertPath: user,
+ UserKeyPath: userKey,
+ },
+ })
+ newEncode, err := NewEncodeKafka(pipeline.GetStageParams()[1])
+ require.NoError(t, err)
+
+ tlsConfig := newEncode.(*encodeKafka).kafkaWriter.(*kafkago.Writer).Transport.(*kafkago.Transport).TLS
+
+ require.Len(t, tlsConfig.Certificates, 1)
+ require.NotEmpty(t, tlsConfig.Certificates[0].Certificate)
+ require.NotNil(t, tlsConfig.Certificates[0].PrivateKey)
+ require.NotNil(t, tlsConfig.RootCAs)
+ require.Len(t, tlsConfig.RootCAs.Subjects(), 1)
+ }
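
The test.CreateCACert and test.CreateAllCerts helpers used above are not part of this diff; they write throwaway PEM files to disk and return their paths plus a cleanup function. A hypothetical sketch of what the CA helper could look like (the actual implementation in pkg/test may differ):

package test

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"crypto/x509/pkix"
	"encoding/pem"
	"io/ioutil"
	"math/big"
	"os"
	"testing"
	"time"
)

// CreateCACert writes a self-signed CA certificate to a temporary file
// and returns its path plus a cleanup function (hypothetical sketch).
func CreateCACert(t *testing.T) (string, func()) {
	key, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		t.Fatal(err)
	}
	tmpl := &x509.Certificate{
		SerialNumber:          big.NewInt(1),
		Subject:               pkix.Name{CommonName: "test-ca"},
		NotBefore:             time.Now(),
		NotAfter:              time.Now().Add(time.Hour),
		IsCA:                  true,
		KeyUsage:              x509.KeyUsageCertSign,
		BasicConstraintsValid: true,
	}
	// Self-signed: the template is its own parent.
	der, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
	if err != nil {
		t.Fatal(err)
	}
	f, err := ioutil.TempFile("", "ca-*.crt")
	if err != nil {
		t.Fatal(err)
	}
	defer f.Close()
	if err := pem.Encode(f, &pem.Block{Type: "CERTIFICATE", Bytes: der}); err != nil {
		t.Fatal(err)
	}
	return f.Name(), func() { os.Remove(f.Name()) }
}
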
15 changes: 15 additions & 0 deletions pkg/pipeline/ingest/ingest_kafka.go
@@ -25,6 +25,7 @@ import (
"github.com/netobserv/flowlogs-pipeline/pkg/config"
"github.com/netobserv/flowlogs-pipeline/pkg/pipeline/decode"
"github.com/netobserv/flowlogs-pipeline/pkg/pipeline/utils"
"github.com/segmentio/kafka-go"
kafkago "github.com/segmentio/kafka-go"
log "github.com/sirupsen/logrus"
"golang.org/x/net/context"
@@ -172,13 +173,27 @@ func NewIngestKafka(params config.StageParam) (Ingester, error) {
commitInterval = jsonIngestKafka.CommitInterval
}

+ dialer := &kafka.Dialer{
+ Timeout: kafka.DefaultDialer.Timeout,
+ DualStack: kafka.DefaultDialer.DualStack,
+ }
+ if jsonIngestKafka.TLS != nil {
+ log.Infof("Using TLS configuration: %v", jsonIngestKafka.TLS)
+ tlsConfig, err := jsonIngestKafka.TLS.Build()
+ if err != nil {
+ return nil, err
+ }
+ dialer.TLS = tlsConfig
+ }
+
kafkaReader := kafkago.NewReader(kafkago.ReaderConfig{
Brokers: jsonIngestKafka.Brokers,
Topic: jsonIngestKafka.Topic,
GroupID: jsonIngestKafka.GroupId,
GroupBalancers: groupBalancers,
StartOffset: startOffset,
CommitInterval: time.Duration(commitInterval) * time.Millisecond,
+ Dialer: dialer,
})
if kafkaReader == nil {
errMsg := "NewIngestKafka: failed to create kafka-go reader"
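
This is where the last commit-message bullet lands: the reader copies Timeout and DualStack from kafka.DefaultDialer into a fresh Dialer instead of setting TLS on DefaultDialer itself, which is a shared package-level pointer; mutating it would silently enable TLS for every kafka-go dialer in the process. A reader-side usage sketch under the same placeholder assumptions as the writer example above:

package main

import (
	"log"

	"github.com/netobserv/flowlogs-pipeline/pkg/api"
	kafkago "github.com/segmentio/kafka-go"
)

func main() {
	tlsConfig, err := (&api.ClientTLS{CACertPath: "/etc/certs/ca.crt"}).Build() // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	// Copy DefaultDialer's fields rather than mutating the shared pointer;
	// in kafka-go v0.4 the defaults are Timeout: 10s, DualStack: true.
	dialer := &kafkago.Dialer{
		Timeout:   kafkago.DefaultDialer.Timeout,
		DualStack: kafkago.DefaultDialer.DualStack,
		TLS:       tlsConfig,
	}
	reader := kafkago.NewReader(kafkago.ReaderConfig{
		Brokers: []string{"kafka.example:9093"}, // placeholder broker
		Topic:   "netflows",
		GroupID: "my-group",
		Dialer:  dialer,
	})
	defer reader.Close()
	// reader.ReadMessage(ctx) now dials over TLS when configured.
}
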
59 changes: 59 additions & 0 deletions pkg/pipeline/ingest/ingest_kafka_test.go
@@ -21,6 +21,7 @@ import (
"testing"
"time"

"github.com/netobserv/flowlogs-pipeline/pkg/api"
"github.com/netobserv/flowlogs-pipeline/pkg/config"
"github.com/netobserv/flowlogs-pipeline/pkg/test"
kafkago "github.com/segmentio/kafka-go"
@@ -249,3 +250,61 @@ func Test_BatchTimeout(t *testing.T) {
require.LessOrEqual(t, int64(100), afterIngest.Sub(beforeIngest).Milliseconds())
require.Greater(t, int64(120), afterIngest.Sub(beforeIngest).Milliseconds())
}

+ func Test_TLSConfigEmpty(t *testing.T) {
+ stage := config.NewKafkaPipeline("ingest-kafka", api.IngestKafka{
+ Brokers: []string{"any"},
+ Topic: "topic",
+ Decoder: api.Decoder{Type: "json"},
+ })
+ newIngest, err := NewIngestKafka(stage.GetStageParams()[0])
+ require.NoError(t, err)
+ tlsConfig := newIngest.(*ingestKafka).kafkaReader.Config().Dialer.TLS
+ require.Nil(t, tlsConfig)
+ }
+
+ func Test_TLSConfigCA(t *testing.T) {
+ ca, cleanup := test.CreateCACert(t)
+ defer cleanup()
+ stage := config.NewKafkaPipeline("ingest-kafka", api.IngestKafka{
+ Brokers: []string{"any"},
+ Topic: "topic",
+ Decoder: api.Decoder{Type: "json"},
+ TLS: &api.ClientTLS{
+ CACertPath: ca,
+ },
+ })
+ newIngest, err := NewIngestKafka(stage.GetStageParams()[0])
+ require.NoError(t, err)
+
+ tlsConfig := newIngest.(*ingestKafka).kafkaReader.Config().Dialer.TLS
+
+ require.Empty(t, tlsConfig.Certificates)
+ require.NotNil(t, tlsConfig.RootCAs)
+ require.Len(t, tlsConfig.RootCAs.Subjects(), 1)
+ }
+
+ func Test_MutualTLSConfig(t *testing.T) {
+ ca, user, userKey, cleanup := test.CreateAllCerts(t)
+ defer cleanup()
+ stage := config.NewKafkaPipeline("ingest-kafka", api.IngestKafka{
+ Brokers: []string{"any"},
+ Topic: "topic",
+ Decoder: api.Decoder{Type: "json"},
+ TLS: &api.ClientTLS{
+ CACertPath: ca,
+ UserCertPath: user,
+ UserKeyPath: userKey,
+ },
+ })
+ newIngest, err := NewIngestKafka(stage.GetStageParams()[0])
+ require.NoError(t, err)
+
+ tlsConfig := newIngest.(*ingestKafka).kafkaReader.Config().Dialer.TLS
+
+ require.Len(t, tlsConfig.Certificates, 1)
+ require.NotEmpty(t, tlsConfig.Certificates[0].Certificate)
+ require.NotNil(t, tlsConfig.Certificates[0].PrivateKey)
+ require.NotNil(t, tlsConfig.RootCAs)
+ require.Len(t, tlsConfig.RootCAs.Subjects(), 1)
+ }
