
Commit 4c9bec7

tuand27613 authored and Luis Sanchez committed
[FAB-1809] Enable tls config for Kafka connection
The docker related items in previous patch sets will be submitted under a separate change set.

- Added the ability to specify certs and keys via both direct insertion in config or via a file.

Change-Id: Ia34ed4d316ddb7177e78ee4c6c347ba5ee82e116
Signed-off-by: Tuan Dang <[email protected]>
Signed-off-by: Luis Sanchez <[email protected]>
1 parent 159d098 commit 4c9bec7

13 files changed (+554 -43)
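
The change threads a TLS settings struct from the orderer's local configuration down to the sarama broker config. For orientation, here is a minimal sketch of that struct; the field names and types are inferred from how they are used in the diffs below, and the authoritative definition lives in orderer/localconfig, which is not part of this view.

    // Sketch only: inferred from usage in this commit, not the actual localconfig source.
    type TLS struct {
        Enabled     bool     // switch TLS on for the connection to the Kafka brokers
        PrivateKey  string   // PEM-encoded private key, inserted directly in config or loaded from a file
        Certificate string   // PEM-encoded client certificate, inserted directly in config or loaded from a file
        RootCAs     []string // PEM-encoded root CA certificates used to verify the brokers
    }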

images/kafka/Dockerfile.in (+1)

@@ -15,6 +15,7 @@ ADD payload/kafka-run-class.sh /opt/kafka/bin/kafka-run-class.sh
 ADD payload/docker-entrypoint.sh /docker-entrypoint.sh

 EXPOSE 9092
+EXPOSE 9093

 ENTRYPOINT ["/docker-entrypoint.sh"]
 CMD ["/opt/kafka/bin/kafka-server-start.sh"]

orderer/kafka/consumer.go (+6 -3)

@@ -16,7 +16,10 @@ limitations under the License.

 package kafka

-import "github.com/Shopify/sarama"
+import (
+    "github.com/Shopify/sarama"
+    "github.com/hyperledger/fabric/orderer/localconfig"
+)

 // Consumer allows the caller to receive a stream of blobs from the Kafka cluster for a specific partition.
 type Consumer interface {
@@ -29,8 +32,8 @@ type consumerImpl struct {
     partition sarama.PartitionConsumer
 }

-func newConsumer(brokers []string, kafkaVersion sarama.KafkaVersion, cp ChainPartition, offset int64) (Consumer, error) {
-    parent, err := sarama.NewConsumer(brokers, newBrokerConfig(kafkaVersion, rawPartition))
+func newConsumer(brokers []string, kafkaVersion sarama.KafkaVersion, tls config.TLS, cp ChainPartition, offset int64) (Consumer, error) {
+    parent, err := sarama.NewConsumer(brokers, newBrokerConfig(kafkaVersion, rawPartition, tls))
     if err != nil {
         return nil, err
     }

orderer/kafka/orderer.go (+20 -17)

@@ -29,50 +29,51 @@ import (
 )

 // New creates a Kafka-backed consenter. Called by orderer's main.go.
-func New(kv sarama.KafkaVersion, ro config.Retry) multichain.Consenter {
-    return newConsenter(kv, ro, bfValue, pfValue, cfValue)
+func New(kv sarama.KafkaVersion, ro config.Retry, tls config.TLS) multichain.Consenter {
+    return newConsenter(kv, ro, tls, bfValue, pfValue, cfValue)
 }

 // New calls here because we need to pass additional arguments to
 // the constructor and New() should only read from the config file.
-func newConsenter(kv sarama.KafkaVersion, ro config.Retry, bf bfType, pf pfType, cf cfType) multichain.Consenter {
-    return &consenterImpl{kv, ro, bf, pf, cf}
+func newConsenter(kv sarama.KafkaVersion, ro config.Retry, tls config.TLS, bf bfType, pf pfType, cf cfType) multichain.Consenter {
+    return &consenterImpl{kv, ro, tls, bf, pf, cf}
 }

 // bfType defines the signature of the broker constructor.
 type bfType func([]string, ChainPartition) (Broker, error)

 // pfType defines the signature of the producer constructor.
-type pfType func([]string, sarama.KafkaVersion, config.Retry) Producer
+type pfType func([]string, sarama.KafkaVersion, config.Retry, config.TLS) Producer

 // cfType defines the signature of the consumer constructor.
-type cfType func([]string, sarama.KafkaVersion, ChainPartition, int64) (Consumer, error)
+type cfType func([]string, sarama.KafkaVersion, config.TLS, ChainPartition, int64) (Consumer, error)

 // bfValue holds the value for the broker constructor that's used in the non-test case.
 var bfValue = func(brokers []string, cp ChainPartition) (Broker, error) {
     return newBroker(brokers, cp)
 }

 // pfValue holds the value for the producer constructor that's used in the non-test case.
-var pfValue = func(brokers []string, kafkaVersion sarama.KafkaVersion, retryOptions config.Retry) Producer {
-    return newProducer(brokers, kafkaVersion, retryOptions)
+var pfValue = func(brokers []string, kafkaVersion sarama.KafkaVersion, retryOptions config.Retry, tls config.TLS) Producer {
+    return newProducer(brokers, kafkaVersion, retryOptions, tls)
 }

 // cfValue holds the value for the consumer constructor that's used in the non-test case.
-var cfValue = func(brokers []string, kafkaVersion sarama.KafkaVersion, cp ChainPartition, offset int64) (Consumer, error) {
-    return newConsumer(brokers, kafkaVersion, cp, offset)
+var cfValue = func(brokers []string, kafkaVersion sarama.KafkaVersion, tls config.TLS, cp ChainPartition, offset int64) (Consumer, error) {
+    return newConsumer(brokers, kafkaVersion, tls, cp, offset)
 }

 // consenterImpl holds the implementation of type that satisfies the
 // multichain.Consenter and testableConsenter interfaces. The former
 // is needed because that is what the HandleChain contract requires.
 // The latter is needed for testing.
 type consenterImpl struct {
-    kv sarama.KafkaVersion
-    ro config.Retry
-    bf bfType
-    pf pfType
-    cf cfType
+    kv  sarama.KafkaVersion
+    ro  config.Retry
+    tls config.TLS
+    bf  bfType
+    pf  pfType
+    cf  cfType
 }

 // HandleChain creates/returns a reference to a Chain for the given set of support resources.
@@ -110,7 +111,7 @@ func newChain(consenter testableConsenter, support multichain.ConsenterSupport,
         partition:           newChainPartition(support.ChainID(), rawPartition),
         batchTimeout:        support.SharedConfig().BatchTimeout(),
         lastOffsetPersisted: lastOffsetPersisted,
-        producer:            consenter.prodFunc()(support.SharedConfig().KafkaBrokers(), consenter.kafkaVersion(), consenter.retryOptions()),
+        producer:            consenter.prodFunc()(support.SharedConfig().KafkaBrokers(), consenter.kafkaVersion(), consenter.retryOptions(), consenter.tlsConfig()),
         halted:              false, // Redundant as the default value for booleans is false but added for readability
         exitChan:            make(chan struct{}),
         haltedChan:          make(chan struct{}),
@@ -123,13 +124,15 @@ func newChain(consenter testableConsenter, support multichain.ConsenterSupport,
 type testableConsenter interface {
     kafkaVersion() sarama.KafkaVersion
     retryOptions() config.Retry
+    tlsConfig() config.TLS
     brokFunc() bfType
     prodFunc() pfType
     consFunc() cfType
 }

 func (co *consenterImpl) kafkaVersion() sarama.KafkaVersion { return co.kv }
 func (co *consenterImpl) retryOptions() config.Retry        { return co.ro }
+func (co *consenterImpl) tlsConfig() config.TLS             { return co.tls }
 func (co *consenterImpl) brokFunc() bfType                  { return co.bf }
 func (co *consenterImpl) prodFunc() pfType                  { return co.pf }
 func (co *consenterImpl) consFunc() cfType                  { return co.cf }
@@ -169,7 +172,7 @@ func (ch *chainImpl) Start() {
     }

     // 2. Set up the listener/consumer for this partition.
-    consumer, err := ch.consenter.consFunc()(ch.support.SharedConfig().KafkaBrokers(), ch.consenter.kafkaVersion(), ch.partition, ch.lastOffsetPersisted+1)
+    consumer, err := ch.consenter.consFunc()(ch.support.SharedConfig().KafkaBrokers(), ch.consenter.kafkaVersion(), ch.consenter.tlsConfig(), ch.partition, ch.lastOffsetPersisted+1)
     if err != nil {
         logger.Criticalf("Cannot retrieve required offset from Kafka cluster for chain %s: %s", ch.partition, err)
         close(ch.exitChan)
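
With the extra parameter in place, whoever builds the consenter now has to supply the TLS settings as well. A hedged sketch of what the call site in the orderer's main package might look like, assuming the Kafka block of the local config exposes Version, Retry, and TLS fields (that wiring is not part of this diff):

    // Hypothetical call site; conf would be the orderer's parsed localconfig.
    consenter := kafka.New(conf.Kafka.Version, conf.Kafka.Retry, conf.Kafka.TLS)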

orderer/kafka/orderer_test.go (+10 -7)

@@ -51,14 +51,16 @@ func mockNewConsenter(t *testing.T, kafkaVersion sarama.KafkaVersion, retryOptio
     prodDisk := make(chan *ab.KafkaMessage)
     consDisk := make(chan *ab.KafkaMessage)

+    mockTLS := config.TLS{Enabled: false}
+
     mockBfValue := func(brokers []string, cp ChainPartition) (Broker, error) {
         return mockNewBroker(t, cp)
     }
-    mockPfValue := func(brokers []string, kafkaVersion sarama.KafkaVersion, retryOptions config.Retry) Producer {
+    mockPfValue := func(brokers []string, kafkaVersion sarama.KafkaVersion, retryOptions config.Retry, tls config.TLS) Producer {
         // The first Send on this producer will return a blob with offset #nextProducedOffset
         return mockNewProducer(t, cp, nextProducedOffset, prodDisk)
     }
-    mockCfValue := func(brokers []string, kafkaVersion sarama.KafkaVersion, cp ChainPartition, lastPersistedOffset int64) (Consumer, error) {
+    mockCfValue := func(brokers []string, kafkaVersion sarama.KafkaVersion, tls config.TLS, cp ChainPartition, lastPersistedOffset int64) (Consumer, error) {
         if lastPersistedOffset != nextProducedOffset {
             panic(fmt.Errorf("Mock objects about to be set up incorrectly (consumer to seek to %d, producer to post %d)", lastPersistedOffset, nextProducedOffset))
         }
@@ -67,11 +69,12 @@ func mockNewConsenter(t *testing.T, kafkaVersion sarama.KafkaVersion, retryOptio

     return &mockConsenterImpl{
         consenterImpl: consenterImpl{
-            kv: kafkaVersion,
-            ro: retryOptions,
-            bf: mockBfValue,
-            pf: mockPfValue,
-            cf: mockCfValue,
+            kv:  kafkaVersion,
+            ro:  retryOptions,
+            tls: mockTLS,
+            bf:  mockBfValue,
+            pf:  mockPfValue,
+            cf:  mockCfValue,
         },
         prodDisk: prodDisk,
         consDisk: consDisk,

orderer/kafka/producer.go (+2 -2)

@@ -34,10 +34,10 @@ type producerImpl struct {
     producer sarama.SyncProducer
 }

-func newProducer(brokers []string, kafkaVersion sarama.KafkaVersion, retryOptions config.Retry) Producer {
+func newProducer(brokers []string, kafkaVersion sarama.KafkaVersion, retryOptions config.Retry, tls config.TLS) Producer {
     var p sarama.SyncProducer
     var err error
-    brokerConfig := newBrokerConfig(kafkaVersion, rawPartition)
+    brokerConfig := newBrokerConfig(kafkaVersion, rawPartition, tls)

     repeatTick := time.NewTicker(retryOptions.Period)
     panicTick := time.NewTicker(retryOptions.Stop)

orderer/kafka/util.go (+30 -1)

@@ -17,15 +17,19 @@ limitations under the License.
 package kafka

 import (
+    "crypto/tls"
+    "crypto/x509"
+    "fmt"
     "strconv"

     "github.com/Shopify/sarama"
+    "github.com/hyperledger/fabric/orderer/localconfig"
     ab "github.com/hyperledger/fabric/protos/orderer"
 )

 // TODO Set the returned config file to more appropriate
 // defaults as we're getting closer to a stable release
-func newBrokerConfig(kafkaVersion sarama.KafkaVersion, chosenStaticPartition int32) *sarama.Config {
+func newBrokerConfig(kafkaVersion sarama.KafkaVersion, chosenStaticPartition int32, tlsConfig config.TLS) *sarama.Config {
     brokerConfig := sarama.NewConfig()

     brokerConfig.Version = kafkaVersion
@@ -40,6 +44,31 @@ func newBrokerConfig(kafkaVersion sarama.KafkaVersion, chosenStaticPartition int
     // value of a Kafka broker's socket.request.max.bytes property (100 MiB).
     brokerConfig.Producer.MaxMessageBytes = int(sarama.MaxRequestSize)

+    brokerConfig.Net.TLS.Enable = tlsConfig.Enabled
+
+    if brokerConfig.Net.TLS.Enable {
+        // create public/private key pair structure
+        keyPair, err := tls.X509KeyPair([]byte(tlsConfig.Certificate), []byte(tlsConfig.PrivateKey))
+        if err != nil {
+            panic(fmt.Errorf("Unable to decode public/private key pair. Error: %v", err))
+        }
+
+        // create root CA pool
+        rootCAs := x509.NewCertPool()
+        for _, certificate := range tlsConfig.RootCAs {
+            if !rootCAs.AppendCertsFromPEM([]byte(certificate)) {
+                panic(fmt.Errorf("Unable to decode certificate. Error: %v", err))
+            }
+        }
+
+        brokerConfig.Net.TLS.Config = &tls.Config{
+            Certificates: []tls.Certificate{keyPair},
+            RootCAs:      rootCAs,
+            MinVersion:   0, // TLS 1.0 (no SSL support)
+            MaxVersion:   0, // Latest supported TLS version
+        }
+    }
+
     return brokerConfig
 }
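
The branch added to newBrokerConfig is where the key pair and the root CA pool are actually handed to sarama. A usage sketch, assuming keyPEM, certPEM, and caPEM already hold PEM-encoded material loaded by the caller:

    // Sketch: build a TLS-enabled broker config and open a producer with it.
    tlsCfg := config.TLS{
        Enabled:     true,
        PrivateKey:  keyPEM,
        Certificate: certPEM,
        RootCAs:     []string{caPEM},
    }
    brokerConfig := newBrokerConfig(kafkaVersion, rawPartition, tlsCfg)
    // newBrokerConfig panics on malformed PEM; connection failures surface here instead.
    producer, err := sarama.NewSyncProducer(brokers, brokerConfig)

Note that the Kafka image now also exposes port 9093, the conventional port for a broker's TLS listener, alongside the plaintext 9092.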

orderer/kafka/util_test.go (+95 -2)

@@ -21,6 +21,9 @@ import (

     "github.com/Shopify/sarama"
     "github.com/hyperledger/fabric/orderer/common/bootstrap/provisional"
+    "github.com/hyperledger/fabric/orderer/localconfig"
+    "github.com/hyperledger/fabric/orderer/mocks/util"
+    "github.com/stretchr/testify/assert"
 )

 func TestProducerConfigMessageMaxBytes(t *testing.T) {
@@ -35,7 +38,8 @@ func TestProducerConfigMessageMaxBytes(t *testing.T) {
         "ProduceRequest": sarama.NewMockProduceResponse(t),
     })

-    config := newBrokerConfig(testConf.Kafka.Version, rawPartition)
+    mockTLS := config.TLS{Enabled: false}
+    config := newBrokerConfig(testConf.Kafka.Version, rawPartition, mockTLS)
     producer, err := sarama.NewSyncProducer([]string{broker.Addr()}, config)
     if err != nil {
         t.Fatal(err)
@@ -86,7 +90,7 @@ func TestNewBrokerConfig(t *testing.T) {
         "ProduceRequest": sarama.NewMockProduceResponse(t),
     })

-    config := newBrokerConfig(testConf.Kafka.Version, differentPartition)
+    config := newBrokerConfig(testConf.Kafka.Version, differentPartition, config.TLS{Enabled: false})
     producer, err := sarama.NewSyncProducer([]string{broker.Addr()}, config)
     if err != nil {
         t.Fatal("Failed to create producer:", err)
@@ -105,3 +109,92 @@ func TestNewBrokerConfig(t *testing.T) {
         }
     }
 }
+
+func TestTLSConfigEnabled(t *testing.T) {
+    publicKey, privateKey, err := util.GenerateMockPublicPrivateKeyPairPEM(false)
+    if err != nil {
+        t.Fatalf("Enable to generate a public/private key pair: %v", err)
+    }
+    caPublicKey, _, err := util.GenerateMockPublicPrivateKeyPairPEM(true)
+    if err != nil {
+        t.Fatalf("Enable to generate a signer certificate: %v", err)
+    }
+
+    config := newBrokerConfig(testConf.Kafka.Version, 0, config.TLS{
+        Enabled:     true,
+        PrivateKey:  privateKey,
+        Certificate: publicKey,
+        RootCAs:     []string{caPublicKey},
+    })
+
+    assert.True(t, config.Net.TLS.Enable)
+    assert.NotNil(t, config.Net.TLS.Config)
+    assert.Len(t, config.Net.TLS.Config.Certificates, 1)
+    assert.Len(t, config.Net.TLS.Config.RootCAs.Subjects(), 1)
+    assert.Equal(t, uint16(0), config.Net.TLS.Config.MaxVersion)
+    assert.Equal(t, uint16(0), config.Net.TLS.Config.MinVersion)
+}
+
+func TestTLSConfigDisabled(t *testing.T) {
+    publicKey, privateKey, err := util.GenerateMockPublicPrivateKeyPairPEM(false)
+    if err != nil {
+        t.Fatalf("Enable to generate a public/private key pair: %v", err)
+    }
+    caPublicKey, _, err := util.GenerateMockPublicPrivateKeyPairPEM(true)
+    if err != nil {
+        t.Fatalf("Enable to generate a signer certificate: %v", err)
+    }
+
+    config := newBrokerConfig(testConf.Kafka.Version, 0, config.TLS{
+        Enabled:     false,
+        PrivateKey:  privateKey,
+        Certificate: publicKey,
+        RootCAs:     []string{caPublicKey},
+    })

+    assert.False(t, config.Net.TLS.Enable)
+    assert.Zero(t, config.Net.TLS.Config)
+
+}
+
+func TestTLSConfigBadCert(t *testing.T) {
+    publicKey, privateKey, err := util.GenerateMockPublicPrivateKeyPairPEM(false)
+    if err != nil {
+        t.Fatalf("Enable to generate a public/private key pair: %v", err)
+    }
+    caPublicKey, _, err := util.GenerateMockPublicPrivateKeyPairPEM(true)
+    if err != nil {
+        t.Fatalf("Enable to generate a signer certificate: %v", err)
+    }
+
+    t.Run("BadPrivateKey", func(t *testing.T) {
+        assert.Panics(t, func() {
+            newBrokerConfig(testConf.Kafka.Version, 0, config.TLS{
+                Enabled:     true,
+                PrivateKey:  privateKey,
+                Certificate: "TRASH",
+                RootCAs:     []string{caPublicKey},
+            })
+        })
+    })
+    t.Run("BadPublicKey", func(t *testing.T) {
+        assert.Panics(t, func() {
+            newBrokerConfig(testConf.Kafka.Version, 0, config.TLS{
+                Enabled:     true,
+                PrivateKey:  "TRASH",
+                Certificate: publicKey,
+                RootCAs:     []string{caPublicKey},
+            })
+        })
+    })
+    t.Run("BadRootCAs", func(t *testing.T) {
+        assert.Panics(t, func() {
+            newBrokerConfig(testConf.Kafka.Version, 0, config.TLS{
+                Enabled:     true,
+                PrivateKey:  privateKey,
+                Certificate: publicKey,
+                RootCAs:     []string{"TRASH"},
+            })
        })
+    })
+}
