(VOL-4959) Introduce retry mechanism for reads from and writes to Kafka
Change-Id: Ie082afa8f2caff446e13751d4447fb793ab9cf76
Signed-off-by: Akash Reddy Kankanala <akash.kankanala@radisys.com>
diff --git a/cmd/openolt-adapter/main.go b/cmd/openolt-adapter/main.go
index fbe7880..af8b52b 100644
--- a/cmd/openolt-adapter/main.go
+++ b/cmd/openolt-adapter/main.go
@@ -114,7 +114,7 @@
go conf.StartLogFeaturesConfigProcessing(cm, ctx)
// Setup Kafka Client
- if a.kafkaClient, err = newKafkaClient(ctx, "sarama", a.config.KafkaClusterAddress); err != nil {
+ if a.kafkaClient, err = newKafkaClient(ctx, "sarama", a.config); err != nil {
logger.Fatalw(ctx, "Unsupported-common-client", log.Fields{"error": err})
}
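
The call above now hands the whole AdapterFlags struct to newKafkaClient so the retry limits can come from configuration rather than hard-coded constants. The companion change to the adapter's config package is not part of this hunk; the sketch below is an assumption of what it adds, with field names taken from the usage in newKafkaClient and defaults matching the previously hard-coded values (6 and 15). The helper name registerKafkaRetryFlags is hypothetical.

```go
package config

import "flag"

// AdapterFlags sketch: only the Kafka-related fields referenced by this change
// are shown. ProducerRetryMax and MetadataRetryMax are assumed to be added by
// the companion config change; the defaults below are illustrative.
type AdapterFlags struct {
	KafkaClusterAddress string
	ProducerRetryMax    int
	MetadataRetryMax    int
}

// registerKafkaRetryFlags is a hypothetical helper showing how the new retry
// knobs could be exposed as command-line flags.
func (so *AdapterFlags) registerKafkaRetryFlags() {
	flag.StringVar(&so.KafkaClusterAddress, "kafka_cluster_address", "127.0.0.1:9092", "Kafka cluster messaging address")
	flag.IntVar(&so.ProducerRetryMax, "producer_retry_max", 6, "maximum number of retries for Kafka producer writes")
	flag.IntVar(&so.MetadataRetryMax, "metadata_retry_max", 15, "maximum number of retries for Kafka metadata fetches")
}
```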
@@ -312,17 +312,17 @@
return nil, errors.New("unsupported-kv-store")
}
-func newKafkaClient(ctx context.Context, clientType, address string) (kafka.Client, error) {
+func newKafkaClient(ctx context.Context, clientType string, config *config.AdapterFlags) (kafka.Client, error) {
logger.Infow(ctx, "common-client-type", log.Fields{"client": clientType})
switch clientType {
case "sarama":
return kafka.NewSaramaClient(
- kafka.Address(address),
+ kafka.Address(config.KafkaClusterAddress),
kafka.ProducerReturnOnErrors(true),
kafka.ProducerReturnOnSuccess(true),
- kafka.ProducerMaxRetries(6),
+ kafka.ProducerMaxRetries(config.ProducerRetryMax),
kafka.ProducerRetryBackoff(time.Millisecond*30),
- kafka.MetadatMaxRetries(15)), nil
+ kafka.MetadatMaxRetries(config.MetadataRetryMax)), nil
}
return nil, errors.New("unsupported-client-type")
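
For context, the voltha-lib-go options passed above are expected to translate into sarama's own retry settings; the exact wiring lives inside kafka.NewSaramaClient, so the sketch below is illustrative only and shows which Kafka retry knobs this change makes configurable.

```go
package kafkaexample

import (
	"time"

	"github.com/Shopify/sarama"
)

// newSaramaConfig sketches the sarama settings that roughly correspond to the
// voltha-lib-go options used in newKafkaClient; it is not the adapter's code.
func newSaramaConfig(producerRetryMax, metadataRetryMax int) *sarama.Config {
	cfg := sarama.NewConfig()
	cfg.Producer.Return.Errors = true                  // kafka.ProducerReturnOnErrors(true)
	cfg.Producer.Return.Successes = true               // kafka.ProducerReturnOnSuccess(true)
	cfg.Producer.Retry.Max = producerRetryMax          // kafka.ProducerMaxRetries(config.ProducerRetryMax)
	cfg.Producer.Retry.Backoff = 30 * time.Millisecond // kafka.ProducerRetryBackoff(time.Millisecond*30)
	cfg.Metadata.Retry.Max = metadataRetryMax          // kafka.MetadatMaxRetries(config.MetadataRetryMax)
	return cfg
}
```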