-
Notifications
You must be signed in to change notification settings - Fork 3
/
consumer_runner.go
70 lines (55 loc) · 1.9 KB
/
consumer_runner.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
package consumer
import (
"context"
"fmt"
"sync"
"github.com/Shopify/sarama"
"github.com/inviqa/kafka-consumer-go/config"
"github.com/inviqa/kafka-consumer-go/data"
"github.com/inviqa/kafka-consumer-go/data/failure/model"
"github.com/inviqa/kafka-consumer-go/data/retry"
"github.com/inviqa/kafka-consumer-go/log"
)
// Start wires up and runs the consumer collection described by cfg, blocking
// until every consumer goroutine has finished (normally triggered by ctx
// cancellation). Failures raised by handlers flow through an unbuffered
// channel to either a DB-backed or Kafka-backed retry producer, depending on
// cfg.UseDBForRetryQueue. A nil logger is replaced with a no-op logger.
//
// NOTE(review): Go convention places ctx as the first parameter; the current
// order is kept for backward compatibility with existing callers.
func Start(cfg *config.Config, ctx context.Context, hs HandlerMap, logger log.Logger) error {
	if logger == nil {
		logger = log.NullLogger{}
	}

	var wg sync.WaitGroup
	fch := make(chan model.Failure)
	saramaCfg := config.NewSaramaConfig(cfg.TLSEnable, cfg.TLSSkipVerifyPeer)

	// Choose where failed messages are queued for retry: the database, or a
	// dedicated Kafka retry topic via a failure producer.
	var cons collection
	if cfg.UseDBForRetryQueue {
		var err error
		if cons, err = setupKafkaConsumerDbCollection(cfg, logger, fch, hs, saramaCfg); err != nil {
			return err
		}
	} else {
		producer, err := newKafkaFailureProducerWithDefaults(cfg, fch, logger)
		if err != nil {
			return fmt.Errorf("could not start Kafka failure producer: %w", err)
		}
		cons = newKafkaConsumerCollection(cfg, producer, fch, hs, saramaCfg, logger, defaultKafkaConnector)
	}

	if err := cons.start(ctx, &wg); err != nil {
		return fmt.Errorf("unable to start consumers: %w", err)
	}
	// Only tear down once the collection started successfully.
	defer cons.close()

	logger.Info("kafka consumer started")
	wg.Wait()
	return nil
}
// setupKafkaConsumerDbCollection builds a consumer collection whose retry
// queue is backed by the database: it connects via cfg.DB(), runs schema
// migrations, and wires a database failure producer plus retry manager into
// the collection. The collection's maintenance loop interval is taken from
// cfg.MaintenanceInterval. Returns an error if the DB connection or the
// migration step fails.
func setupKafkaConsumerDbCollection(cfg *config.Config, logger log.Logger, fch chan model.Failure, hs HandlerMap, srmCfg *sarama.Config) (collection, error) {
	database, err := cfg.DB()
	if err != nil {
		return nil, fmt.Errorf("could not connect to DB: %w", err)
	}

	// Ensure the schema is current before any consumer starts writing retries.
	if migrateErr := data.MigrateDatabase(database, cfg.DBSchema()); migrateErr != nil {
		return nil, fmt.Errorf("unable to migrate DB: %w", migrateErr)
	}

	retryManager := retry.NewManagerWithDefaults(cfg.DBRetries, database)
	producer := newDatabaseProducer(retryManager, fch, logger)

	coll := newKafkaConsumerDbCollection(cfg, producer, retryManager, fch, hs, srmCfg, logger, defaultKafkaConnector)
	coll.setMaintenanceInterval(cfg.MaintenanceInterval)
	return coll, nil
}