forked from psinghal04/kafka-replay-sample
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathproducer.go
65 lines (52 loc) · 1.63 KB
/
producer.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
package main
import (
"fmt"
"log"
"time"
"kafka-replay-sample/config"
"github.com/confluentinc/confluent-kafka-go/kafka"
)
// main loads broker settings from config.json, creates a Kafka producer,
// publishes a single timestamped test message to the configured topic, and
// blocks until the delivery report for that message arrives before closing.
func main() {
	fmt.Println("Loading config from file...")
	cfg, err := config.GetConfig("config.json")
	if err != nil {
		log.Fatalf("Encountered fatal error initializing configuration, service being terminated: %v", err)
	}

	// Producer-side configuration only. The original also set consumer
	// settings (group.id, default.topic.config/auto.offset.reset); librdkafka
	// ignores those on a producer and logs warnings, so they are dropped.
	c := &kafka.ConfigMap{
		"metadata.broker.list": cfg.BrokerHostEndpoint,
		"security.protocol":    "PLAINTEXT",
	}
	topic := cfg.ConsumeTopic

	p, err := kafka.NewProducer(c)
	if err != nil {
		log.Fatalf("Failed to create producer: %s\n", err)
	}
	fmt.Printf("Created Producer %v\n", p)

	// Delivery-report handler. Exactly one message is produced below, so the
	// goroutine exits (closing doneChan) after the first *kafka.Message event,
	// whether delivery succeeded or failed.
	doneChan := make(chan bool)
	go func() {
		defer close(doneChan)
		for e := range p.Events() {
			switch ev := e.(type) {
			case *kafka.Message:
				m := ev
				if m.TopicPartition.Error != nil {
					log.Printf("Delivery failed: %v\n", m.TopicPartition.Error)
				} else {
					log.Printf("Delivered message to topic %s [%d] at offset %v: \"%s\"\n",
						*m.TopicPartition.Topic, m.TopicPartition.Partition, m.TopicPartition.Offset, m.Value)
				}
				return
			default:
				// Non-delivery events (stats, broker errors, etc.) are logged
				// and skipped; we only wait for the delivery report.
				log.Printf("Ignored event: %s\n", ev)
			}
		}
	}()

	// Fixed typo in the payload: "Test is a test message" -> "This is...".
	value := "This is a test message. It was produced at " + time.Now().String()
	p.ProduceChannel() <- &kafka.Message{TopicPartition: kafka.TopicPartition{Topic: &topic, Partition: kafka.PartitionAny}, Value: []byte(value)}

	// Wait for the delivery-report goroutine to finish before closing the
	// producer, so the report is not lost.
	log.Println("waiting for response on delivery channel...")
	<-doneChan
	p.Close()
}