output_kafka.go
package main

import (
	"encoding/json"
	"io"
	"log"
	"strings"
	"time"

	"github.com/Shopify/sarama"
	"github.com/Shopify/sarama/mocks"
	"github.com/buger/goreplay/proto"
)
// KafkaOutput is used for sending payloads to Kafka, either as raw
// GoReplay payloads or encoded as JSON (controlled by config.useJSON).
type KafkaOutput struct {
	config   *KafkaConfig
	producer sarama.AsyncProducer
}
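// KafkaConfig and KafkaMessage are declared elsewhere in this package, not in
// this file. Judging only from how they are used below, their shape is roughly
// the sketch that follows; the field names are the ones referenced here, but
// the exact declarations (tags, extra fields) are an assumption.
//
//	type KafkaConfig struct {
//		host     string               // comma-separated broker list
//		topic    string               // destination topic
//		useJSON  bool                 // encode payloads as JSON instead of raw
//		producer sarama.AsyncProducer // optionally injected (mock) producer
//	}
//
//	type KafkaMessage struct {
//		ReqURL, ReqType, ReqID, ReqTs, ReqMethod, ReqBody string
//		ReqHeaders                                        map[string]string
//	}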
// KafkaOutputFrequency is the producer flush frequency in milliseconds.
const KafkaOutputFrequency = 500
// NewKafkaOutput creates an instance of the Kafka producer client.
func NewKafkaOutput(address string, config *KafkaConfig) io.Writer {
	c := sarama.NewConfig()

	var producer sarama.AsyncProducer

	// Tests can inject a mock producer via config.producer; otherwise a real
	// async producer is built from the comma-separated broker list.
	if mock, ok := config.producer.(*mocks.AsyncProducer); ok && mock != nil {
		producer = config.producer
	} else {
		c.Producer.RequiredAcks = sarama.WaitForLocal
		c.Producer.Compression = sarama.CompressionSnappy
		c.Producer.Flush.Frequency = KafkaOutputFrequency * time.Millisecond

		brokerList := strings.Split(config.host, ",")

		var err error
		producer, err = sarama.NewAsyncProducer(brokerList, c)
		if err != nil {
			log.Fatalln("Failed to start Sarama(Kafka) producer:", err)
		}
	}

	o := &KafkaOutput{
		config:   config,
		producer: producer,
	}

	if Settings.verbose {
		// Start an infinite loop for tracking errors from the Kafka producer.
		go o.ErrorHandler()
	}

	return o
}
// ErrorHandler receives and logs errors from the async producer.
func (o *KafkaOutput) ErrorHandler() {
	for err := range o.producer.Errors() {
		log.Println("Failed to write access log entry:", err)
	}
}
// Write sends a single captured payload to the configured Kafka topic.
func (o *KafkaOutput) Write(data []byte) (n int, err error) {
	var message sarama.StringEncoder

	if !o.config.useJSON {
		// Raw mode: forward the GoReplay payload unchanged.
		message = sarama.StringEncoder(data)
	} else {
		// JSON mode: split the payload into metadata and the HTTP request,
		// then encode the interesting parts as a KafkaMessage.
		headers := make(map[string]string)
		proto.ParseHeaders([][]byte{data}, func(header []byte, value []byte) bool {
			headers[string(header)] = string(value)
			return true
		})

		meta := payloadMeta(data)
		req := payloadBody(data)

		kafkaMessage := KafkaMessage{
			ReqURL:     string(proto.Path(req)),
			ReqType:    string(meta[0]),
			ReqID:      string(meta[1]),
			ReqTs:      string(meta[2]),
			ReqMethod:  string(proto.Method(req)),
			ReqBody:    string(proto.Body(req)),
			ReqHeaders: headers,
		}

		jsonMessage, _ := json.Marshal(&kafkaMessage)
		message = sarama.StringEncoder(jsonMessage)
	}

	o.producer.Input() <- &sarama.ProducerMessage{
		Topic: o.config.topic,
		Value: message,
	}

	return len(message), nil
}
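Below is a minimal sketch of how this output could be exercised through the mock-injection branch of NewKafkaOutput, without real brokers. It relies on the mocks.NewAsyncProducer and ExpectInputAndSucceed API from the Shopify/sarama mocks package; the test name, topic value, and payload literal are made up for illustration, and the KafkaConfig fields are the ones referenced in the file above.

// output_kafka_sketch_test.go (hypothetical file in the same package)
package main

import (
	"testing"

	"github.com/Shopify/sarama"
	"github.com/Shopify/sarama/mocks"
)

func TestKafkaOutputSketch(t *testing.T) {
	// Have the mock report successful produces on its Successes() channel.
	c := sarama.NewConfig()
	c.Producer.Return.Successes = true

	producer := mocks.NewAsyncProducer(t, c)
	producer.ExpectInputAndSucceed()

	// Injecting the mock makes NewKafkaOutput skip real broker setup entirely.
	output := NewKafkaOutput("", &KafkaConfig{
		producer: producer,
		topic:    "test-topic", // assumed topic name
		useJSON:  false,        // forward the raw GoReplay payload
	})

	// A GoReplay payload: "<type> <id> <timestamp>" header line, then the HTTP request.
	payload := []byte("1 2 3\nGET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
	if _, err := output.Write(payload); err != nil {
		t.Fatal(err)
	}

	// The produced message should carry the configured topic and the raw payload.
	msg := <-producer.Successes()
	if msg.Topic != "test-topic" {
		t.Errorf("unexpected topic: %s", msg.Topic)
	}
	if value, _ := msg.Value.Encode(); string(value) != string(payload) {
		t.Errorf("unexpected payload: %s", value)
	}
}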