// Package main provides kafkaWriter, an io.Writer that buffers written data
// and publishes complete newline-terminated lines to a Kafka topic through a
// sarama SyncProducer, mirroring each line to an underlying writer.
package main
import (
"bytes"
"io"
"github.com/shopify/sarama"
)
type kafkaWriter struct {
// producer publishes buffered lines to Kafka; when nil, Write falls back
// to the plain writer (see Write).
producer sarama.SyncProducer
// writer receives a mirror copy of every line, and is also the fallback
// destination when no producer/topic is configured.
writer io.Writer
// topic is the Kafka topic messages are published to; an empty topic
// disables the Kafka path in Write.
topic string
// buffer accumulates written bytes until a complete '\n'-terminated line
// is available to publish. NOTE(review): no constructor is visible here —
// callers presumably must initialize buffer before the Kafka path is used,
// or Write will dereference a nil pointer; confirm at construction sites.
buffer *bytes.Buffer
// messages appears intended as a queue for an async sender loop (see the
// commented-out case in Sender); it is not used by any code visible here.
messages chan sarama.ProducerMessage
}
// Sender is an unfinished background-sender loop. As written it blocks
// forever: the select has no live cases (its only case is commented out),
// and an empty select never proceeds, so the for loop never iterates.
// NOTE(review): the commented case suggests the intent was to drain
// w.messages and publish each message — confirm intent before enabling;
// also note w.messages is never written to by code visible in this file.
func (w kafkaWriter) Sender() {
for {
select {
//case m := <-w.messages:
}
}
}
// send drains complete newline-terminated lines from w.buffer, publishing
// each line to Kafka asynchronously and mirroring it to the underlying
// writer. An incomplete trailing line (no '\n' yet) is left in the buffer
// for a later call to complete. Returns nil when the buffer is drained, or
// the read error otherwise.
func (w kafkaWriter) send() error {
	for {
		ln, err := w.buffer.ReadBytes('\n')
		if err != nil {
			// ReadBytes consumes what it read even on error. On EOF those
			// bytes are an incomplete line; push them back so the next
			// Write can complete the line instead of silently losing data.
			if len(ln) > 0 {
				w.buffer.Write(ln)
			}
			if err == io.EOF {
				return nil
			}
			return err
		}
		message := &sarama.ProducerMessage{
			Topic: w.topic,
			Value: sarama.ByteEncoder(ln),
		}
		// Publish asynchronously so a slow broker does not stall Write.
		// Pass the message as a parameter (the original closure ignored its
		// parameter and captured the loop variable instead).
		go func(m *sarama.ProducerMessage) {
			if _, _, err := w.producer.SendMessage(m); err != nil {
				// TODO: handle errors, buffer, etc
				// Guard the assertion: w.writer need not implement
				// io.Closer, and an unchecked assertion would panic.
				if c, ok := w.writer.(io.Closer); ok {
					if cerr := c.Close(); cerr != nil {
						// TODO: handle errors, buffer, etc
					}
				}
			}
		}(message)
		w.writer.Write(ln)
	}
}
// Flush publishes any complete buffered lines immediately by delegating to
// send. An incomplete trailing line remains buffered (see send).
func (w kafkaWriter) Flush() error {
return w.send()
}
// Write implements io.Writer. When a producer and topic are configured, b
// is appended to the internal buffer and any complete lines are published
// via send; otherwise b is handed straight to the wrapped writer.
func (w kafkaWriter) Write(b []byte) (n int, err error) {
	// TODO: support optional in-memory buffering, memory-mapped files and file buffering
	// Guard clause: without a producer and topic, bypass the Kafka path.
	if w.producer == nil || len(w.topic) == 0 {
		return w.writer.Write(b)
	}
	if n, err = w.buffer.Write(b); err != nil {
		return n, err
	}
	return n, w.send()
}