我正在尝试使用 Go 的 linkedin/goavro 包将 Avro 编码的数据生产到 Kafka 主题中,目标是让不同语言的客户端都能消费该主题。
首先,我将架构注册如下:
curl -X POST -H "Content-Type: application/vnd.schemaregistry.v1+json" --data '{"schema": "{\"name\":\"test_topic2\",\"type\":\"record\", \"fields\":[{\"name\":\"user\",\"type\":\"string\"},{\"name\":\"password\",\"size\":10,\"type\":\"string\"}]}"}' http://localhost:8081/subjects/test_topic2-value/versions
然后我创建 avro 数据,使用 Go 生成和使用它。
package main
import (
	"encoding/binary"
	"fmt"

	"github.com/Shopify/sarama"
	"github.com/linkedin/goavro"
)
// Kafka connection settings.
// NOTE(review): sarama constructors take []string for broker addresses,
// so wrap `brokers` in a slice at the call site.
const (
brokers = "localhost:9092"
topic = "test_topic2"
)
// loginEventAvroSchema is the Avro schema registered under the
// test_topic2-value subject. NOTE(review): "size" is not a standard
// attribute for an Avro "string" field; Avro ignores unknown attributes,
// so it has no effect — TODO confirm it was intended (perhaps "fixed").
const loginEventAvroSchema = `{"name":"test_topic2","type":"record", "fields":[{"name":"user","type":"string"},{"name":"password","size":10,"type":"string"}]}`
func main() {
// Create Message
codec, err := goavro.NewCodec(loginEventAvroSchema)
if err != nil {
panic(err)
}
m := map[string]interface{}{
"user": "pikachu", "password": 231231,
}
single, err := codec.SingleFromNative(nil, m)
if err != nil {
panic(err)
}
// Producer
config := sarama.NewConfig()
config.Consumer.Return.Errors = true
config.Producer.Return.Successes = true
config.Version = sarama.V2_4_0_0
//get broker
cluster, err := sarama.NewSyncProducer(brokers, config)
if err != nil {
panic(err)
}
defer func() {
if err := cluster.Close(); err != nil {
panic(err)
}
}()
msg := &sarama.ProducerMessage{
Topic: topic,
Value: sarama.StringEncoder(single),
}
cluster.SendMessage(msg)
// Consumer
clusterConsumer, err := sarama.NewConsumer(brokers, config)
if err != nil {
panic(err)
}
defer func() {
if err := clusterConsumer.Close(); err != nil {
panic(err)
}
}()
msgK, _ := clusterConsumer.ConsumePartition(topic, 0, sarama.OffsetOldest)
for {
q := <-msgK.Messages()
native, _, err := codec.NativeFromSingle([]byte(q.Value))
if err != nil {
fmt.Println(err)
}
fmt.Println(native)
}
此代码工作正常,我可以成功地在 kafka 主题中生成和使用消息。
现在我尝试使用来自 python avro-consumer 的主题:
from confluent_kafka import KafkaError
from confluent_kafka.avro import AvroConsumer
from confluent_kafka.avro.serializer import SerializerError

# Avro-aware consumer: it fetches schemas from the registry and expects every
# message to use the Confluent wire format (magic byte + schema ID + Avro body).
c = AvroConsumer({
    'bootstrap.servers': 'localhost',
    'group.id': 'groupid',
    'schema.registry.url': 'http://localhost:8081',
    'auto.offset.reset': 'earliest'})
c.subscribe(['test_topic2'])

# Initialize msg so the except clause below cannot raise NameError when the
# very first poll() fails to deserialize.
msg = None
while True:
    try:
        msg = c.poll(10)
    except SerializerError as e:
        # Deserialization failures (e.g. a payload without the magic byte)
        # surface here; msg still holds the previously seen message, if any.
        print("Message deserialization failed for {}: {}".format(msg, e))
        break
    if msg is None:
        continue
    if msg.error():
        print("AvroConsumer error: {}".format(msg.error()))
        continue
    print(msg.value(), msg.key())
c.close()
但我收到以下错误:
confluent_kafka.avro.serializer.SerializerError: Message deserialization failed for message at test_topic2 [0] offset 1: message does not start with magic byte
我认为我在 Go 生产者部分遗漏了一些东西,如果有人能分享他/她关于如何解决这个问题的经验,我将不胜感激。