使用带有Kafka API和使用者组功能的Azure EventHub

时间:2018-07-30 08:51:08

标签: go apache-kafka azure-eventhub

Azure 使他们的 EventHub 可以通过 Kafka API 进行访问。

我正在尝试通过sarama-cluster库在GoLang中使用它。

首先,我需要能够使用"消费者组"功能,它由库 https://github.com/bsm/sarama-cluster 实现(该库构建在基础库 https://github.com/Shopify/sarama 之上,后者是"基础的" Kafka 客户端)。

conf有点棘手,但是我已经设法通过基本的sarama lib使用了主题,但是当使用bsm库时,它不再起作用了……

有人已经设法做到这一点吗?

我的配置对吗?

这是我带有基本Sarama库的示例文件

package main

import (
    "fmt"
    "strings"

    "github.com/Shopify/sarama"

    "time"
    "crypto/tls"

)



// produce publishes a single "Test" message to an Azure Event Hub through
// its Kafka-compatible endpoint (port 9093), authenticating with SASL/PLAIN
// over TLS as Event Hubs requires: user is the literal "$ConnectionString"
// and the password is the full SAS connection string.
func produce() {

    brokerList := "service_test.servicebus.windows.net:9093"

    connectionString := "Endpoint=sb://service_test.servicebus.windows.net/;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=vaWQE2ymDtegC87k24YGNE4cVgKpZq1Bcia6c6HiR3E="

    topic := "topic_test"

    confProducer := sarama.NewConfig()
    confProducer.Producer.RequiredAcks = sarama.WaitForAll
    confProducer.Producer.Retry.Max = 10
    confProducer.Producer.Return.Successes = true // required by SyncProducer

    // Event Hubs' Kafka endpoint speaks Kafka protocol 1.0+.
    confProducer.Version = sarama.V1_0_0_0

    confProducer.Net.SASL.User = "$ConnectionString"
    confProducer.Net.SASL.Password = connectionString
    confProducer.Net.SASL.Handshake = true
    confProducer.Net.SASL.Enable = true
    confProducer.Net.TLS.Enable = true
    // NOTE(review): InsecureSkipVerify disables certificate validation —
    // acceptable for a local experiment, never for production.
    confProducer.Net.TLS.Config = &tls.Config{
        InsecureSkipVerify: true,
    }

    producer, err := sarama.NewSyncProducer(strings.Split(brokerList, ","), confProducer)
    if err != nil {
        // Include the underlying error instead of swallowing it.
        panic(fmt.Sprintf("NewSyncProducer() err: %+v\n", err))
    }
    // Close broker connections when done; error ignored on best-effort cleanup.
    defer producer.Close()

    msg := &sarama.ProducerMessage{
        Topic: topic,
        Value: sarama.StringEncoder("Test"),
    }
    _, _, err = producer.SendMessage(msg)
    if err != nil {
        panic(fmt.Sprintf("SendMessage() err: %+v\n", err))
    }
}




// consumeSarama connects to an Azure Event Hub through its Kafka endpoint
// using the plain Shopify/sarama library (no consumer group), prints the
// offset range of partition 0, then loops forever printing each newly
// produced message on that partition.
func consumeSarama() {

    brokerList := "service_test.servicebus.windows.net:9093"

    connectionString := "Endpoint=sb://service_test.servicebus.windows.net/;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=vaWQE2ymDtegC87k24YGNE4cVgKpZq1Bcia6c6HiR3E="

    topic := "topic_test"

    clusterConfig := sarama.NewConfig()
    clusterConfig.Consumer.Return.Errors = true
    clusterConfig.Metadata.Retry.Backoff = 2 * time.Second

    clusterConfig.Consumer.Offsets.Initial = sarama.OffsetNewest

    // Event Hubs' Kafka endpoint speaks Kafka protocol 1.0+.
    clusterConfig.Version = sarama.V1_0_0_0

    // Event Hubs auth: user is the literal "$ConnectionString", password
    // is the full SAS connection string.
    clusterConfig.Net.SASL.User = "$ConnectionString"
    clusterConfig.Net.SASL.Password = connectionString
    clusterConfig.Net.SASL.Handshake = true
    clusterConfig.Net.SASL.Enable = true
    clusterConfig.Net.TLS.Enable = true
    // NOTE(review): InsecureSkipVerify disables certificate validation —
    // experiment only.
    clusterConfig.Net.TLS.Config = &tls.Config{
        InsecureSkipVerify: true,
    }

    client, err := sarama.NewClient(strings.Split(brokerList, ","), clusterConfig)
    if err != nil {
        // Fixed: message previously said bsm.NewClient() although this is sarama.
        panic(fmt.Sprintf("sarama.NewClient(): %+v\nbrokerList: %+v\n", err, brokerList))
    }
    defer client.Close()

    oldestOffset, err := client.GetOffset(topic, 0, sarama.OffsetOldest)
    if err != nil {
        panic(err)
    }
    newestOffset, err := client.GetOffset(topic, 0, sarama.OffsetNewest)
    if err != nil {
        panic(err)
    }
    fmt.Printf("#########################\n")
    fmt.Printf("Oldest Offset    : %+v\n", oldestOffset)
    fmt.Printf("Newest Offset    : %+v\n", newestOffset)
    fmt.Printf("#########################\n")

    clusterConsumer, err := sarama.NewConsumerFromClient(client)
    if err != nil {
        fmt.Printf("sarama.NewConsumerFromClient() err: %+v\n", err)
        panic(err)
    }
    defer clusterConsumer.Close()

    // Fixed: the error returned by ConsumePartition was previously discarded.
    consumeQueue, err := clusterConsumer.ConsumePartition(topic, 0, sarama.OffsetNewest)
    if err != nil {
        panic(fmt.Sprintf("ConsumePartition() err: %+v\n", err))
    }
    for {
        select {
        case err := <-consumeQueue.Errors():
            // Fixed: was fmt.Sprintf, whose result was silently discarded.
            fmt.Printf("Error from kafka, stopping consumer... err: %+v\n", err)
        case msg, ok := <-consumeQueue.Messages():
            if !ok {
                // Channel closed: the partition consumer is shutting down.
                return
            }
            fmt.Printf("Received messages key=%+v value=%+v\n", string(msg.Key), string(msg.Value))
        }
    }
}




// main starts the consumer in the background, waits for it to connect,
// publishes one test message, then waits long enough for the consumed
// message to be printed before exiting.
func main() {
    go consumeSarama()

    // Give the consumer time to connect before producing.
    time.Sleep(5 * time.Second)

    produce()

    // Leave time for the received message to be printed.
    time.Sleep(5 * time.Second)
}

这是我带有Bsm库的示例文件

package main

import (
    "fmt"
    "strings"

    "github.com/Shopify/sarama"
    bsm "github.com/bsm/sarama-cluster"

    "time"
    "crypto/tls"

)




// produce publishes a single "Test" message to an Azure Event Hub through
// its Kafka-compatible endpoint (port 9093), authenticating with SASL/PLAIN
// over TLS as Event Hubs requires: user is the literal "$ConnectionString"
// and the password is the full SAS connection string.
func produce() {

    brokerList := "service_test.servicebus.windows.net:9093"

    connectionString := "Endpoint=sb://service_test.servicebus.windows.net/;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=vaWQE2ymDtegC87k24YGNE4cVgKpZq1Bcia6c6HiR3E="

    topic := "test_topic"

    confProducer := sarama.NewConfig()
    confProducer.Producer.RequiredAcks = sarama.WaitForAll
    confProducer.Producer.Retry.Max = 10
    confProducer.Producer.Return.Successes = true // required by SyncProducer

    // Event Hubs' Kafka endpoint speaks Kafka protocol 1.0+.
    confProducer.Version = sarama.V1_0_0_0

    confProducer.Net.SASL.User = "$ConnectionString"
    confProducer.Net.SASL.Password = connectionString
    confProducer.Net.SASL.Handshake = true
    confProducer.Net.SASL.Enable = true
    confProducer.Net.TLS.Enable = true
    // NOTE(review): InsecureSkipVerify disables certificate validation —
    // acceptable for a local experiment, never for production.
    confProducer.Net.TLS.Config = &tls.Config{
        InsecureSkipVerify: true,
    }

    producer, err := sarama.NewSyncProducer(strings.Split(brokerList, ","), confProducer)
    if err != nil {
        // Include the underlying error instead of swallowing it.
        panic(fmt.Sprintf("NewSyncProducer() err: %+v\n", err))
    }
    // Close broker connections when done; error ignored on best-effort cleanup.
    defer producer.Close()

    msg := &sarama.ProducerMessage{
        Topic: topic,
        Value: sarama.StringEncoder("Test"),
    }
    _, _, err = producer.SendMessage(msg)
    if err != nil {
        panic(fmt.Sprintf("SendMessage() err: %+v\n", err))
    }
}




// consumeBsm joins the "$Default" consumer group of an Azure Event Hub
// through its Kafka endpoint using the bsm/sarama-cluster library, prints
// the offset range of partition 0, then loops forever printing every
// message, rebalance notification, and error the group consumer delivers.
func consumeBsm() {

    brokerList := "service_test.servicebus.windows.net:9093"

    connectionString := "Endpoint=sb://service_test.servicebus.windows.net/;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=vaWQE2ymDtegC87k24YGNE4cVgKpZq1Bcia6c6HiR3E="

    topic := "test_topic"

    consumerGroup := "$Default"

    clusterConfig := bsm.NewConfig()

    clusterConfig.Metadata.Retry.Backoff = 2 * time.Second

    clusterConfig.Consumer.Offsets.Initial = sarama.OffsetOldest

    // Event Hubs' Kafka endpoint speaks Kafka protocol 1.0+.
    clusterConfig.Version = sarama.V1_0_0_0

    clusterConfig.Consumer.Retry.Backoff = 200 * time.Millisecond

    // Event Hubs auth: user is the literal "$ConnectionString", password
    // is the full SAS connection string.
    clusterConfig.Net.SASL.User = "$ConnectionString"
    clusterConfig.Net.SASL.Password = connectionString
    clusterConfig.Net.SASL.Handshake = true
    clusterConfig.Net.SASL.Enable = true
    clusterConfig.Net.TLS.Enable = true
    // NOTE(review): InsecureSkipVerify disables certificate validation —
    // experiment only.
    clusterConfig.Net.TLS.Config = &tls.Config{
        InsecureSkipVerify: true,
    }
    clusterConfig.Group.Return.Notifications = true
    clusterConfig.Consumer.Return.Errors = true

    client, err := bsm.NewClient(strings.Split(brokerList, ","), clusterConfig)
    if err != nil {
        panic(fmt.Sprintf("bsm.NewClient(): %+v\nbrokerList: %+v\n", err, brokerList))
    }
    defer client.Close()

    oldestOffset, err := client.GetOffset(topic, 0, sarama.OffsetOldest)
    if err != nil {
        panic(err)
    }
    newestOffset, err := client.GetOffset(topic, 0, sarama.OffsetNewest)
    if err != nil {
        panic(err)
    }
    fmt.Printf("#########################\n")
    fmt.Printf("Oldest Offset    : %+v\n", oldestOffset)
    fmt.Printf("Newest Offset    : %+v\n", newestOffset)
    fmt.Printf("#########################\n")

    fmt.Printf("Now part of consumer group -> %s\n", consumerGroup)
    // Reuse the client built above rather than opening a second set of
    // broker connections via bsm.NewConsumer.
    clusterConsumer, err := bsm.NewConsumerFromClient(client, consumerGroup, []string{topic})
    if err != nil {
        fmt.Printf("bsm.NewConsumerFromClient() err: %+v\n", err)
        panic(err)
    }
    defer clusterConsumer.Close()

    for {
        select {
        case notifs := <-clusterConsumer.Notifications():
            // Fixed: was fmt.Sprintf, whose result was silently discarded —
            // rebalance notifications were never visible.
            fmt.Printf("Kafka notifs are -> %+v\n", notifs)
        case err := <-clusterConsumer.Errors():
            // Fixed: same fmt.Sprintf bug — errors were never printed.
            fmt.Printf("Error from kafka, stopping consumer... err: %+v\n", err)
        case msg, ok := <-clusterConsumer.Messages():
            if !ok {
                // Channel closed: the group consumer is shutting down.
                return
            }
            fmt.Printf("Received messages key=%+v value=%+v\n", string(msg.Key), string(msg.Value))
        }
    }
}



// main starts the group consumer in the background, waits for it to join
// the group, publishes one test message, then waits long enough for the
// consumed message to be printed before exiting.
func main() {
    go consumeBsm()

    // Give the consumer time to connect and join the group.
    time.Sleep(5 * time.Second)

    produce()

    // Leave time for the received message to be printed.
    time.Sleep(5 * time.Second)
}

0 个答案:

没有答案