How do I create a consumer for a Kafka topic?

Date: 2016-06-22 20:23:59

Tags: javascript node.js apache-kafka producer-consumer

I created a topic on the Kafka server, and now I am writing a consumer to read that topic's messages from the server. But when I try to consume messages with consumer.on('message'), I never see any data. Any idea what is wrong in the code below? Do I need to set an offset?

consumer.js

var kafka = require('kafka-node');
var config = require('./config.js');
var zk = require('node-zookeeper-client');
var kafkaConn = config.kafkaCon.dit;
var HighLevelConsumer = kafka.HighLevelConsumer;
var Client = kafka.Client;

function start() {
    var topics = [{
        topic: 'test-1'
    }];
    var groupId = 'push';
    var clientId = "consumer-" + Math.floor(Math.random() * 10000);
    var options = {
        autoCommit: true,
        fetchMaxWaitMs: 100,
        fetchMaxBytes: 10 * 1024 * 1024,
        groupId: groupId
    };
    console.log("Started consumer: ", clientId);
    var consumer_client = new kafka.Client(kafkaConn, clientId);
    var client = new Client(consumer_client.connectionString, clientId);
    var consumer = new HighLevelConsumer(client, topics, options);
    consumer.on('message', function(message) {
        var topic = message.topic;
        console.log('Message', topic);
    });

}

start();
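
For reference, a minimal producer sketch (a hypothetical producer.js; 'localhost:2181' stands in for the real ZooKeeper connection string from config.js, and it is not part of the original question) to confirm that messages are actually being written to test-1:

var kafka = require('kafka-node');

// Hypothetical test script; replace 'localhost:2181' with the actual
// ZooKeeper connection string used by the consumer.
var client = new kafka.Client('localhost:2181');
var producer = new kafka.Producer(client);

producer.on('ready', function() {
    producer.send([{ topic: 'test-1', messages: ['hello'] }], function(err, data) {
        console.log(err || data);
        client.close();
    });
});

producer.on('error', function(err) {
    console.error(err);
});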

2 Answers:

Answer 0 (score: 0)
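
An alternative is the node-rdkafka client. The configuration below follows a CloudKarafka example; the commented-out SASL settings are only needed when connecting to a secured cluster.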

const Kafka = require("node-rdkafka");

const kafkaConf = {
  "group.id": "cloudkarafka-example",
  "metadata.broker.list": ["localhost:9092"],
  "socket.keepalive.enable": true,
  //"security.protocol": "SASL_SSL",
  //"sasl.mechanisms": "SCRAM-SHA-256",
  //"sasl.username": process.env.CLOUDKARAFKA_USERNAME,
  //"sasl.password": process.env.CLOUDKARAFKA_PASSWORD,
  "debug": "generic,broker,security",
  'enable.auto.commit': false, // commit offsets manually (see the note after this block)
};

//const prefix = process.env.CLOUDKARAFKA_USERNAME;
const topics = ['topicName'];
// Topic-level config: where to start when the group has no committed offset.
const consumer = new Kafka.KafkaConsumer(kafkaConf, {
  "auto.offset.reset": "beginning"
});

consumer.on("error", function(err) {
  console.error(err);
});
consumer.on("ready", function(arg) {
  console.log(`Consumer ${arg.name} ready`);
  consumer.subscribe(topics);
  consumer.consume();
});

consumer.on("data", function(m) {
 console.log(m.value.toString());
});
consumer.on("disconnected", function(arg) {
  process.exit();
});
consumer.on('event.error', function(err) {
  console.error(err);
  process.exit(1);
});
consumer.on('event.log', function(log) {
  console.log(log);
});
consumer.connect();
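
Note that the "auto.offset.reset": "beginning" topic setting addresses the original offset question: when the group has no committed offset yet, the consumer starts from the earliest message instead of waiting for new ones. And since enable.auto.commit is false, offsets must be committed explicitly or the group's position is never stored. A possible sketch, assuming node-rdkafka's commitMessage:

consumer.on("data", function(m) {
  console.log(m.value.toString());
  // With enable.auto.commit: false, store this message's offset after
  // processing so the group does not re-read it on the next start.
  consumer.commitMessage(m);
});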

Answer 1 (score: 0)

A Kafka consumer can be written with the kafka-node npm module. For my use case, the consumer was a separate Express server that listened for events and stored them in a database.

import kafka from "kafka-node";

// ZooKeeper connect string: host:port, without an http:// prefix.
const client = new kafka.Client("localhost:2181");

const topics = [
    {
        topic: "webevents.dev"
    }
];
const options = {
    autoCommit: true,
    fetchMaxWaitMs: 1000,
    fetchMaxBytes: 1024 * 1024,
    encoding: "buffer"
};

const consumer = new kafka.HighLevelConsumer(client, topics, options);

consumer.on("message", function(message) {

    // Read string into a buffer.
    var buf = new Buffer(message.value, "binary"); 
    var decodedMessage = JSON.parse(buf.toString());

    //Events is a Sequelize Model Object. 
    return Events.create({
        id: decodedMessage.id,
        type: decodedMessage.type,
        userId: decodedMessage.userId,
        sessionId: decodedMessage.sessionId,
        data: JSON.stringify(decodedMessage.data),
        createdAt: new Date()
    });
});

consumer.on("error", function(err) {
    console.log("error", err);
});

process.on("SIGINT", function() {
    consumer.close(true, function() {
        process.exit();
    });
});
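
As for the original "do I need to set the offset?" question: the HighLevelConsumer tracks offsets per groupId (via ZooKeeper in this kafka-node version), so a fresh group typically only sees messages produced after it starts. To replay a topic from the beginning, one option is kafka-node's low-level Consumer with an explicit offset (a sketch; partition 0 is assumed):

const replayConsumer = new kafka.Consumer(
    client,
    [{ topic: "webevents.dev", partition: 0, offset: 0 }],
    { autoCommit: false, fromOffset: true } // honor the offset given above
);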

More info at https://nodewebapps.com/2017/11/04/getting-started-with-nodejs-and-kafka/