Reading data in batches with kafka-node

Asked: 2020-03-16 08:29:53

Tags: node.js apache-kafka

I'm using the kafka-node ConsumerGroupStream to consume messages from Kafka, and I want to read the stream in 1024-byte chunks:

 const chunk = consumerGroup.read(1024)

Here is my consumer code:

const ConsumerGroupStream = require('kafka-node').ConsumerGroupStream;
const util = require('util');
const config = require('./config'); // assumed module providing KafkaHost and KafkaTopic

const consumerOptions = {
    kafkaHost: config.KafkaHost,
    groupId: 'ExampleTestGroup',
    fetchMaxWaitMs: 1000,
    fetchMaxBytes: 1024,
    sessionTimeout: 15000,
    protocol: ['roundrobin'],
    asyncPush: false,
    id: 'consumer1',
    autoCommit: true,
    fromOffset: 'latest'
};

function sleep(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
}

const consumerGroup = new ConsumerGroupStream(consumerOptions, config.KafkaTopic);

consumerGroup.on('error', err => console.error(err));

let i = 0;

async function main() {
    while (true) {
        // Try to read 1024 bytes from the stream.
        const chunk = consumerGroup.read(1024);
        if (!chunk) {
            // Nothing buffered yet; wait and poll again.
            console.log('Sleeping for 15 seconds\n');
            await sleep(15 * 1000);
            continue;
        } else {
            i += 1;
            const chatlog = JSON.parse(chunk.value);
            console.log(util.inspect(JSON.stringify(chatlog)));
            console.log(i);
        }
    }
}

main();

However, the consumer always consumes only one message per read, rather than 1024 bytes at a time.
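From the Node.js stream documentation, read(size) ignores the size argument for streams in object mode, and ConsumerGroupStream appears to emit one message object per read, which would explain this behavior. If that is the cause, a minimal workaround sketch could buffer messages until roughly 1024 bytes of values have accumulated (handleBatch and the names batch/batchBytes are illustrative, not kafka-node APIs):

const batch = [];
let batchBytes = 0;

function handleBatch(messages) {
    // Hypothetical batch handler; replace with real processing.
    console.log(`Got a batch of ${messages.length} messages`);
}

// Each 'data' event delivers one Kafka message object; flush the
// buffered messages once ~1024 bytes of values have accumulated.
consumerGroup.on('data', (message) => {
    batch.push(message);
    batchBytes += Buffer.byteLength(message.value);
    if (batchBytes >= 1024) {
        handleBatch(batch.splice(0, batch.length));
        batchBytes = 0;
    }
});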

Please help me! Thanks in advance!
