Kafka Consumer不会从Producer读取数据

时间:2018-02-12 12:52:02

标签: apache-kafka kafka-consumer-api data-science

我的Kafka消费者没有从我的制片人那里读到。我注意到在调用poll方法之后,代码没有执行打印“Hello”并且没有显示错误消息。

代码执行得很好但是就像它在poll方法之后中断

注意:我的制作人效果很好。我创建了一个消费者来测试它。

代码

public class ConsumerApp {

    public static void main(String[] args) {

        // Create Propety dictionary for the producer Config settings
        Properties props = new Properties();

        props.put("bootstrap.servers", "localhost:9092");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        KafkaConsumer<String, String> myconsumer = new KafkaConsumer<String, String>(props);
        myconsumer.subscribe(Arrays.asList("test"));

        try {
            while (true) {

                ConsumerRecords<String, String> records = myconsumer.poll(100);
                System.out.println("hello");
                // processing logic goes here
                for (ConsumerRecord<String, String> record : records) {
                    // processing records
                    System.out.println(String.format(record.topic(), record.partition(), record.offset(), record.key(),
                            record.value()));

                }

            }

        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Closing Consumer
            myconsumer.close();
        }

    }
}

2 个答案:

答案 0 :(得分:1)

我发现解决方案我没有设置与zookeeper服务器的连接,现在我做了我的消费者读取数据!这是代码

public static void main(String[] args) {


    //Create Propety dictionary for the producer Config settings

    Properties props = new Properties();

    props.put("bootstrap.servers", "localhost:9092");
    props.put("zookeeper.connect", "localhost:2181");
    props.put("group.id", "console");
    props.put("zookeeper.session.timeout.ms", "500");
    props.put("zookeeper.sync.timeout.ms", "500");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");


    KafkaConsumer< String, String > myconsumer = new KafkaConsumer<String, String>  (props);
    myconsumer.subscribe(Collections.singletonList("test"));

    try {
        while(true){


            ConsumerRecords<String, String> records = myconsumer.poll(100);

            // processing logic goes here
            for (ConsumerRecord<String, String> record : records) {
                    // processing records
                System.out.printf("offset = %d, key = %s, value = %s\n",
                        record.offset(), record.key(), record.value());

            }

        }

    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // Closing Consumer
        myconsumer.close();
    }


}

}

答案 1 :(得分:0)

很久以前我正在玩这个例子而且效果很好,试试看:

消费者:

package com.spnotes.kafka.simple;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;

import java.util.Arrays;
import java.util.Properties;
import java.util.Scanner;

/**
 * Created by sunilpatil on 12/28/15.
 */
public class Consumer {
    private static Scanner in;

    public static void main(String[] argv)throws Exception{
        if (argv.length != 2) {
            System.err.printf("Usage: %s <topicName> <groupId>\n",
                    Consumer.class.getSimpleName());
            System.exit(-1);
        }
        in = new Scanner(System.in);
        String topicName = argv[0];
        String groupId = argv[1];

        ConsumerThread consumerRunnable = new ConsumerThread(topicName,groupId);
        consumerRunnable.start();
        String line = "";
        while (!line.equals("exit")) {
            line = in.next();
        }
        consumerRunnable.getKafkaConsumer().wakeup();
        System.out.println("Stopping consumer .....");
        consumerRunnable.join();
    }

    private static class ConsumerThread extends Thread{
        private String topicName;
        private String groupId;
        private KafkaConsumer<String,String> kafkaConsumer;

        public ConsumerThread(String topicName, String groupId){
            this.topicName = topicName;
            this.groupId = groupId;
        }
        public void run() {
            Properties configProperties = new Properties();
            configProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
            configProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArrayDeserializer");
            configProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
            configProperties.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
            configProperties.put(ConsumerConfig.CLIENT_ID_CONFIG, "simple");

            //Figure out where to start processing messages from
            kafkaConsumer = new KafkaConsumer<String, String>(configProperties);
            kafkaConsumer.subscribe(Arrays.asList(topicName));
            //Start processing messages
            try {
                while (true) {
                    ConsumerRecords<String, String> records = kafkaConsumer.poll(100);
                    for (ConsumerRecord<String, String> record : records)
                        System.out.println(record.value());
                }
            }catch(WakeupException ex){
                System.out.println("Exception caught " + ex.getMessage());
            }finally{
                kafkaConsumer.close();
                System.out.println("After closing KafkaConsumer");
            }
        }
        public KafkaConsumer<String,String> getKafkaConsumer(){
           return this.kafkaConsumer;
        }
    }
}

制片人:

package com.spnotes.kafka.simple;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;
import java.util.Scanner;

/**
 * Created by sunilpatil on 12/28/15.
 */
public class Producer {
    private static Scanner in;
    public static void main(String[] argv)throws Exception {
        if (argv.length != 1) {
            System.err.println("Please specify 1 parameters ");
            System.exit(-1);
        }
        String topicName = argv[0];
        in = new Scanner(System.in);
        System.out.println("Enter message(type exit to quit)");

        //Configure the Producer
        Properties configProperties = new Properties();
        configProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,"localhost:9092");
        configProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,"org.apache.kafka.common.serialization.ByteArraySerializer");
        configProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,"org.apache.kafka.common.serialization.StringSerializer");

        org.apache.kafka.clients.producer.Producer producer = new KafkaProducer(configProperties);
        String line = in.nextLine();
        while(!line.equals("exit")) {
            //TODO: Make sure to use the ProducerRecord constructor that does not take parition Id
            ProducerRecord<String, String> rec = new ProducerRecord<String, String>(topicName,line);
            producer.send(rec);
            line = in.nextLine();
        }
        in.close();
        producer.close();
    }
}

你可以在这里找到另一个不错的例子:https://www.codenotfound.com/spring-kafka-consumer-producer-example.html