public void consumer2() {
    // start one consumer thread, then produce a few messages
    ConsumerThread consumerThread = new ConsumerThread();
    consumerThread.start();
    for (int i = 0; i < 3; i++) {
        ApacheKafkaWebController apacheKafkaWebController = new ApacheKafkaWebController();
        apacheKafkaWebController.producer("test");
    }
    try {
        TimeUnit.SECONDS.sleep(20);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
    // after the pause, start ten more consumer threads
    for (int i = 0; i < 10; i++) {
        (new ConsumerThread()).start();
    }
}
public class ConsumerThread extends Thread {
    public void run() {
        ConsumerService consumerService = new ConsumerService();
        consumerService.consumer();
    }
}
public ConsumerRecords<String, byte[]> consumer() {
    Properties props = new Properties();
    props.put("bootstrap.servers", "localhost:9092");
    //props.put("group.id", "test");
    // a fresh group.id on every call, so auto.offset.reset=earliest applies each time
    props.put("group.id", String.valueOf(Instant.now().getEpochSecond()));
    props.put("auto.offset.reset", "earliest");
    props.put("enable.auto.commit", "false");
    //props.put("auto.commit.interval.ms", "1000");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
    KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Collections.singletonList("topiccc"));
    ConsumerRecords<String, byte[]> records = consumer.poll(100);
    consumer.seekToBeginning(consumer.assignment());
    /*List<byte[]> videoContents = new ArrayList<byte[]>();
    for (ConsumerRecord<String, byte[]> record : records) {
        System.out.printf("offset = %d, key = %s, value = %s\n", record.offset(), record.key(), record.value());
        videoContents.add(record.value());
    }*/
    System.out.println("records is empty: " + records.isEmpty());
    System.out.println("record count: " + records.count());
    return records;
}
@GetMapping(value = "/producer")
public String producer(@RequestParam("message") String message) {
Map<String, Object> props = new HashMap<>();
// list of host:port pairs used for establishing the initial connections to the Kakfa cluster
props.put("bootstrap.servers", "localhost:9092");
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
Producer<String, byte[]> producer = new KafkaProducer<>(props);
Path path = Paths.get("C:/Programming Files/video-2012-07-05-02-29-27.mp4");
ProducerRecord<String, byte[]> record = null;
try {
//byte[] arr= (Arrays.copyOf(Files.readAllBytes(path),2000000));
record = new ProducerRecord<>("topiccc", "keyyyyy"
, Files.readAllBytes(path));
} catch (IOException e) {
e.printStackTrace();
}
producer.send(record);
producer.close();
//kafkaSender.send(record);
return "Message sent to the Kafka Topic java_in_use_topic Successfully";
}
I have 10 consumer threads started in a loop that call the Kafka consumer after the producer has been called several times to insert records, but the consumer keeps coming back empty.
On every call I set the group ID to a different value so that the records stay in Kafka and the consumer can fetch them. I have also called the consumer several times through the browser and verified that the group ID approach works.
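For context, here is a minimal stand-alone sketch of the read-from-the-beginning pattern I am describing, reusing the settings from the code above (fresh group.id per run, auto.offset.reset=earliest, topic "topiccc"). The repeated poll with a longer timeout is only an assumption I made for the sketch, to give the group time to receive its partition assignment; it is not code from my project.

import java.time.Instant;
import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class ReadFromBeginningSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        // fresh group.id per run, so auto.offset.reset=earliest applies and the
        // consumer starts from the earliest record still retained in the topic
        props.put("group.id", String.valueOf(Instant.now().getEpochSecond()));
        props.put("auto.offset.reset", "earliest");
        props.put("enable.auto.commit", "false");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");

        KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<>(props);
        try {
            consumer.subscribe(Collections.singletonList("topiccc"));
            // poll several times: the first polls can come back empty while the group
            // is still joining and partitions are being assigned
            for (int attempt = 0; attempt < 5; attempt++) {
                ConsumerRecords<String, byte[]> records = consumer.poll(2000);
                System.out.println("attempt " + attempt + ", record count: " + records.count());
                for (ConsumerRecord<String, byte[]> record : records) {
                    System.out.printf("offset = %d, key = %s, value bytes = %d%n",
                            record.offset(), record.key(), record.value().length);
                }
            }
        } finally {
            consumer.close();
        }
    }
}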