We are using spring-kafka-test 2.2.8-RELEASE. When I send a message with the template it triggers the listener correctly, but I cannot get the message content in consumer.poll. If I instantiate the KafkaTemplate myself from a producer factory instead of wiring it as a class attribute, it sends the message but does not trigger the @KafkaListener; it only works when I set up a message listener inside the @Test method. I need the Kafka listener to be triggered, and to know which topic will be called next (a "success" topic when it executes without errors, an "ExceptionTopic" when the listener throws an exception) as well as the message content.
@RunWith(SpringRunner.class)
@SpringBootTest
@EmbeddedKafka(partitions = 1, topics = { "tp-in-gco-mao-notasfiscais" })
public class InvoicingServiceTest {

    @Autowired
    private NFKafkaListener nfKafkaListener;

    @ClassRule
    public static EmbeddedKafkaRule broker = new EmbeddedKafkaRule(1, false, "tp-in-gco-mao-notasfiscais");

    @Value("${" + EmbeddedKafkaBroker.SPRING_EMBEDDED_KAFKA_BROKERS + "}")
    private String brokerAddresses;

    @Autowired
    private KafkaTemplate<Object, Object> template;

    @BeforeClass
    public static void setup() {
        System.setProperty(EmbeddedKafkaBroker.BROKER_LIST_PROPERTY, "spring.kafka.bootstrap-servers");
    }

    @Test
    public void testTemplate() throws Exception {
        NFServiceTest nfServiceTest = spy(new NFServiceTest());
        nfKafkaListener.setNfServiceClient(nfServiceTest);
        Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("teste9", "false", broker.getEmbeddedKafka());
        consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, InvoiceDeserializer.class);
        consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        DefaultKafkaConsumerFactory<Integer, Object> cf = new DefaultKafkaConsumerFactory<Integer, Object>(consumerProps);
        Consumer<Integer, Object> consumer = cf.createConsumer();
        broker.getEmbeddedKafka().consumeFromAnEmbeddedTopic(consumer, "tp-in-gco-mao-notasfiscais");
        ZfifNfMao zf = new ZfifNfMao();
        zf.setItItensnf(new Zfietb011());
        Zfietb011 zfietb011 = new Zfietb011();
        Zfie011 zfie011 = new Zfie011();
        zfie011.setMatkl("TESTE");
        zfietb011.getItem().add(zfie011);
        zf.setItItensnf(zfietb011);
        template.send("tp-in-gco-mao-notasfiscais", zf);
        List<ConsumerRecord<Integer, Object>> received = new ArrayList<>();
        int n = 0;
        while (received.size() < 1 && n++ < 10) {
            ConsumerRecords<Integer, Object> records1 = consumer.poll(Duration.ofSeconds(10));
            // records1 is always empty
            if (!records1.isEmpty()) {
                records1.forEach(rec -> received.add(rec));
            }
        }
        assertThat(received).extracting(rec -> {
            ZfifNfMao zfifNfMaoResponse = (ZfifNfMao) rec.value();
            return zfifNfMaoResponse.getItItensnf().getItem().get(0).getMatkl();
        }).contains("TESTE");
        broker.getEmbeddedKafka().getKafkaServers().forEach(b -> b.shutdown());
        broker.getEmbeddedKafka().getKafkaServers().forEach(b -> b.awaitShutdown());
        consumer.close();
    }

    public static class NFServiceTest implements INFServiceClient {

        CountDownLatch latch = new CountDownLatch(1);

        @Override
        public ZfifNfMaoResponse enviarSap(ZfifNfMao zfifNfMao) {
            ZfifNfMaoResponse zfifNfMaoResponse = new ZfifNfMaoResponse();
            zfifNfMaoResponse.setItItensnf(new Zfietb011());
            Zfietb011 zfietb011 = new Zfietb011();
            Zfie011 zfie011 = new Zfie011();
            zfie011.setMatkl("TESTE");
            zfietb011.getItem().add(zfie011);
            zfifNfMaoResponse.setItItensnf(zfietb011);
            return zfifNfMaoResponse;
        }
    }
}
Answer 0 (score: 0)
You have two brokers: one created by @EmbeddedKafka and another created by the @ClassRule. Use one or the other; preferably the @EmbeddedKafka, and simply @Autowired the broker instance.
My guess is that the consumer is listening to a different broker; you can confirm that by looking at the INFO log that shows the consumer configuration.
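A minimal sketch of the single-broker setup this answer suggests, reusing the topic and group names from the question; the test class name is made up for illustration, and the assertion simply checks that bootstrap.servers points at the autowired embedded broker:

@RunWith(SpringRunner.class)
@SpringBootTest
@EmbeddedKafka(partitions = 1, topics = { "tp-in-gco-mao-notasfiscais" })
public class SingleBrokerSketchTest {

    // Only the broker created by @EmbeddedKafka exists; there is no @ClassRule broker.
    @Autowired
    private EmbeddedKafkaBroker broker;

    @Test
    public void consumerPointsAtTheEmbeddedBroker() {
        // consumerProps wires bootstrap.servers to the embedded broker's address
        Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("teste9", "false", broker);
        // this is the value to look for in the INFO log that prints the ConsumerConfig
        assertThat(consumerProps.get(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG))
                .isEqualTo(broker.getBrokersAsString());
    }
}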
Answer 1 (score: 0)
I have followed your suggestion; it still triggers the listener, but consumer.poll does not capture the topic content.
@RunWith(SpringRunner.class)
@SpringBootTest
@EmbeddedKafka(partitions = 1, topics = { "tp-in-gco-mao-notasfiscais" })
public class InvoicingServiceTest {

    @Autowired
    private NFKafkaListener nfKafkaListener;

    @Autowired
    public EmbeddedKafkaBroker broker;

    @Autowired
    private KafkaTemplate<Object, Object> template;

    @BeforeClass
    public static void setup() {
        System.setProperty(EmbeddedKafkaBroker.BROKER_LIST_PROPERTY, "spring.kafka.bootstrap-servers");
    }

    @Test
    public void testTemplate() throws Exception {
        NFServiceTest nfServiceTest = spy(new NFServiceTest());
        nfKafkaListener.setNfServiceClient(nfServiceTest);
        Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("teste9", "false", broker);
        consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, InvoiceDeserializer.class);
        DefaultKafkaConsumerFactory<Integer, Object> cf = new DefaultKafkaConsumerFactory<Integer, Object>(consumerProps);
        Consumer<Integer, Object> consumer = cf.createConsumer();
        broker.consumeFromAnEmbeddedTopic(consumer, "tp-in-gco-mao-notasfiscais");
        ZfifNfMao zf = new ZfifNfMao();
        zf.setItItensnf(new Zfietb011());
        Zfietb011 zfietb011 = new Zfietb011();
        Zfie011 zfie011 = new Zfie011();
        zfie011.setMatkl("TESTE");
        zfietb011.getItem().add(zfie011);
        zf.setItItensnf(zfietb011);
        template.send("tp-in-gco-mao-notasfiscais", zf);
        List<ConsumerRecord<Integer, Object>> received = new ArrayList<>();
        int n = 0;
        while (received.size() < 1 && n++ < 10) {
            ConsumerRecords<Integer, Object> records1 = consumer.poll(Duration.ofSeconds(10));
            // records1 is always empty
            if (!records1.isEmpty()) {
                records1.forEach(rec -> received.add(rec));
            }
        }
        assertThat(received).extracting(rec -> {
            ZfifNfMao zfifNfMaoResponse = (ZfifNfMao) rec.value();
            return zfifNfMaoResponse.getItItensnf().getItem().get(0).getMatkl();
        }).contains("TESTE");
        broker.getKafkaServers().forEach(b -> b.shutdown());
        broker.getKafkaServers().forEach(b -> b.awaitShutdown());
        consumer.close();
    }

    public static class NFServiceTest implements INFServiceClient {

        CountDownLatch latch = new CountDownLatch(1);

        @Override
        public ZfifNfMaoResponse enviarSap(ZfifNfMao zfifNfMao) {
            ZfifNfMaoResponse zfifNfMaoResponse = new ZfifNfMaoResponse();
            zfifNfMaoResponse.setItItensnf(new Zfietb011());
            Zfietb011 zfietb011 = new Zfietb011();
            Zfie011 zfie011 = new Zfie011();
            zfie011.setMatkl("TESTE");
            zfietb011.getItem().add(zfie011);
            zfifNfMaoResponse.setItItensnf(zfietb011);
            return zfifNfMaoResponse;
        }
    }
}
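Since the test already builds a Mockito spy and NFServiceTest declares a CountDownLatch that is never used, one way to assert the listener path independently of the raw consumer.poll loop is sketched below. The latch.countDown() call and the verify step are additions for illustration only; they confirm the @KafkaListener reached the service but do not by themselves explain why the poll stays empty.

public static class NFServiceTest implements INFServiceClient {

    final CountDownLatch latch = new CountDownLatch(1);

    @Override
    public ZfifNfMaoResponse enviarSap(ZfifNfMao zfifNfMao) {
        ZfifNfMaoResponse response = new ZfifNfMaoResponse();
        Zfietb011 zfietb011 = new Zfietb011();
        Zfie011 zfie011 = new Zfie011();
        zfie011.setMatkl("TESTE");
        zfietb011.getItem().add(zfie011);
        response.setItItensnf(zfietb011);
        latch.countDown(); // signal that the listener invoked the service
        return response;
    }
}

// ... and inside the @Test method, after template.send(...):
assertThat(nfServiceTest.latch.await(10, TimeUnit.SECONDS)).isTrue();
verify(nfServiceTest).enviarSap(any(ZfifNfMao.class));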