I am writing a Spring Boot application that consumes records from a Kafka topic. The records are produced with an Avro schema, and we have to consume them and insert the data into a table. I have built a prototype that consumes the records, and it runs as expected.
My question is more about the consumer design. Since we have no control over the producer side, what additional configuration should I add on the consumer side so that it handles unusual and worst-case scenarios? What standards should I follow?
Example:
Given that many failure scenarios are possible, I want to make sure my consumption process is fault tolerant. Please advise.
Consumer configuration:
package com.example.consumer;
import java.util.Collections;
import java.util.Properties;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.apache.avro.generic.GenericRecord;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig;
import io.confluent.kafka.serializers.KafkaAvroDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.config.SslConfigs;
import org.apache.kafka.clients.CommonClientConfigs;
@Component
public class CreateConsumer2 {

    // Broker, topic, consumer-group, and Schema Registry settings injected from application config
    @Value("${app.kafka_brokers}")
    private String KAFKA_BROKERS;
    @Value("${app.topic}")
    private String KAFKA_TOPIC;
    @Value("${app.group_id_config}")
    private String GROUP_ID_CONFIG;
    @Value("${app.schema_registry_url}")
    private String SCHEMA_REGISTRY_URL;
    @Value("${app.offset_reset}")
    private String OFFSET_RESET;
    @Value("${app.max_poll_records}")
    private String MAX_POLL_RECORDS;

    // SSL settings
    @Value("${app.security.protocol}")
    private String SSL_PROTOCOL;
    @Value("${app.ssl.truststore.password}")
    private String SSL_TRUSTSTORE_SECURE;
    @Value("${app.ssl.keystore.password}")
    private String SSL_KEYSTORE_SECURE;
    @Value("${app.ssl.key.password}")
    private String SSL_KEY_SECURE;
    @Value("${app.ssl.truststore.location}")
    private String SSL_TRUSTSTORE_LOCATION_FILE_NAME;
    @Value("${app.ssl.keystore.location}")
    private String SSL_KEYSTORE_LOCATION_FILE_NAME;
    // Session/request timeout settings I have considered adding but have not wired in yet:
    // @Value("${MAX_POLL_INTERVAL_MS_CONFIG}")
    // private String MAX_POLL_INTERVAL_MS_CONFIG;
    // @Value("${REQUEST_TIMEOUT_MS_CONFIG}")
    // private String REQUEST_TIMEOUT_MS_CONFIG;
    public Consumer<String, GenericRecord> consumerCreate() {
        Properties props = new Properties();
        // Connection and group settings
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_BROKERS);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, GROUP_ID_CONFIG);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, OFFSET_RESET);
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, MAX_POLL_RECORDS);
        // String keys; Avro values deserialized to GenericRecord via the Schema Registry
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class.getName());
        props.put(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, SCHEMA_REGISTRY_URL);
        // Offsets are committed manually after records are processed
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        // SSL
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SSL_PROTOCOL);
        props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, SSL_TRUSTSTORE_LOCATION_FILE_NAME);
        props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, SSL_TRUSTSTORE_SECURE);
        props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, SSL_KEYSTORE_LOCATION_FILE_NAME);
        props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, SSL_KEYSTORE_SECURE);
        props.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, SSL_KEY_SECURE);

        KafkaConsumer<String, GenericRecord> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singleton(KAFKA_TOPIC));
        return consumer;
    }
}
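For reference, here is a minimal sketch of the kind of poll loop that drives this consumer. Since enable.auto.commit is false, offsets are committed manually after each batch; the ConsumeLoop class and insertIntoTable method are simplified placeholders rather than my actual processing code.

package com.example.consumer;

import java.time.Duration;
import org.apache.avro.generic.GenericRecord;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;

public class ConsumeLoop {

    public void run(Consumer<String, GenericRecord> consumer) {
        try {
            while (true) {
                // Fetch up to max.poll.records records, waiting at most 500 ms
                ConsumerRecords<String, GenericRecord> records = consumer.poll(Duration.ofMillis(500));
                for (ConsumerRecord<String, GenericRecord> record : records) {
                    insertIntoTable(record.value());
                }
                // Commit only after the whole batch has been processed,
                // since enable.auto.commit is disabled above
                consumer.commitSync();
            }
        } finally {
            consumer.close();
        }
    }

    private void insertIntoTable(GenericRecord value) {
        // Placeholder: map the GenericRecord fields to a table row and persist it
    }
}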