在我的应用程序中,通过 @Autowired 自动装配的 dataSource 对象为 null。该对象正在 DAO 类中使用。
应用程序流程 -> 1. OAConsumer.java 开始执行 2. 调用 Service.java 3. 调用 KafkaOffsetDAOImpl 类中的 insertOffset
使用的 IDE 是 Eclipse Oxygen。下面是代码和错误详细信息。
显示错误
java.lang.NullPointerException
at com.oa.dao.KafkaOffsetDAOImpl.insertOffset(KafkaOffsetDAOImpl.java:36)
at com.oa.services.Service.savePayload(Service.java:54)
at com.oa.consumer.OAConsumer.orderConsumer(OAConsumer.java:30)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
at java.lang.reflect.Method.invoke(Unknown Source)
at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:170)
at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.invoke(InvocableHandlerMethod.java:120)
at org.springframework.kafka.listener.adapter.HandlerAdapter.invoke(HandlerAdapter.java:48)
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:283)
at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.invoke(BatchMessagingMessageListenerAdapter.java:146)
at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.onMessage(BatchMessagingMessageListenerAdapter.java:138)
at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.onMessage(BatchMessagingMessageListenerAdapter.java:59)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchOnMessage(KafkaMessageListenerContainer.java:1052)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchOnMessage(KafkaMessageListenerContainer.java:1036)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchListener(KafkaMessageListenerContainer.java:998)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchListener(KafkaMessageListenerContainer.java:938)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeListener(KafkaMessageListenerContainer.java:921)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:740)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:689)
at java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)
at java.util.concurrent.FutureTask.run(Unknown Source)
at java.lang.Thread.run(Unknown Source)
KafkaOffsetDAOImpl.java
package com.oa.dao;
import java.sql.Connection;
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.core.namedparam.SqlParameterSource;
import org.springframework.jdbc.core.simple.SimpleJdbcCall;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.stereotype.Repository;
import com.oa.model.KafkaOffsetRecord;
public class KafkaOffsetDAOImpl implements KafkaOffsetDAO {
// @Autowired
// JdbcTemplate jdbcTemplate;
@Autowired
DataSource dataSource;
@Override
public boolean insertOffset(KafkaOffsetRecord offsetRecord) {
boolean status=false;
try(Connection connection =dataSource.getConnection()){
if(connection.isValid(10000))
status= true;
}catch (Exception e) {
e.printStackTrace();
}
return false;
}
OracleConfig.java
package com.oa.config;
import java.sql.SQLException;
import javax.sql.DataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
@Configuration
public class OracleConfig {
private static final Logger logger = LoggerFactory.getLogger(OracleConfig.class);
@Value("${spring.datasource.url}")
private String url;
@Value("${spring.datasource.username}")
private String username;
@Value("${spring.datasource.password}")
private String password;
@Value("${spring.datasource.driver-class-name}")
private String driverClassName;
@Value("${spring.datasource.maximumPoolSize}")
private int maxPoolSize;
@Value("${spring.datasource.minimumIdle}")
private int minIdle;
@Value("${spring.datasource.connectionTimeout}")
private long connTimeOut;
@Value("${spring.datasource.idleTimeout}")
private long idleTimeout;
//
/**
* See <a href="http://google.com">http://www.baeldung.com/hikaricp</a>
*/
@Bean
public HikariDataSource dataSource() {
HikariConfig hikariConfig = new HikariConfig();
hikariConfig.setDriverClassName(driverClassName);
hikariConfig.setUsername(username);
hikariConfig.setPassword(password);
hikariConfig.setJdbcUrl(url);
hikariConfig.setMaximumPoolSize(maxPoolSize);
hikariConfig.setConnectionTimeout(connTimeOut);
hikariConfig.setMinimumIdle(minIdle);
hikariConfig.setIdleTimeout(idleTimeout);
HikariDataSource hikariDataSource = new HikariDataSource(hikariConfig);
if (hikariDataSource.isRunning()) {
logger.info("------> Oracle DB connection created successfully.");
}
return hikariDataSource;
}
Consumer.java
package com.oa.consumer;
import java.util.List;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.messaging.handler.annotation.Header;
import org.springframework.messaging.handler.annotation.Payload;
import com.oa.services.OrderService;
public class OAConsumer {
private static final Logger LOGGER = LoggerFactory.getLogger(OAConsumer.class);
@KafkaListener(topics = "**removed**", groupId = "**removed**")
public void orderConsumer(List<ConsumerRecord<String, String>> records, Acknowledgment acknowledgment) {
LOGGER.debug("START :: orderConsumer");
OrderService orderService = new OrderService();
try {
for (ConsumerRecord<String, String> record : records) {
LOGGER.debug("Processing Offset Order :: offset={} :: partitionId={} :: topicName={}", record.offset(),
record.partition(), record.topic());
orderService.savePayload(record);
}
acknowledgment.acknowledge();
LOGGER.debug("END :: orderConsumer:: Acknowledged");
} catch (Exception exception) {
exception.printStackTrace();
LOGGER.error(exception.getMessage());
}
}
Service.java
package com.oa.services;
import java.io.IOException;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.oa.dao.KafkaOffsetDAO;
import com.oa.dao.KafkaOffsetDAOImpl;
import com.oa.model.KafkaOffsetRecord;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
public class Service {
private static final Logger LOGGER = LoggerFactory.getLogger(Service.class);
public Boolean checkOAEligible(String message, long offset, String topic) throws IOException {
return true;
}
public Boolean savePayload(ConsumerRecord<String, String> record) {
LOGGER.debug("START:: saveOrderPayload ::offset= {} :: topic= {}", record.offset(), record.topic());
Boolean status = false;
KafkaOffsetDAO dao = new KafkaOffsetDAOImpl();
try {
if (checkOAEligible(record.value(), record.offset(), record.topic())) {
KafkaOffsetRecord kafkaOffsetRecord= new KafkaOffsetRecord(record);
status = dao.insertOffset(kafkaOffsetRecord);
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
LOGGER.debug("END:: saveOrderPayload :: offset={} :: topic={} , status={}", record.offset(), record.topic(), status);
return status;
}
}
KafkaOffsetRecord
包com.oa.model;
导入org.apache.kafka.clients.consumer.ConsumerRecord;
公共类KafkaOffsetRecord {
long offsetId;
String jsonMsg;
String kafkaSourceType;
int partitionId;
String kafkaTopicName;
public KafkaOffsetRecord() {
}
public KafkaOffsetRecord(ConsumerRecord<String, String> consumerRecord) {
offsetId = consumerRecord.offset();
jsonMsg = consumerRecord.value();
partitionId=consumerRecord.partition();
kafkaTopicName= consumerRecord.topic();
kafkaSourceType = "TEST";
}
public long getOffsetId() {
return offsetId;
}
public String getJsonMsg() {
return jsonMsg;
}
public String getKafkaSourceType() {
return kafkaSourceType;
}
public int getPartitionId() {
return partitionId;
}
public String getKafkaTopicName() {
return kafkaTopicName;
}
public void setOffsetId(long offsetId) {
this.offsetId = offsetId;
}
public void setJsonMsg(String jsonMsg) {
this.jsonMsg = jsonMsg;
}
public void setKafkaSourceType(String kafkaSourceType) {
this.kafkaSourceType = kafkaSourceType;
}
public void setPartitionId(int partitionId) {
this.partitionId = partitionId;
}
public void setKafkaTopicName(String kafkaTopicName) {
this.kafkaTopicName = kafkaTopicName;
}
}
application.properties
spring.datasource.url=jdbc:oracle:thin:@(DESCRIPTION=(ADDRESS_LIST=(LOAD_BALANCE=ON)(FAILOVER=OFF)(ADDRESS=(PROTOCOL=TCP)(HOST=**removed**)(PORT=**removed**))(ADDRESS=(PROTOCOL=TCP)(HOST=**removed**)(PORT=**removed**)))(CONNECT_DATA=(SERVICE_NAME=**removed**)(SERVER=DEDICATED)))
spring.datasource.driver-class-name=oracle.jdbc.OracleDriver
spring.datasource.username=**removed**
spring.datasource.password=**removed**
spring.datasource.type=oracle.jdbc.pool.OracleDataSource
spring.datasource.maximumPoolSize=20
spring.datasource.minimumIdle=5
spring.datasource.connectionTimeout=30000
spring.datasource.idleTimeout=1500000
答案 0 :(得分:-1)
注入依赖时应使用构造函数注入:
public class KafkaOffsetDAOImpl implements KafkaOffsetDAO {
private final DataSource dataSource;
@Autowired
KafkaOffsetDAOImpl(final Datasource datasrouce) {
this.dataSource = dataSource;
}
这样就可以确保 Spring 在创建该 bean 时注入自动装配的字段(原文将 "Spring" 和 "field" 误译为"弹簧"和"场")。