即使ItemReader返回null,spring批处理作业也会陷入循环

时间:2018-01-15 14:48:26

标签: spring-integration spring-batch

我正在使用spring batch和integration将xml转换为csv。我有一种方法,工作将从读取文件开始从登陆目录。处理它时会移动 inprocess dir 中的文件。错误文件将移至错误目录。成功处理/写入文件将移至输出/完成

经过一段时间的搜索,我知道itemreader一定有问题,但它也返回null。我不知道问题出在哪里。

以下是我的批量配置

@Bean
public Job commExportJob(Step parseStep) throws IOException {
        return jobs.get("commExportJob")
                .incrementer(new RunIdIncrementer())
                .flow(parseStep)
                .end()
                .listener(listener())
                .build();
    }

@Bean
public Step streamStep() throws IOException {
    CommReader itemReader = itemReader(null);
    if (itemReader != null) {
        return 
                steps.get("streamStep")
                .<Object, Object>chunk(env.getProperty(Const.INPUT_READ_CHUNK_SIZE, Integer.class))
                .reader(itemReader)
                .processor(itemProcessor())
                .writer(itemWriter(null))              
                .listener(getChunkListener())                   
                .build();
    }
    return null;
}

@Bean
@StepScope
public ItemWriter<Object> itemWriter(@Value("#{jobParameters['outputFilePath']}") String outputFilePath) {
    log.info("CommBatchConfiguration.itemWriter() : " + outputFilePath);
    CommItemWriter writer = new CommItemWriter();
    return writer;
}

@Bean
@StepScope
public CommReader itemReader(@Value("#{jobParameters['inputFilePath']}") String inputFilePath) {
    log.info("CommBatchConfiguration.itemReader() : " + inputFilePath);
    CommReader reader = new CommReader(inputFilePath);
    // reader.setEncoding(env.getProperty("char.encoding","UTF-8"));
    return reader;
}

@Bean
@StepScope
public CommItemProcessor itemProcessor() {
    log.info("CommBatchConfiguration.itemProcessor() : Entry");
    return new CommItemProcessor(ruleService);
}

CommReader.java

File inputFile = null;
private String jobName;

public CommReader(String inputFilePath) {
    inputFile = new File(inputFilePath);
}

@Value("#{stepExecution}")
private StepExecution stepExecution;

public String getJobName() {
    return jobName;
}

public void setJobName(String jobName) {
    this.jobName = jobName;
}

@Override
public Object read() throws IOException {

    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    DocumentBuilder builder;
    if (inputFile.exists()) {
        try {
            builder = factory.newDocumentBuilder();
            log.info("CommReader.read() :" + inputFile.getAbsolutePath());
            Document document = builder.parse(inputFile);
            return document;
        } catch (ParserConfigurationException | SAXException | TransformerFactoryConfigurationError e) {
            log.error("Exception while reading ", e);
        }
    }

    return null;
}

@Override
public void close() throws ItemStreamException {
}

@Override
public void open(ExecutionContext arg0) throws ItemStreamException {
}

@Override
public void update(ExecutionContext arg0) throws ItemStreamException {
}

@Override
public void setResource(Resource arg0) {
}

CommItemProcessor.java

@Autowired
CommExportService ruleService;

public CommItemProcessor(CommExportService ruleService) {
    this.ruleService = ruleService;
}

@Override
public Object process(Object bean) throws Exception {
    log.info("CommItemProcessor.process() : Item Processor : " + bean);
    return bean;
}

CommItemWriter.java

FlatFileItemWriter<byte[]>  delegate;
ExecutionContext            execContext;
FileOutputStream            fileWrite;
File                        stylesheet;
StreamSource    stylesource;
Transformer     transformer;
List<List<?>>   itemsTotal  = null;
int             recordCount = 0;

@Autowired
FileUtil fileUtil;

@Value("${input.completed.dir}")
String completedDir;

@Value("${input.inprocess.dir}")
String inprocessDir;


public void update(ExecutionContext arg0) throws ItemStreamException {
    this.delegate.update(arg0);
}

public void open(ExecutionContext arg0) throws ItemStreamException {
    this.execContext = arg0;
    this.delegate.open(arg0);

}

public void close() throws ItemStreamException {
    this.delegate.close();

}

@Override
public void write(List<? extends Object> items) throws Exception {
    log.info("CommItemWriter.write() : items.size() : " + items.size());
    stylesheet = new File("./config/style.xsl");
    stylesource = new StreamSource(stylesheet);
    String fileName = fileUtil.getFileName();
    try {
        transformer = TransformerFactory.newInstance().newTransformer(stylesource);
    } catch (TransformerConfigurationException | TransformerFactoryConfigurationError e) {
        log.error("Exception while writing",e);
    }

    for (Object object : items) {
        log.info("CommItemWriter.write()  : Object : " + object.getClass().getName());
        log.info("CommItemWriter.write()  : FileName : " + fileName);
        Source source = new DOMSource((Document) object);
        Result outputTarget = new StreamResult(
                new File(fileName));
        transformer.transform(source, outputTarget);
    }
}

在chunkListener中我没有做什么。

以下是工作听众。

@Override
public void beforeJob(JobExecution jobExecution) {
    log.info("JK: CommJobListener.beforeJob()");
}

@Override
public void afterJob(JobExecution jobExecution) {
    log.info("JK: CommJobListener.afterJob()");
    JobParameters jobParams = jobExecution.getJobParameters();
    File inputFile = new File(jobParams.getString("inputFilePath"));
    File outputFile = new File(jobParams.getString("outputFilePath"));
    try {
        if (jobExecution.getStatus().isUnsuccessful()) {
            Files.move(inputFile.toPath(), Paths.get(inputErrorDir, inputFile.getName()),
                    StandardCopyOption.REPLACE_EXISTING);
            Files.move(outputFile.toPath(), Paths.get(outputErrorDir, outputFile.getName()),
                    StandardCopyOption.REPLACE_EXISTING);
        } else {
            String inputFileName = inputFile.getName();
            Files.move(inputFile.toPath(), Paths.get(inputCompletedDir, inputFileName),
                    StandardCopyOption.REPLACE_EXISTING);
            Files.move(outputFile.toPath(), Paths.get(outputCompletedDir, outputFile.getName()),
                    StandardCopyOption.REPLACE_EXISTING);
        }
    } catch (IOException ioe) {
        log.error("IOException occured ",ioe);
    }
}

我也在使用整合流程。

@Bean
public IntegrationFlow messagesFlow(JobLauncher jobLauncher) {
    try {
        Map<String, Object> headers = new HashMap<>();
        headers.put("jobName", "commExportJob");
        return IntegrationFlows
                .from(Files.inboundAdapter(new File(env.getProperty(Const.INPUT_LANDING_DIR)))
                        ,
                        e -> e.poller(Pollers
                                .fixedDelay(env.getProperty(Const.INPUT_POLLER_DELAY, Integer.class).intValue())
                                .maxMessagesPerPoll(
                                        env.getProperty(Const.INPUT_MAX_MESSAGES_PER_POLL, Integer.class).intValue())
                                .taskExecutor(getFileProcessExecutor())))
                .handle("moveFile","moveFile")
                .enrichHeaders(headers)
                .transform(jobTransformer)
                .handle(jobLaunchingGw(jobLauncher))                    
                .channel("nullChannel").get();
    } catch (Exception e) {
        log.error("Exception in Integration flow",e);
    }
    return null;
}

@Autowired
private Environment env;

@Bean
public MessageHandler jobLaunchingGw(JobLauncher jobLauncher) {
    return new JobLaunchingGateway(jobLauncher);
}

@MessagingGateway
public interface IMoveFile {
    @Gateway(requestChannel = "moveFileChannel")
    Message<File> moveFile(Message<File> inputFileMessage);
}

@Bean(name = "fileProcessExecutor")
public Executor getFileProcessExecutor() {
    ThreadPoolTaskExecutor fileProcessExecutor = new ThreadPoolTaskExecutor();
    fileProcessExecutor.setCorePoolSize(env.getRequiredProperty(Const.INPUT_EXECUTOR_POOLSIZE, Integer.class));
    fileProcessExecutor.setMaxPoolSize(env.getRequiredProperty(Const.INPUT_EXECUTOR_MAXPOOLSIZE, Integer.class));
    fileProcessExecutor.setQueueCapacity(env.getRequiredProperty(Const.INPUT_EXECUTOR_QUEUECAPACITY, Integer.class));
    fileProcessExecutor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
    fileProcessExecutor.initialize();
    return fileProcessExecutor;
}

1 个答案:

答案 0 :(得分:0)

尝试在CommReader.java的read方法中返回文档本身而不是Object,然后在writer中检查它是否为null以阻止它。