如何从集成流程中启动Spring Batch工作?

时间:2018-11-02 14:58:43

标签: java spring-integration spring-batch spring-integration-sftp

我对Spring Integration和Spring Batch有问题。我想将csv文件从sftp传递到批处理作业,然后将信息转换成POJO并传递到输出。我怎样才能做到这一点?我有下一个配置:

@Configuration
@RequiredArgsConstructor
class BatchJobConfig {
    // NOTE: the previous @Setter was a no-op — Lombok does not generate setters
    // for final fields — so it has been removed.
    private final JobBuilderFactory jobBuilderFactory;
    private final StepBuilderFactory stepBuilderFactory;
    private final DataSource dataSource;
    private final PlatformTransactionManager transactionManager;

    /**
     * Job that reads one CSV file (path supplied as the {@code input.file.name}
     * job parameter) and writes each parsed {@link Bill} to the console.
     */
    @Bean
    public Job readCSVFilesJob() {
        return jobBuilderFactory
                .get("readCSVFilesJob")
                .incrementer(new RunIdIncrementer())
                .start(step())
                .build();
    }

    /** Single chunk-oriented step: 7 items per chunk/transaction. */
    @Bean
    public Step step() {
        return stepBuilderFactory.get("step").<Bill, Bill>chunk(7)
                // Passing null is safe: reader() returns a @StepScope proxy and the
                // real path is resolved from the job parameters at step execution time.
                .reader(reader(null))
                .writer(writer())
                .build();
    }

    /**
     * Step-scoped reader. The resource is late-bound from the
     * {@code input.file.name} job parameter set by {@link FileMessageToJobRequest} —
     * previously it was hard-coded to "/info" (a directory), so the launched job
     * never read the transferred file and produced no output.
     *
     * @param pathToFile absolute path of the CSV file for this job execution
     */
    @Bean
    @StepScope
    public FlatFileItemReader<Bill> reader(
            @Value("#{jobParameters['input.file.name']}") String pathToFile) {
        FlatFileItemReader<Bill> reader = new FlatFileItemReader<>();
        reader.setResource(new FileSystemResource(pathToFile));
        reader.setLinesToSkip(1);          // skip the CSV header row
        reader.setStrict(false);           // don't fail if the file is missing
        reader.setLineMapper(new DefaultLineMapper<Bill>() {{
                setLineTokenizer(new DelimitedLineTokenizer() {{
                        setNames("first-name", "last-name", "amount");
                        setDelimiter(";");
                    }
                });

                setFieldSetMapper(new RecordFieldSetMapper());
            }
        });
        return reader;
    }

    /** Writer that simply prints every Bill to stdout. */
    @Bean
    public ConsoleItemWriter<Bill> writer() {
        return new ConsoleItemWriter<>();
    }

    /**
     * JDBC-backed job repository (BATCH_* tables).
     *
     * @throws Exception if the factory bean cannot build the repository
     */
    @Bean
    protected JobRepository createJobRepository() throws Exception {
        JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
        factory.setDataSource(dataSource);
        factory.setTransactionManager(transactionManager);
        factory.setIsolationLevelForCreate("ISOLATION_SERIALIZABLE");
        factory.setTablePrefix("BATCH_");
        factory.setMaxVarCharLength(1000);
        return factory.getObject();
    }
}

写入器(Writer)

/**
 * Trivial {@link ItemWriter} that prints every item of a chunk to stdout,
 * one per line — handy for verifying that the step actually receives data.
 *
 * @param <T> item type produced by the reader
 */
public class ConsoleItemWriter<T> implements ItemWriter<T> {
    @Override
    public void write(List<? extends T> items) throws Exception {
        items.forEach(System.out::println);
    }
}

集成配置

@Configuration
@RequiredArgsConstructor
public class SftpConfig {
    private final Job job;
    private final JobRepository jobRepository;

    @Value("${sftp.host}")
    private String sftpHost;

    @Value("${sftp.user}")
    private String sftpUser;

    @Value("${sftp.password}")
    private String sftpPassword;

    @Value("${sftp.port}")
    private int sftpPort;

    @Value("${poller.trigger}")
    private int pollerTrigger;

    @Bean(name = PollerMetadata.DEFAULT_POLLER)
    public PollerMetadata defaultPoller() {
        PollerMetadata pollerMetadata = new PollerMetadata();
        pollerMetadata.setTrigger(new PeriodicTrigger(pollerTrigger));
        return pollerMetadata;
    }

    @Bean
    SftpInboundFileSynchronizer sftpInboundFileSynchronizer() {
        SftpInboundFileSynchronizer fileSynchronizer = new SftpInboundFileSynchronizer(sftpSessionFactory());
        fileSynchronizer.setDeleteRemoteFiles(false);
        fileSynchronizer.setRemoteDirectory("/info");
        return fileSynchronizer;
    }

    @Bean
    @InboundChannelAdapter("sftpChannel")
    public MessageSource<File> sftpMessageSource() {
        SftpInboundFileSynchronizingMessageSource source = new SftpInboundFileSynchronizingMessageSource(sftpInboundFileSynchronizer());
        source.setLocalDirectory(new File("/tmp/info"));
        source.setAutoCreateLocalDirectory(true);
        return source;
    }

    @Bean
    public SessionFactory<LsEntry> sftpSessionFactory() {
        DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory(true);
        factory.setHost(sftpHost);
        factory.setPort(sftpPort);
        factory.setUser(sftpUser);
        factory.setPassword(sftpPassword);
        factory.setAllowUnknownKeys(true);
        return new CachingSessionFactory<>(factory);
    }

    @Bean
    @ServiceActivator(inputChannel = "sftpChannel")
    public MessageHandler handler() {
        return message -> System.out.println("transferred");
    }

    @Bean
    public FileMessageToJobRequest fileMessageToJobRequest() {
        FileMessageToJobRequest fileMessageToJobRequest = new FileMessageToJobRequest();
        fileMessageToJobRequest.setFileParameterName("input.file.name");
        fileMessageToJobRequest.setJob(job);
        return fileMessageToJobRequest;
    }

    @Bean
    public JobLaunchingGateway jobLaunchingGateway() {
        SimpleJobLauncher simpleJobLauncher = new SimpleJobLauncher();
        simpleJobLauncher.setJobRepository(jobRepository);
        simpleJobLauncher.setTaskExecutor(new SyncTaskExecutor());
        JobLaunchingGateway jobLaunchingGateway = new JobLaunchingGateway(simpleJobLauncher);

        return jobLaunchingGateway;
    }

    @Bean
    public IntegrationFlow integrationFlow() {
        return IntegrationFlows.from(Files.inboundAdapter(new File("/info")).
                        filter(new SimplePatternFileListFilter("*.csv")),
                c -> c.poller(Pollers.fixedRate(1000).maxMessagesPerPoll(1))).
                handle(fileMessageToJobRequest()).
                handle(jobLaunchingGateway()).
                log(LoggingHandler.Level.WARN, "headers.id + ': ' + payload").
                get();
    }
}

模型(Model)

/**
 * One CSV record ("first-name;last-name;amount").
 * Lombok generates getters/setters/equals/hashCode/toString (@Data),
 * both constructors, and chained setters returning {@code this}
 * (@Accessors(chain = true)) — relied on by RecordFieldSetMapper.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
@Accessors(chain = true)
public class Bill {
    private String firstName;
    private String lastName;
    // NOTE(review): kept as String, presumably to avoid locale/decimal parsing —
    // consider BigDecimal if arithmetic is ever needed.
    private String amount;
}

映射器

import org.springframework.batch.item.file.mapping.FieldSetMapper;
import org.springframework.batch.item.file.transform.FieldSet;

/**
 * Maps one tokenized CSV line ("first-name;last-name;amount") to a {@link Bill}.
 */
public class RecordFieldSetMapper implements FieldSetMapper<Bill> {

    @Override
    public Bill mapFieldSet(FieldSet fieldSet) {
        return new Bill()
                .setFirstName(fieldSet.readString("first-name"))
                .setLastName(fieldSet.readString("last-name"))
                // BUG FIX: "amount" is declared by the tokenizer but was never
                // mapped, leaving Bill.amount null on every record.
                .setAmount(fieldSet.readString("amount"));
    }
}

请求

/**
 * Transforms a Message&lt;File&gt; into a {@link JobLaunchRequest} carrying the
 * file's absolute path as the {@code fileParameterName} job parameter.
 */
@Setter
public class FileMessageToJobRequest {
    private Job job;
    private String fileParameterName;

    @Transformer
    public JobLaunchRequest toJobLaunchRequest(Message<File> message) {
        JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();
        jobParametersBuilder.addString(fileParameterName, message.getPayload().getAbsolutePath());
        // Uniquify the parameters: JobLaunchingGateway does not apply the job's
        // RunIdIncrementer, so re-delivering the same file would otherwise fail
        // with JobInstanceAlreadyCompleteException.
        jobParametersBuilder.addLong("time", System.currentTimeMillis());

        return new JobLaunchRequest(job, jobParametersBuilder.toJobParameters());
    }
}

程序可以正常启动,文件也能从SFTP同步下来。但是在输出中我看不到任何记录信息——IntegrationFlow似乎没有成功触发批处理作业。

0 个答案:

没有答案