Spring Integration处理方法问题

时间:2019-01-10 14:05:35

标签: spring-integration

我有一个带处理方法的集成流程。将文件从 FTP 服务器拉到本地后,此方法读取一个 csv 文件(例如 foo.csv)并创建一个新文件 bar.csv,然后通过 FTP 将 bar.csv 再次传输到 FTP 服务器。现在的问题是:该方法会根据轮询器不断地重复读取 foo.csv、创建新的 bar.csv 并将其发送出去(这是在 fileInboundFlowFromFTPServer 方法中完成的)。我需要此过程只执行一次——除非同一个 foo.csv 已更改、或者拉取到了新的 foo.csv,否则不要重复处理。我已经在 @Gary Russell 的帮助下使用了 JDBC 元数据存储,它本身可以完全按需要工作;但由于处理方法会不断读取 foo.csv 并创建新的 bar.csv,文件的修改日期随之变化,元数据存储因此被更新,文件又被再次发送。我正在考虑的解决方案是:将 foo.csv 重命名为 foo_10012019.csv,再将其发送到 FTP 服务器下游的 History 文件夹,然后从本地删除它。该怎么做?我是否应该为"仅发送 foo_10012019.csv"这一部分创建一个新的流程?

这是我的集成类:

@Configuration
@EnableIntegration
@ComponentScan
public class FTIntegration {

public static final String TIMEZONE_UTC = "UTC";
public static final String TIMESTAMP_FORMAT_OF_FILES = "yyyyMMddHHmmssSSS";
public static final String TEMPORARY_FILE_SUFFIX = ".part";
public static final int POLLER_FIXED_PERIOD_DELAY = 5000;
public static final int MAX_MESSAGES_PER_POLL = 100;


private DataSource dataSource;

//private static final Logger LOG = LoggerFactory.getLogger(FTIntegration.class);
private static final Logger LOG1 = Logger.getLogger(FTIntegration.class);
private static final String CHANNEL_INTERMEDIATE_STAGE = "intermediateChannel";

private static final String OUTBOUND_CHANNEL = "outboundChannel";

/* pulling the server config from postgres DB*/

private final BranchRepository branchRepository;

@Autowired
private CSVToCSVNoQ csvToCSVNoQ;

@Value("${app.temp-dir}")
private String localTempPath;


public FTIntegration(BranchRepository branchRepository) {
    this.branchRepository = branchRepository;
}

@Bean
public Branch myBranch(){
    return new Branch();
}

/**
 * The default poller with 5s, 100 messages, RotatingServerAdvice and transaction.
 *
 * @return default poller.
 */
@Bean(name = PollerMetadata.DEFAULT_POLLER)
public PollerMetadata poller(){
    return Pollers
            .fixedDelay(POLLER_FIXED_PERIOD_DELAY)
            .maxMessagesPerPoll(MAX_MESSAGES_PER_POLL)
            .transactional()
            .get();
}

/**
 * The direct channel for the flow.
 *
 * @return MessageChannel
 */
@Bean
public MessageChannel stockIntermediateChannel() {
    return new DirectChannel();
}
/**
 * Get the files from a remote directory. Add a timestamp to the filename
 * and write them to a local temporary folder.
 *
 * @return IntegrationFlow
 */

@Bean
public PropertiesPersistingMetadataStore store() {
    PropertiesPersistingMetadataStore store = new PropertiesPersistingMetadataStore();
    return store;
}
   public IntegrationFlow fileInboundFlowFromFTPServer(Branch myBranch) throws IOException {

    final FtpInboundChannelAdapterSpec sourceSpecFtp = Ftp.inboundAdapter(createNewFtpSessionFactory(myBranch))
            .preserveTimestamp(true)
          //.patternFilter("*.csv")
            .maxFetchSize(MAX_MESSAGES_PER_POLL)
            .remoteDirectory(myBranch.getFolderPath())
            .regexFilter("FEFOexport"+myBranch.getBranchCode()+".csv")
            .deleteRemoteFiles(true)
            .localDirectory(new File(myBranch.getBranchCode()))
            .temporaryFileSuffix(TEMPORARY_FILE_SUFFIX)


            /*.localFilenameExpression(new FunctionExpression<String>(s -> {
                final int fileTypeSepPos = s.lastIndexOf('.');
                return DateTimeFormatter
                        .ofPattern(TIMESTAMP_FORMAT_OF_FILES)
                        .withZone(ZoneId.of(TIMEZONE_UTC))
                        .format(Instant.now())
                        + "_"
                        + s.substring(0,fileTypeSepPos)
                        + s.substring(fileTypeSepPos);
            }))*/;

    // Poller definition
    final Consumer<SourcePollingChannelAdapterSpec> stockInboundPoller = endpointConfigurer -> endpointConfigurer
            .id("stockInboundPoller")
            .autoStartup(true)
            .poller(poller());

    IntegrationFlow flow = IntegrationFlows
            .from(sourceSpecFtp, stockInboundPoller)

            .transform(File.class, p ->{
                // log step
                LOG1.info("flow=stockInboundFlowFromAFT, message=incoming file: " + p);
                return p;
            })
            .handle(m -> {
                try {
                    this.csvToCSVNoQ.writeCSVfinal("test", myBranch.getBranchCode() + "/final" + myBranch.getBranchCode() + ".csv", myBranch.getBranchCode() + "/FEFOexport" + myBranch.getBranchCode() + ".csv");
                    LOG1.info("Writing final file .csv " + m);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            })
            .get();

    return flow;
}

@Bean
public IntegrationFlow stockIntermediateStageChannel() {
    IntegrationFlow flow = IntegrationFlows
            .from(CHANNEL_INTERMEDIATE_STAGE)
            .transform(p -> {
                //log step
                LOG1.info("flow=stockIntermediateStageChannel, message=rename file: " + p);

                return p;
            })
            //TODO
            .channel(new NullChannel())
            .get();

    return flow;

}

/*
* Creating the outbound adaptor to send files from local to FTP server
*
* */


public IntegrationFlow localToFtpFlow(Branch myBranch){

         return IntegrationFlows.from(Files.inboundAdapter(new File(myBranch.getBranchCode()))
                    .filter(new ChainFileListFilter<File>()
                            .addFilter(new RegexPatternFileListFilter("final" + myBranch.getBranchCode() +".csv"))
                            .addFilter(new FileSystemPersistentAcceptOnceFileListFilter(metadataStore(dataSource), "foo"))),//FileSystemPersistentAcceptOnceFileListFilter
            e -> e.poller(Pollers.fixedDelay(10_000)))

            .transform( p ->{
                LOG1.info("Sending file " + p + " to FTP branch " + myBranch.getBranchCode());

                return p;
            })


            .log()
            .handle(Ftp.outboundAdapter(createNewFtpSessionFactory(myBranch),FileExistsMode.REPLACE)
                    .useTemporaryFileName(true)
                    .autoCreateDirectory(false)
                    .remoteDirectory(myBranch.getFolderPath()), e -> e.advice(expressionAdvice()))
                    )
            .get();
}


    @Bean
public Advice expressionAdvice() {
    ExpressionEvaluatingRequestHandlerAdvice advice = new ExpressionEvaluatingRequestHandlerAdvice();
    //advice.setSuccessChannelName("success.input");
    advice.setOnSuccessExpressionString("payload.delete() + ' was successful'");
    //advice.setFailureChannelName("failure.input");
    advice.setOnFailureExpressionString("payload + ' was bad, with reason: ' + #exception.cause.message");
    advice.setTrapException(true);
    return advice;
}


public DefaultFtpSessionFactory createNewFtpSessionFactory(Branch branch){
    final DefaultFtpSessionFactory factory = new DefaultFtpSessionFactory();
    factory.setHost(branch.getHost());
    factory.setUsername(branch.getUsern());
    factory.setPort(branch.getFtpPort());
    factory.setPassword(branch.getPassword());
    return factory;
}

@Bean
public ConcurrentMetadataStore metadataStore(final DataSource dataSource) {
    return new JdbcMetadataStore(dataSource);
}

}

1 个答案:

答案 0 :(得分:0)

好;问题的一部分是您在每次轮询时都重新获取远程文件。但是,尚不清楚为什么它会作为新消息再次被发送,因为默认的 .localFilter 是 AcceptOnceFileListFilter,所以该文件应当被忽略。也许开启调试日志记录、或在调试器中运行,将有助于弄清那里发生了什么。

您应将 FtpPersistentAcceptOnceFileListFilter 添加到 .filter 中。这样,只有在远程服务器上文件的时间戳发生更改时,才会重新获取该文件。

此外,如果您要处理这种(时间戳更改后重新处理的)情况,localFilter 需要一个 FileSystemPersistentAcceptOnceFileListFilter,以便在时间戳更改时让文件再次通过。