I recently started writing Spring Batch jobs using Java-based configuration and the Spring Batch starter. I use a partitioned step together with a task executor to do the work. The problem I am facing is that once the job completes, the batch process does not stop; it keeps running both in Eclipse and on my Linux box, and I have to find the process and kill it manually. Can you help? The job works fine when I run it single-threaded, without the partitioned step.
My job configuration:
@Bean
@StepScope
public ItemReader<MediaAsset> metaDataExportReader(
        @Value("#{jobParameters[sourceSystemCode]}") String sourceSystemCode,
        @Value("#{jobParameters[assetType]}") String assetType,
        @Value("#{stepExecutionContext[startingMediaAssetId]}") long startingMediaAssetId,
        @Value("#{stepExecutionContext[endingMediaAssetId]}") long endingMediaAssetId,
        @Value("#{stepExecutionContext[threadName]}") String threadName) throws Exception {
    logger.debug("Reader is called...."+sourceSystemCode);
    logger.debug("page size---------->"+jobConfig.getPageOrChunkSizeMetaDataExport());
    logger.debug("startingMediaAssetId---------->"+startingMediaAssetId);
    logger.debug("endingMediaAssetId"+endingMediaAssetId);
    logger.debug("threadName"+threadName);
    final Map<String,Object> parameters = new HashMap<>();
    parameters.put("startingMediaAssetId",startingMediaAssetId);
    parameters.put("endingMediaAssetId",endingMediaAssetId);
    JdbcPagingItemReader<MediaAsset> jdbcPagingItemReader = getJdbcPagingItemReader(sourceSystemCode, assetType);
    jdbcPagingItemReader.setParameterValues(parameters);
    return jdbcPagingItemReader;
}
@Bean(destroyMethod="close")
@StepScope
public ItemWriter<MediaAsset> metaDataExportWriter(
        @Value("#{jobParameters[sourceSystemCode]}") String sourceSystemCode,
        @Value("#{jobParameters[assetType]}") String assetType,
        @Value("#{stepExecutionContext[startingMediaAssetId]}") long startingMediaAssetId,
        @Value("#{stepExecutionContext[endingMediaAssetId]}") long endingMediaAssetId,
        @Value("#{stepExecutionContext[threadName]}") String threadName) throws Exception {
    logger.debug("Coming here Item Writer,..."+threadName);
    logger.debug("getItemsPerFile---------->"+jobConfig.getPageOrChunkSizeMetaDataExport());
    // for xml file creation
    StaxEventItemWriter<MediaAsset> staxEventItemWriter = new StaxEventItemWriter<>();
    staxEventItemWriter.setRootTagName(DL3ConstantUtil.EXPORT_ASSET_METADATA_BY_SOURCESYSTEM_CODE_ROOT_TAG);
    staxEventItemWriter.setMarshaller(marshaller);
    staxEventItemWriter.setOverwriteOutput(true);
    // for splitting the files into multiple files based on record size
    MultiResourceItemWriter<MediaAsset> multiResourceItemWriter = new MultiResourceItemWriter<>();
    multiResourceItemWriter.setItemCountLimitPerResource(jobConfig.getPageOrChunkSizeMetaDataExport());
    multiResourceItemWriter.setDelegate(staxEventItemWriter);
    multiResourceItemWriter.setResourceSuffixCreator(new ResourceSuffixCreator() {
        @Override
        public String getSuffix(int index) {
            return DL3ConstantUtil.UNDERSCORE+threadName+DL3ConstantUtil.UNDERSCORE+startingMediaAssetId+DL3ConstantUtil.UNDERSCORE+endingMediaAssetId+DL3ConstantUtil.UNDERSCORE+index+DL3ConstantUtil.EXPORT_ASSET_METADATA_BY_SOURCESYSTEM_CODE_FILE_NAME_SUFFIX;
        }
    });
    logger.debug("writer sourceSystemCode"+sourceSystemCode);
    switch (assetType) {
        case DL3ConstantUtil.IMAGE_ASSET:
            switch (sourceSystemCode) {
                case DL3ConstantUtil.LIGHTBOX:
                    multiResourceItemWriter.setResource(new FileSystemResource(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"IA"+jobConfig.getBackSlash()+"DPL"+jobConfig.getBackSlash()+DL3ConstantUtil.EXPORT_ASSET_METADATA_BY_SOURCESYSTEM_CODE_LIGHT_BOX_FILE_NAME_PREFIX_NAME_IMG));
                    break;
                case DL3ConstantUtil.SOLAR:
                    multiResourceItemWriter.setResource(new FileSystemResource(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"IA"+jobConfig.getBackSlash()+"SOLAR"+jobConfig.getBackSlash()+DL3ConstantUtil.EXPORT_ASSET_METADATA_BY_SOURCESYSTEM_CODE_SOLAR_BOX_FILE_NAME_PREFIX_NAME_IMG));
                    break;
                case DL3ConstantUtil.MANUAL_UPLOAD:
                    multiResourceItemWriter.setResource(new FileSystemResource(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"IA"+jobConfig.getBackSlash()+"DDDS"+jobConfig.getBackSlash()+DL3ConstantUtil.EXPORT_ASSET_METADATA_BY_SOURCESYSTEM_CODE_DDDS_BOX_FILE_NAME_PREFIX_NAME_IMG));
                    break;
                default:
                    break;
            }
            break;
        case DL3ConstantUtil.DOCUMENT_ASSET:
            switch (sourceSystemCode) {
                case DL3ConstantUtil.SOLAR:
                    multiResourceItemWriter.setResource(new FileSystemResource(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"DA"+jobConfig.getBackSlash()+"SOLAR"+jobConfig.getBackSlash()+DL3ConstantUtil.EXPORT_ASSET_METADATA_BY_SOURCESYSTEM_CODE_SOLAR_BOX_FILE_NAME_PREFIX_NAME_DOC));
                    break;
                case DL3ConstantUtil.MANUAL_UPLOAD:
                    multiResourceItemWriter.setResource(new FileSystemResource(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"DA"+jobConfig.getBackSlash()+"DDDS"+jobConfig.getBackSlash()+DL3ConstantUtil.EXPORT_ASSET_METADATA_BY_SOURCESYSTEM_CODE_DDDS_BOX_FILE_NAME_PREFIX_NAME_DOC));
                    break;
                default:
                    break;
            }
            break;
        default:
            throw new Exception("no matching assetType ");
    }
    return multiResourceItemWriter;
}
@Bean(name="GenerateXMLFilesMaster")
public Step generateXMLFilesMaster(ItemReader<MediaAsset> metaDataExportReader,ItemWriter<MediaAsset> metaDataExportWriter) {
logger.debug("Master Step initialization...");
return stepBuilderFactory.get("GenerateXMLFilesMaster").
partitioner(generateXMLFilesSlave(metaDataExportReader,metaDataExportWriter)).
partitioner("GenerateXMLFilesSlave",metaDataExportPartioner(null,null,null)).
partitionHandler(metaDataExportPartionHandler(metaDataExportReader,metaDataExportWriter)).
build();
}
@Bean(name="GenerateXMLFilesSlave")
public Step generateXMLFilesSlave(ItemReader<MediaAsset> metaDataExportReader,ItemWriter<MediaAsset> metaDataExportWriter) {
return stepBuilderFactory.get("GenerateXMLFilesSlave")
.<MediaAsset, MediaAsset> chunk(jobConfig.getPageOrChunkSizeMetaDataExport())
.reader(metaDataExportReader)
.writer(metaDataExportWriter)
.build();
}
@Bean(name="uploadTaskletMetaData")
@StepScope
public Tasklet uploadTaskletMetaData(@Value("#{jobParameters[sourceSystemCode]}") String sourceSystemCode,@Value("#{jobParameters[assetType]}") String assetType){
MetaDataUploadTasklet metaDataUploadTasklet = new MetaDataUploadTasklet();
logger.debug("sourceSystemCode----->"+sourceSystemCode);
logger.debug("assetType----->"+assetType);
metaDataUploadTasklet.setTargetFolder(jobConfig.getTargetMetaDataRootPath());
switch (assetType) {
case DL3ConstantUtil.IMAGE_ASSET:
switch (sourceSystemCode) {
case DL3ConstantUtil.LIGHTBOX:
metaDataUploadTasklet.setSourceDirectory(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"IA"+jobConfig.getBackSlash()+"DPL"+jobConfig.getBackSlash());
//metaDataUploadTasklet.setTargetFolder(jobConfig.getTargetMetaDataRootPath()+"/IA/DPL");
break;
case DL3ConstantUtil.SOLAR:
metaDataUploadTasklet.setSourceDirectory(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"IA"+jobConfig.getBackSlash()+"SOLAR"+jobConfig.getBackSlash());
//metaDataUploadTasklet.setTargetFolder(jobConfig.getTargetMetaDataRootPath()+"/IA/SOLAR");
break;
case DL3ConstantUtil.MANUAL_UPLOAD:
metaDataUploadTasklet.setSourceDirectory(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"IA"+jobConfig.getBackSlash()+"DDDS"+jobConfig.getBackSlash());
//metaDataUploadTasklet.setTargetFolder(jobConfig.getTargetMetaDataRootPath()+"/IA/DDDS");
break;
default:
break;
}
break;
case DL3ConstantUtil.DOCUMENT_ASSET:
switch (sourceSystemCode) {
case DL3ConstantUtil.SOLAR:
metaDataUploadTasklet.setSourceDirectory(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"DA"+jobConfig.getBackSlash()+"SOLAR"+jobConfig.getBackSlash());
//metaDataUploadTasklet.setTargetFolder(jobConfig.getTargetMetaDataRootPath()+"/DA/SOLAR");
break;
case DL3ConstantUtil.MANUAL_UPLOAD:
metaDataUploadTasklet.setSourceDirectory(jobConfig.getTargetFileLocation()+jobConfig.getBackSlash()+"DA"+jobConfig.getBackSlash()+"DDDS"+jobConfig.getBackSlash());
//metaDataUploadTasklet.setTargetFolder(jobConfig.getTargetMetaDataRootPath()+"/DA/DDDS");
break;
default:
break;
}
break;
default:
break;
}
return metaDataUploadTasklet;
}
@Bean(name="UploadXMLFiles")
public Step uploadXMLFiles(){
return stepBuilderFactory.get("UploadXMLFiles").tasklet(uploadTaskletMetaData(null,null)).build();
}
@Bean
@StepScope
public Partitioner metaDataExportPartioner(
        @Value("#{jobParameters[sourceSystemCode]}") String sourceSystemCode,
        @Value("#{jobParameters[assetType]}") String assetType,
        @Value("#{jobExecutionContext[totalCount]}") String totalCount) {
    logger.debug("source system code--->"+sourceSystemCode);
    logger.debug("assetType--->"+assetType);
    MetaDataExportPartioner metaDataExportPartioner = new MetaDataExportPartioner();
    metaDataExportPartioner.setSourceSystemCode(sourceSystemCode);
    metaDataExportPartioner.setAssetType(assetType);
    logger.debug("In the partioner initiliazation------>"+totalCount);
    metaDataExportPartioner.setTotalCount(StringUtils.isEmpty(totalCount) ? 0 : Integer.parseInt(totalCount));
    return metaDataExportPartioner;
}

@Bean
public PartitionHandler metaDataExportPartionHandler(ItemReader<MediaAsset> reader, ItemWriter<MediaAsset> writer) {
    logger.debug("Initializing partionHandler------>");
    TaskExecutorPartitionHandler partitionHandler = new TaskExecutorPartitionHandler();
    partitionHandler.setStep(generateXMLFilesSlave(reader, writer));
    partitionHandler.setGridSize(6);
    partitionHandler.setTaskExecutor(taskExecutor());
    return partitionHandler;
}
@Bean
public TaskExecutor taskExecutor() {
    ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
    taskExecutor.setMaxPoolSize(10);
    taskExecutor.setCorePoolSize(10);
    taskExecutor.afterPropertiesSet();
    return taskExecutor;
}

@Bean
public JobExecutionListener metaDataExportJobExecutionListener() {
    JobExecutionListener jobExecutionListener = new MetaDataExportJobListener();
    return jobExecutionListener;
}

@Bean
public Job exportMetaDataJob(JobExecutionListener metaDataExportJobExecutionListener) throws Exception {
    return jobBuilderFactory.get("ExportMetaDataJob")
            .incrementer(new RunIdIncrementer())
            .listener(metaDataExportJobExecutionListener)
            .flow(generateXMLFilesMaster(metaDataExportReader(null, null, 0L, 0L, null), metaDataExportWriter(null, null, 0L, 0L, null)))
            //.next(uploadXMLFiles())
            .end()
            .build();
}
My pom file entries:
<parent>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-parent</artifactId>
    <version>1.3.2.RELEASE</version>
    <relativePath /> <!-- lookup parent from repository -->
</parent>
<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <java.version>1.8</java.version>
    <spring-cloud-version>1.0.4.RELEASE</spring-cloud-version>
    <spring-batch-admin.version>1.3.0.RELEASE</spring-batch-admin.version>
</properties>
<dependencies>
    <!-- <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-web</artifactId>
        </dependency> -->
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-batch</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-mail</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-thymeleaf</artifactId>
        <exclusions>
            <exclusion>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-web</artifactId>
            </exclusion>
        </exclusions>
    </dependency>
    <!-- <dependency> <groupId>org.springframework.batch</groupId> <artifactId>spring-batch-admin-manager</artifactId>
        <version>${spring-batch-admin.version}</version> <exclusions> <exclusion>
        <artifactId>slf4j-log4j12</artifactId> <groupId>org.slf4j</groupId> </exclusion>
        <exclusion> <artifactId>slf4j-api</artifactId> <groupId>org.slf4j</groupId>
        </exclusion> </exclusions> </dependency> -->
    <dependency>
        <groupId>org.springframework.cloud</groupId>
        <artifactId>spring-cloud-aws-context</artifactId>
        <version>${spring-cloud-version}</version>
    </dependency>
    <dependency>
        <groupId>com.microsoft.sqlserver</groupId>
        <artifactId>sqljdbc4</artifactId>
        <version>4.0</version>
    </dependency>
    <dependency>
        <groupId>com.oracle</groupId>
        <artifactId>ojdbc14</artifactId>
        <version>10.2.0.3.0</version>
    </dependency>
    <dependency>
        <groupId>org.springframework</groupId>
        <artifactId>spring-oxm</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-test</artifactId>
        <scope>test</scope>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-devtools</artifactId>
    </dependency>
    <dependency>
        <groupId>commons-io</groupId>
        <artifactId>commons-io</artifactId>
        <version>2.3</version>
    </dependency>
    <!-- <dependency>
        <groupId>org.apache.commons</groupId>
        <artifactId>commons-dbcp2</artifactId>
        <version>2.0.1</version>
    </dependency> -->
    <!-- <dependency> <groupId>com.sun.xml.bind</groupId> <artifactId>jaxb-impl</artifactId>
        <version>2.0.1</version> </dependency> -->
</dependencies>
<build>
    <plugins>
        <plugin>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-maven-plugin</artifactId>
        </plugin>
    </plugins>
</build>
Answer (score 0):
The JVM shuts down automatically when the number of running non-daemon threads drops to zero. In the non-partitioned case, no non-daemon threads are left running when the job completes, so the JVM exits. In the partitioned use case, however, you still have threads waiting for work, and they block the application from shutting down. Taking a thread dump would help diagnose the issue, but my bet is that the threads held by the ThreadPoolTaskExecutor are the problem. If so, you may want to look at options that do not create a thread pool (which is what keeps the JVM from shutting down).
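
To make that concrete, here is a minimal sketch (not from the original post) of two ways to keep the partition executor from holding the JVM open: either mark the pooled threads as daemon threads, or replace the pool with a SimpleAsyncTaskExecutor, which spawns a thread per partition and lets it die when the partition finishes. These are meant as drop-in replacements for the taskExecutor() bean in the configuration class above; the thread-name prefix, the concurrency limit of 6, and the method name taskExecutorPerPartition are illustrative assumptions, not values from the question, and only one of the two beans should be used.

import org.springframework.context.annotation.Bean;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
import org.springframework.core.task.TaskExecutor;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

// Variant 1: keep the pool, but make its worker threads daemon threads so
// they can no longer block JVM shutdown once the job has finished.
@Bean
public TaskExecutor taskExecutor() {
    ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
    taskExecutor.setCorePoolSize(10);
    taskExecutor.setMaxPoolSize(10);
    taskExecutor.setDaemon(true); // daemon threads do not keep the JVM alive
    taskExecutor.setThreadNamePrefix("partition-"); // illustrative prefix
    taskExecutor.afterPropertiesSet();
    return taskExecutor;
}

// Variant 2: avoid a pool altogether. SimpleAsyncTaskExecutor creates a new
// thread per task and the thread terminates when its partition completes,
// so nothing is left running after the job ends. (Hypothetical bean name;
// it would replace the taskExecutor() bean above.)
@Bean
public TaskExecutor taskExecutorPerPartition() {
    SimpleAsyncTaskExecutor taskExecutor = new SimpleAsyncTaskExecutor("partition-");
    taskExecutor.setConcurrencyLimit(6); // assumption: cap matches the grid size of 6
    return taskExecutor;
}

A third option would be to release the pool explicitly, for example by calling shutdown() on the ThreadPoolTaskExecutor from the afterJob method of the existing MetaDataExportJobListener, so the pool threads are stopped as soon as the job finishes.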