I created a util to read multiple files and write to multiple tables. The util works correctly, but it takes 40 minutes to load 300k records. I tried to enable multithreading in my Spring Batch job by partitioning the file step, but it is not working properly. My configuration and partitioner are below.
<task:executor id="taskExecutor" pool-size="30" />

<job id="MFJob" xmlns="http://www.springframework.org/schema/batch">
    <step id="stepFileId">
        <partition step="partitionerFileStepId" partitioner="multipleFilePartitioner">
            <handler grid-size="10" task-executor="taskExecutor" />
        </partition>
    </step>
</job>

<step id="partitionerFileStepId" xmlns="http://www.springframework.org/schema/batch">
    <tasklet>
        <chunk reader="FileRdr" writer="dbiwtr" commit-interval="10000" />
    </tasklet>
</step>
<!-- File partitioner: creates one partition (worker step execution) per input file -->
<bean id="multipleFilePartitioner" class="com.xxc.MultipleFilePartitioner" scope="step">
    <property name="filenames" value="#{jobParameters[fileName]}" />
    <property name="extractLocation" value="#{jobParameters[fileLocation]}" />
</bean>
<bean id="taskExecutor" class="org.springframework.core.task.SimpleAsyncTaskExecutor" />
<bean id="FileRdr" class="org.springframework.batch.item.file.FlatFileReader"
scope="step">
<!-- Read a csv file -->
<property name="resource"
value="file:#{stepExecutionContext['file']}" />
<property name="lineMapper">
<bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<!-- split it -->
<property name="lineTokenizer">
<bean class="com.xx.LineTokenizer">
<property name="delimiter" value="," />
<property name="tName" value="#{stepExecutionContext['tname']}" />
<property name="dataSource" ref="dataSource" />
</bean>
</property>
<property name="fieldSetMapper" ref="mapper" />
</bean>
</property>
<property name="linesToSkip" value="0" />
</bean>
<bean id="mapper" class="com.xx.MapFieldSetMapper" scope="step"/>
<bean id="dbiwtr" class="com.XXX.dbiwtr" scope="step">
<property name="dataSource" ref="extractorDataSource" />
<property name="tName" value="#{stepExecutionContext['tname']}" />
</bean>
</beans>
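
For reference, a rough Java-configuration sketch of the same partitioned step is below. It assumes Spring Batch's StepBuilderFactory (Spring Batch 3.x/4.x); the bean names stepFileId, partitionerFileStepId, multipleFilePartitioner and taskExecutor mirror the XML above, and the worker step, reader and writer are assumed to be defined elsewhere. It is only meant to show how grid-size and the task executor are wired into the partition handler, not a drop-in replacement for the XML.

import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.TaskExecutor;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

@Configuration
public class PartitionStepConfig {

    // Bounded pool, mirroring <task:executor id="taskExecutor" pool-size="30" />
    @Bean
    public TaskExecutor taskExecutor() {
        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
        executor.setCorePoolSize(30);
        executor.setMaxPoolSize(30);
        return executor;
    }

    // Master step: fans out one worker step execution per partition returned by the partitioner.
    @Bean
    public Step stepFileId(StepBuilderFactory steps,
                           Step partitionerFileStepId,
                           Partitioner multipleFilePartitioner,
                           TaskExecutor taskExecutor) {
        return steps.get("stepFileId")
                .partitioner("partitionerFileStepId", multipleFilePartitioner)
                .step(partitionerFileStepId)
                .gridSize(10)
                .taskExecutor(taskExecutor)
                .build();
    }
}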
import java.io.File;
import java.util.HashMap;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.item.ExecutionContext;

public class MultipleFilePartitioner implements Partitioner {

    private static final Logger logger = LoggerFactory.getLogger(MultipleFilePartitioner.class);

    private String extractLocation;
    private String filenames;

    @Override
    public Map<String, ExecutionContext> partition(int gridSize) {
        Map<String, ExecutionContext> queue = new HashMap<String, ExecutionContext>();
        String[] tableNameArray = this.filenames.split(",");
        if (tableNameArray.length != 0) {
            File[] fileList = new File[tableNameArray.length];
            int i = 0;
            for (String fileObject : tableNameArray) {
                fileList[i++] = new File(extractLocation, fileObject + ".dat");
            }
            logger.debug("Number of tables to be persisted: " + fileList.length);
            for (File file : fileList) {
                ExecutionContext ec = new ExecutionContext();
                // Strip the ".dat" suffix literally (replaceAll would treat it as a regex).
                String tablename = file.getName().replace(".dat", "");
                // Store the absolute path so "file:#{stepExecutionContext['file']}" resolves correctly.
                ec.put("file", file.getAbsolutePath());
                ec.put("tablename", tablename);
                queue.put(file.getAbsolutePath(), ec);
            }
        }
        return queue;
    }

    public void setExtractLocation(String extractLocation) {
        this.extractLocation = extractLocation;
    }

    public void setFilenames(String filenames) {
        this.filenames = filenames;
    }
}
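
To sanity-check what the partitioner produces outside of a job run, a minimal standalone sketch like the following can be used. The file names and extract location are made-up stand-ins for the jobParameters values; the keys printed ('file' and 'tablename') are the ones the reader and writer SpEL expressions resolve against the step execution context.

import java.util.Map;

import org.springframework.batch.item.ExecutionContext;

public class MultipleFilePartitionerDemo {
    public static void main(String[] args) {
        MultipleFilePartitioner partitioner = new MultipleFilePartitioner();
        // Hypothetical values standing in for jobParameters[fileName] / jobParameters[fileLocation]
        partitioner.setFilenames("CUSTOMER,ORDERS");
        partitioner.setExtractLocation("/tmp/extracts");

        Map<String, ExecutionContext> partitions = partitioner.partition(10);
        for (Map.Entry<String, ExecutionContext> entry : partitions.entrySet()) {
            ExecutionContext ec = entry.getValue();
            // These keys must match the SpEL lookups used by the reader and writer beans.
            System.out.println(entry.getKey() + " -> file=" + ec.getString("file")
                    + ", tablename=" + ec.getString("tablename"));
        }
    }
}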