I implemented a Spring Batch job that reads data from a CSV file and performs inserts and updates based on each record. The target table, XXX037, has 800,000 records, and the inserts and updates are taking too long. I have configured the job with a commit-interval of 1000, but it still takes a long time to process. Is there any way to improve the performance?
Configuration
<batch:job id="pcqJob">
    <batch:step id="pcqStep">
        <batch:tasklet>
            <batch:chunk reader="pcqReader" writer="compositeWriter" commit-interval="1000">
                <!-- <batch:skippable-exception-classes>
                    <batch:include class="javax.persistence.PersistenceException"/>
                </batch:skippable-exception-classes> -->
            </batch:chunk>
        </batch:tasklet>
    </batch:step>
</batch:job>

<!-- <bean id="skipPolicy" class="com.test.domain.services.writer.SkipPolicy">
    <property name="skipLimit" value="2500"/>
</bean> -->

<bean id="compositeWriter" class="org.springframework.batch.item.support.ClassifierCompositeItemWriter">
    <property name="classifier">
        <bean class="org.springframework.classify.BackToBackPatternClassifier">
            <property name="routerDelegate">
                <bean class="com.test.domain.services.writer.ItemCodeClassifier" />
            </property>
            <property name="matcherMap">
                <map>
                    <entry key="*Doss*" value-ref="fileItemWriter1" />
                    <entry key="*Ldt*" value-ref="fileItemWriter2" />
                    <entry key="*Old*" value-ref="oldDossierfileItemWriter" />
                    <entry key="*Tpm*" value-ref="tpmfileItemWriter" />
                    <entry key="*Txm*" value-ref="tpxfileItemWriter" />
                    <entry key="*DoD*" value-ref="dossierDeletefileItemWriter" />
                    <entry key="*LdD*" value-ref="ldtDeletefileItemWriter" />
                    <entry key="*TpD*" value-ref="tpmDeletefileItemWriter" />
                    <entry key="*TxD*" value-ref="txmDeletefileItemWriter" />
                </map>
            </property>
        </bean>
    </property>
</bean>
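For context, ItemCodeClassifier is the routerDelegate for the BackToBackPatternClassifier: its @Classifier-annotated method returns a String key that gets matched against the matcherMap patterns ("*Doss*", "*Ldt*", ...) to select the target writer. A minimal sketch of such a delegate is below; the CsvRecord item type and getItemCode() accessor are illustrative placeholders, not the exact code (the real items come from pcqReader).

package com.test.domain.services.writer;

import org.springframework.classify.annotation.Classifier;

// Router delegate for BackToBackPatternClassifier: the @Classifier method
// returns a routing key that is matched against the matcherMap patterns
// to pick the ItemWriter each item is sent to.
public class ItemCodeClassifier {

    // Illustrative item type only; the real item class is whatever
    // pcqReader produces.
    public static class CsvRecord {
        private final String itemCode;

        public CsvRecord(String itemCode) {
            this.itemCode = itemCode;
        }

        public String getItemCode() {
            return itemCode;
        }
    }

    @Classifier
    public String classify(CsvRecord item) {
        // e.g. a code like "Doss2017" matches "*Doss*" and routes the
        // item to fileItemWriter1
        return item.getItemCode();
    }
}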