Performance problem: Spring Batch job takes too long to insert rows into the DB

Date: 2017-01-17 12:59:43

Tags: oracle performance spring-batch

I created a Spring Batch job that reads from a file and writes to the DB. The program accepts dynamic columns, i.e. it can be used with any table. The tool works correctly, but loading 1 million records takes more than an hour. Please help me solve this!

 <bean id="FileItemReader" class="org.springframework.batch.item.file.FlatFileItemReader"
                scope="step">
                <!-- Read a csv file -->
                <property name="resource"
                    value="file:#{stepExecutionContext['file']}" />
                <property name="lineMapper">
                    <bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
                        <!-- split it -->
                        <property name="lineTokenizer">
                            <bean class="com.mypack.LineTokenizer">
                                <property name="delimiter" value="|" /> 
                                <property name="tableName" value="#{stepExecutionContext['tablename']}" />
                                <property name="dataSource" ref="dtsoruce" />
                            </bean>
                        </property>
                        <property name="fieldSetMapper" ref="mapper" />
                    </bean>
                </property>

                <property name="linesToSkip" value="0" />
            </bean>

            <bean id="mapper" class="com.mypack.MapFieldSetMapper" scope="step"/>

        <bean id="DbItemWriter" class="com.mypack.Filewrtr" scope="step">
                <property name="dataSource" ref="dtsoruce" />
                <property name="tableName" value="#{stepExecutionContext['tablename']}" />
            </bean>
        </beans>
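
The step definition that wires this reader and writer together is not shown. For reference, a minimal sketch of a chunk-oriented step using these beans might look like the following (the job/step ids, the batch XML namespace prefix, and the commit-interval value are assumptions, and the partitioning implied by stepExecutionContext is omitted):

    <batch:job id="loadFileJob">
        <batch:step id="loadFileStep">
            <batch:tasklet>
                <!-- commit-interval controls how many items are handed to the
                     writer per chunk, i.e. per JDBC batch and commit -->
                <batch:chunk reader="FileItemReader" writer="DbItemWriter"
                             commit-interval="1000" />
            </batch:tasklet>
        </batch:step>
    </batch:job>

A very small commit-interval (for example 1) means one database round trip and commit per row, which is a common reason a load of this size takes hours.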

    import java.util.Arrays;
    import java.util.List;
    import java.util.Map;

    import javax.sql.DataSource;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
    import org.springframework.beans.factory.InitializingBean;
    import org.springframework.jdbc.core.JdbcTemplate;

    public class LineTokenizer extends DelimitedLineTokenizer implements InitializingBean {

        private static final Logger logger = LoggerFactory.getLogger(LineTokenizer.class);

        // Column names are read from the Oracle data dictionary for the target table
        private String columnNames = "SELECT COLUMN_NAME FROM ALL_TAB_COLS WHERE TABLE_NAME =";
        private String tableName;
        private JdbcTemplate jdbcTemplate;

        public void setTableName(String tableName) {
            this.tableName = tableName;
        }

        protected JdbcTemplate createJdbcTemplate(DataSource dataSource) {
            return new JdbcTemplate(dataSource);
        }

        public final void setDataSource(DataSource dataSource) {
            if (this.jdbcTemplate == null || dataSource != this.jdbcTemplate.getDataSource()) {
                this.jdbcTemplate = createJdbcTemplate(dataSource);
                initTemplateConfig();
            }
        }

        protected void initTemplateConfig() {
        }

        public final JdbcTemplate getJdbcTemplate() {
            return this.jdbcTemplate;
        }

        public void afterPropertiesSet() throws Exception {
            // Empty the target table before the load starts
            jdbcTemplate.execute("TRUNCATE TABLE " + tableName);
            logger.debug("TABLE TRUNCATED SUCCESSFULLY: " + tableName);

            // Fetch the column names in positional order and use them as the tokenizer's field names
            List<Map<String, Object>> columns =
                    jdbcTemplate.queryForList(columnNames + "'" + tableName + "' ORDER BY COLUMN_ID");
            String[] names = new String[columns.size()];
            int i = 0;
            for (Map<String, Object> convert : columns) {
                for (String key : convert.keySet()) {
                    names[i] = (String) convert.get(key);
                    i++;
                }
            }
            setNames(names);
            logger.debug("FIELDS NAME => " + Arrays.toString(names));
            Filewrtr.columnNames = names;
        }
    }
    import java.sql.PreparedStatement;
    import java.sql.SQLException;
    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.List;
    import java.util.Map;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.springframework.batch.item.ItemWriter;
    import org.springframework.jdbc.core.BatchPreparedStatementSetter;
    import org.springframework.jdbc.core.support.JdbcDaoSupport;

    public class Filewrtr extends JdbcDaoSupport implements ItemWriter<Map<String, String>> {

        private static final Logger logger = LoggerFactory.getLogger(Filewrtr.class);

        private String tableName;
        private String sql;
        private Object[] data;
        // Populated by LineTokenizer.afterPropertiesSet() before the step runs
        public static String[] columnNames;

        public void setTableName(String tableName) {
            this.tableName = tableName;
        }

        public void write(final List<? extends Map<String, String>> items) throws Exception {
            // Build "INSERT INTO <table>(col1,col2,...) VALUES(?,?,...)" from the column list
            String columntype = "INSERT INTO " + tableName + "(";
            String value = ") VALUES(";
            int i = 0;
            for (String key : columnNames) {
                if (i != (columnNames.length - 1)) {
                    columntype = columntype + key + ",";
                    value = value + "?,";
                    logger.info("column type " + columntype + " value " + value);
                } else {
                    columntype = columntype + key;
                }
                i++;
            }
            sql = columntype + value + "?)";

            // One JDBC batch per chunk handed to the writer
            this.getJdbcTemplate().batchUpdate(sql, new BatchPreparedStatementSetter() {
                public void setValues(PreparedStatement ps, int i) throws SQLException {
                    int j = 1;
                    for (String tradeColumn : columnNames) {
                        // isValid(...) is defined elsewhere in this class (not shown here)
                        if (isValid(items.get(i).get(tradeColumn))) {
                            SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");
                            try {
                                Date time = df.parse(items.get(i).get(tradeColumn));
                                ps.setDate(j, new java.sql.Date(time.getTime()));
                            } catch (Exception e) {
                                ps.setTimestamp(j, null);
                            }
                        } else {
                            ps.setString(j, items.get(i).get(tradeColumn));
                        }
                        j++;
                    }
                }

                public int getBatchSize() {
                    return items.size();
                }
            });
        }
    }
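
One minor cleanup worth noting: the INSERT string and its per-column logging are recomputed for every chunk even though they depend only on the table. A minimal sketch of building the statement once and reusing it (the buildSql helper name is hypothetical, assuming columnNames is already populated by the tokenizer):

    // Hypothetical helper: build and cache the INSERT statement once,
    // instead of reconstructing and logging it on every write() call.
    private String buildSql() {
        if (sql == null) {
            StringBuilder cols = new StringBuilder("INSERT INTO ").append(tableName).append("(");
            StringBuilder vals = new StringBuilder(") VALUES(");
            for (int k = 0; k < columnNames.length; k++) {
                cols.append(columnNames[k]);
                vals.append("?");
                if (k < columnNames.length - 1) {
                    cols.append(",");
                    vals.append(",");
                }
            }
            sql = cols.append(vals).append(")").toString();
        }
        return sql;
    }

write() would then call getJdbcTemplate().batchUpdate(buildSql(), ...). This is unlikely to be the dominant cost, though: with batchUpdate the main lever is usually how many items arrive in each write() call (the chunk size / commit-interval), along with reusing a single SimpleDateFormat instead of creating one per column per row.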

1 Answer:

Answer 0 (score: 0)

Oracle provides a utility, SQL*Loader Express, for loading CSV files without writing complex code.
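
As a rough illustration (not part of the original answer), an express-mode invocation for a pipe-delimited file might look like the following; the credentials, table, and file names are placeholders, and the exact parameters should be checked against your Oracle version's SQL*Loader documentation:

    sqlldr userid=scott/tiger table=mytable data=mydata.csv terminated_by='|'

Express mode generates the control file for you and, where possible, uses an external-table or direct-path load, which is typically much faster than row-by-row inserts through JDBC.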