我正在尝试从具有约1300万条记录的表中读取数据。我使用setFetchSize逐批读取记录,进行一些查找并将其写入csv文件。但是输出csv产生的记录超过了5000万,这是意料之外的。我怀疑迭代无法正常运行。任何帮助将不胜感激。
/**
 * Streams contract rows from the database and writes the processed result to
 * Records_fact.csv (tab-separated) under params.outputDir.
 *
 * Relies on the surrounding class for:
 *   - stmt      : an open java.sql.Statement
 *   - valueList : the rows produced by the per-record processing step
 *
 * NOTE(review): with ~13M rows, masterRecords holds every row in memory;
 * consider processing each row as it is read instead of accumulating.
 */
def processRecords(ParamHelper params){
    CSVWriter writer = null
    ResultSet resRecords = null
    try {
        writer = new CSVWriter(new FileWriter(params.outputDir.getAbsolutePath() + "/Records_fact.csv"),
                (Character)'\t', (Character)'\u0000', (Character)'\n')
        String contractRateSql = "select contract_id,season from table"
        // Fetch size must be set on the Statement BEFORE executing the query;
        // setting it on the ResultSet afterwards is at best a hint for the
        // remaining rows and is ignored by many drivers.
        stmt.setFetchSize(10000)
        resRecords = stmt.executeQuery(contractRateSql)

        Map<String, Map<String, String>> masterRecords = new HashMap<String, Map<String, String>>()
        int count = 0
        while (resRecords.next()) {
            try {
                // BUG FIX: the original read from 'resRateRecords', a
                // different/undeclared result set — it must read 'resRecords'.
                Map<String, String> existingRecords = new HashMap<String, String>()
                existingRecords.put("cont_id", resRecords.getString("contract_id"))
                existingRecords.put("season", resRecords.getString("season"))
                masterRecords.put(resRecords.getString("contract_id") + "#" + count++, existingRecords)
            } catch (Exception e) {
                e.printStackTrace()
            }
        }

        // BUG FIX: this processing loop and the CSV write were INSIDE the
        // while loop above, so the growing map was re-processed once per input
        // row — producing ~N*(N+1)/2 output rows (50M+ from 13M inputs)
        // instead of N. They must run exactly once, after all rows are read.
        masterRecords.each { k, v ->
            try {
                //some process
            } catch (Exception e) {
                e.printStackTrace()
            }
        }
        if (valueList.size() > 0)
            writer.writeAll(valueList)
    } catch (Exception e) {
        e.printStackTrace()
        println("Occurred while fetching the data")
    } finally {
        // BUG FIX: the ResultSet was "closed" inside the read loop (and under
        // the wrong name) and the writer was closed outside the try block,
        // which is a syntax error and leaks the writer on failure. Close both
        // exactly once, unconditionally.
        try { if (resRecords != null) resRecords.close() } catch (Exception ignored) { }
        try { if (writer != null) writer.close() } catch (Exception ignored) { }
    }
}