我的数组中有 100,000 条这样的记录:
// Sample records. They must live in ONE array for the later
// eData.forEach(...) call to work; the original snippet was a syntax
// error (comma-separated object literals assigned to a single var).
var eData = [
  { "id": "1001", "type": "Regular" },
  { "id": "1002", "type": "Chocolate" },
  { "id": "1003", "type": "Blueberry" },
  { "id": "1004", "type": "Devil's Food" }
];
依此类推…… 当我运行下面的 node.js 脚本时:

var db = require('/QOpenSys/QIBM/ProdData/OPS/Node6/os400/db2i/lib/db2a');
// Connect to the local IBM i database.
var DBname = "*LOCAL";
var dbconn = new db.dbconn();
dbconn.conn(DBname);
// NOTE(review): a SINGLE statement object is created here and then shared
// by every asynchronous exec() call below. Concurrent statements on one
// handle step on each other — this matches the data-mixing symptom the
// question describes (duplicated id/type rows, correct total count).
var sqlA = new db.dbstmt(dbconn);
// forEach fires one async exec() per record without waiting, so all
// inserts run at once against the shared sqlA handle.
eData.forEach(function(eRow, i) {
var sql = "INSERT INTO lib.table VALUES( xx x x x) WITH NONE"
sqlA.exec(sql, function(rs, err) {
console.log("Execute Done.");
console.log(err);
});
});
数据在数据库中会被弄乱:同一个 id 和 type 会重复出现 10 次,但插入的记录总数是准确的。
如果我改用 execSync,一切结果都正确,但似乎有点慢。关于异步插入,我遗漏了什么?
进行大量插入的最快方法是什么?
答案 0(得分:1):
在任一时刻,同时进行的异步操作数量都存在一个最佳值。限制并发异步操作数量最简单的方法是使用优秀的 async.js 模块。
https://caolan.github.io/async/docs.html#eachLimit
var async = require('async');
var db = require('/QOpenSys/QIBM/ProdData/OPS/Node6/os400/db2i/lib/db2a');

// Connect once; the connection object can be shared.
var DBname = "*LOCAL";
var dbconn = new db.dbconn();
dbconn.conn(DBname);

// Run at most 100 inserts concurrently. eachLimit calls cb(err) when each
// row finishes and invokes the final callback after all rows are done.
async.eachLimit(eData, 100, function (eRow, cb) {
  // A dbstmt handle can only safely run one statement at a time; sharing a
  // single statement object across concurrent exec() calls is exactly what
  // scrambled the data in the question. Give each row its own statement
  // and release it in the callback.
  var stmt = new db.dbstmt(dbconn);
  var sql = "INSERT INTO lib.table VALUES( xx x x x) WITH NONE";
  stmt.exec(sql, function (rs, err) {
    stmt.close();
    cb(err);
  });
}, function (error) {
  if (error) {
    console.error(error);
  } else {
    console.log('Done');
  }
});