我尝试将许多记录从Firebird复制到MongoDB。这是我的功能;
// Copy rows from Firebird to MongoDB. `queue` counts inserts still in
// flight so `done` only fires after the last pending write completes.
var queue = 0;
connection.sequentially(sql, (row) => {
queue++; // one more insert in flight
collection.insert(row, (err, result) => {
if (err)
return done(err);
queue--; // this insert finished
if (queue <= 0)
return done(null, result);
});
}, (err) => {
// all rows have been read from Firebird; release the connection
connection.detach();
done(err);
}); // NOTE: the original snippet was missing this closing `);`
我的问题是内存。写入操作较慢,大约读取 100000 条记录后内存就占满了。能否在队列长度降到某个水平之前,延迟下一次读取?
答案 0(得分:0)
这是众多可行方案之一:使用 async.cargo。下面的演示说明了其工作流程。
// npm install async --save
var async = require('async');
// your sql api, simple demo
// Minimal stand-in for the real database API used by this demo:
// every insert "succeeds" immediately and reports back through the
// node-style callback with a null error.
var collection = {
insert(row, cb) {
console.log('row inserted', row.name);
cb(null);
}
};
// create a cargo object with 1 row payload
// and extract cargo into worker variable.
// The worker must call `callback` exactly once, and only after every
// asynchronous insert has finished. The original version called
// callback() synchronously after forEach (before the inserts were done)
// and, on an insert error, called it a second time — both are bugs for
// a truly asynchronous collection.insert.
var cargo = async.cargo(function (cargo_rows, callback) {
var pending = cargo_rows.length; // inserts still in flight
var failed = false; // set once callback has fired with an error
if (pending === 0) return callback(); // nothing to do for an empty payload
cargo_rows.forEach(function (row) {
console.log('processing: ' + row.name);
collection.insert(row, function (ciErr) {
if (failed) return; // callback already fired with the first error
if (ciErr) {
failed = true;
return callback(ciErr); // report the first failure exactly once
}
if (--pending === 0) {
callback(); // all inserts for this payload are done
}
});
});
}, 1); // number of cargo rows to process, if 2 - 2 rows will be inserted before callback called
// add some items
// Feed each demo row into the cargo queue and log the per-row outcome.
var rows = [{ name: 'item1' }, { name: 'item2' }, { name: 'item3' }];
for (const row of rows) {
cargo.push(row, (err) => {
if (err) {
console.log('error processing: ' + row.name, '\nExiting');
} else {
console.log('finished processing: ' + row.name);
}
});
}
// Result
//
// processing: item1
// row inserted item1
// finished processing: item1
// processing: item2
// row inserted item2
// finished processing: item2
// processing: item3
// row inserted item3
// finished processing: item3
当货物数量为2时,结果为:
// processing: item1
// row inserted item1
// processing: item2
// row inserted item2
// finished processing: item1
// finished processing: item2
// processing: item3
// row inserted item3
// finished processing: item3
所以思路是按顺序添加行;你现在的计数器队列并不是一个好方案,最好改用 promise。简而言之:
start processing row => // you can have some validations here
process row => // some error checking/logging
finish process row => // cleanup memory, indeed i don't think you gonna need it
have other rows ? restart worker with new row : done;