Limit asynchronous calls in Node.js

Asked: 2012-03-02 20:27:32

Tags: node.js, asynchronous

I have a Node.js app that fetches a list of files on the local machine and uploads them to a server. This list could contain thousands of files.

for (var i = 0; i < files.length; i++) {
   upload_file(files[i]);
}

If I do this with thousands of files, upload_file will get called thousands of times simultaneously, and the process will most likely die (or at least struggle). In the synchronous world, we'd create a thread pool and limit it to a certain number of threads. Is there a simple way to limit how many asynchronous calls get executed at once?

5 Answers:

Answer 0 (score: 68)

As usual, I recommend Caolan McMahon's async module.

Your upload_file function should take a callback as its second argument:

var async = require("async");

function upload_file(file, callback) {
    // Do funky stuff with file
    callback();
}

var queue = async.queue(upload_file, 10); // Run ten simultaneous uploads

queue.drain = function() {
    console.log("All files are uploaded");
};

// Queue your files for upload
queue.push(files);

queue.concurrency = 20; // Increase to twenty simultaneous uploads
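If you also want per-file results, you can push each file individually instead of calling queue.push(files): async's queue accepts an optional per-task callback that fires when that particular upload finishes. A minimal sketch, assuming upload_file passes any error to its callback:

files.forEach(function(file) {
    queue.push(file, function(err) {
        // Called once this particular file has been uploaded.
        if (err) {
            console.error("Upload failed for " + file, err);
        }
    });
});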

Answer 1 (score: 20)

The answer above, about async from NPM, is the best answer, but if you'd like to learn more about control flow:

You should look into control flow patterns. There's a wonderful discussion of control flow patterns in Chapter 7 of Mixu's Node Book. Namely, I'd look at the example in 7.2.3: Limited parallel - an asynchronous, parallel, concurrency-limited loop.

I've adapted his example:

function doUpload(file, callback) {
    // Perform the file read & upload here, then invoke
    // callback() once the upload has finished.
}

var files   = [...];
var limit   = 10;       // concurrent read / upload limit
var running = 0;        // number of running async file operations

function uploader() {
    while(running < limit && files.length > 0) {
        var file = files.shift();
        doUpload(file, function() {
            running--;
            if(files.length > 0)
                uploader();
        });
        running++;
    }
}

uploader();
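Note that the loop above never tells you when the last upload has finished. A minimal sketch of one way to add that (the done callback is my own addition, not part of Mixu's example):

function uploader(done) {
    while (running < limit && files.length > 0) {
        var file = files.shift();
        running++;
        doUpload(file, function() {
            running--;
            if (files.length > 0) {
                uploader(done);
            } else if (running === 0) {
                done(); // nothing queued and nothing in flight
            }
        });
    }
}

uploader(function() {
    console.log("All files are uploaded");
});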

Answer 2 (score: 7)

You should try queueing. I assume a callback is fired when upload_file() finishes. Something like this should do the trick (untested):

function upload_files(files, maxSimultaneousUploads, callback) {
    var runningUploads = 0,
        startedUploads = 0,
        finishedUploads = 0;

    function next() {
        runningUploads--;
        finishedUploads++;

        if (finishedUploads == files.length) {
            callback();
        } else {
            // Make sure that we are running at the maximum capacity.
            queue();
        }
    }

    function queue() {
        // Run as many uploads as possible while not exceeding the given limit.
        while (startedUploads < files.length && runningUploads < maxSimultaneousUploads) {
            runningUploads++;
            upload_file(files[startedUploads++], next);
        }
    }

    // Start the upload!
    queue();
}
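Usage would then look something like this:

upload_files(files, 10, function() {
    console.log("All files are uploaded");
});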

Answer 3 (score: 2)

The other answers seem to be outdated. This can be solved easily using parallelLimit from async. Below is how to use it. I haven't tested it.

var async = require("async");

var tasks = files.map(function(f) {
    return function(callback) {
        upload_file(f, callback);
    };
});

async.parallelLimit(tasks, 10, function(err, results) {
    // All uploads have finished (or one of them has errored).
});

Answer 4 (score: 0)

This can be solved using recursion.

The idea is that initially you send the maximum allowed number of requests, and each of those requests recursively continues to send the next one upon completion.

function batchUpload(files, concurrentRequestsLimit) {
    return new Promise(resolve => {
        var responses = [];
        var index = 0;

        function recursiveUpload() {
            if (index === files.length) {
                return; // no files left to start
            }
            upload_file(files[index++]).then(r => {
                responses.push(r);
                if (responses.length === files.length) {
                    resolve(responses); // every upload has completed
                } else {
                    recursiveUpload(); // start the next file
                }
            });
        }

        // Kick off the maximum allowed number of concurrent uploads;
        // each one chains into the next as it finishes.
        for (var i = 0; i < concurrentRequestsLimit; i++) {
            recursiveUpload();
        }
    });
}

var files = [
    'file_1',
    'file_2',
    'file_3',
    ...
    'file_100'
];
batchUpload(files, 5).then(responses => {
    console.log(responses);
});
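One caveat worth noting (my observation, not part of the original answer): if any upload_file call rejects, the promise returned by batchUpload never settles. A hedged variant that records failures in place of responses, so the batch always resolves:

function batchUploadSettled(files, concurrentRequestsLimit) {
    return new Promise(resolve => {
        var responses = [];
        var index = 0;

        function recursiveUpload() {
            if (index === files.length) {
                return;
            }
            upload_file(files[index++])
                .then(r => r, err => err) // record the error in place of a response
                .then(result => {
                    responses.push(result);
                    if (responses.length === files.length) {
                        resolve(responses);
                    } else {
                        recursiveUpload();
                    }
                });
        }

        for (var i = 0; i < concurrentRequestsLimit; i++) {
            recursiveUpload();
        }
    });
}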