
multiple async call issue in node.js

I'm fairly new to node.js, so kindly forgive my ignorance. What I'm trying to do is loop through an array of servers and allocate a maximum of 60 tasks to each server. However, the incoming number of tasks can be anything between 10 and 200.

So what I'm looking to do is assign 60 tasks to the first server, the next 60 tasks to the second server, and so on.

I have the following function, which seems to go horribly wrong:

function mount_the_tasks(server_info){
    async.forEach(server_info, function(single_server, callback1){
        Tasks.find({task_status : 0}).limit(single_server.can_perform_tasks).exec(function(err, tasks_for_server){
            async.forEach(tasks_for_server, function(single_task, callback2){
                Tasks.findOneAndUpdate({_id : single_task._id}, {task_status : 1, server_id : single_server.instance_id}, function(err, numberAffected){
                    console.log(single_task.task_id+' -> '+single_server.instance_id);
                    callback2();
                });
            }, function(err){
                console.log('Moving to next server!');
                callback1();
            });
        });
    }, function(err){
        console.log('all done!');
    });
}

The problem is that you are going through all of the servers in parallel. That means every server ends up being assigned the same first 60 pending tasks: by the time the next server is iterated over, the previous server's updates haven't finished yet, so those tasks still have task_status 0. To fix this, use async.eachSeries when iterating over the servers, so that each batch of tasks is marked as assigned before moving on to the next server.

function mount_the_tasks(server_info){
    // Walk the servers one at a time so each batch of updates finishes
    // before the next server queries for unassigned tasks.
    async.eachSeries(server_info, function(single_server, callback1){
        Tasks.find({task_status : 0}).limit(single_server.can_perform_tasks).exec(function(err, tasks_for_server){
            // Within a single server's batch, the updates can still run in parallel.
            async.each(tasks_for_server, function(single_task, callback2){
                Tasks.findOneAndUpdate({_id : single_task._id}, {task_status : 1, server_id : single_server.instance_id}, function(err, numberAffected){
                    console.log(single_task.task_id+' -> '+single_server.instance_id);
                    callback2();
                });
            }, function(err){
                console.log('Moving to next server!');
                callback1();
            });
        });
    }, function(err){
        console.log('all done!');
    });
}
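
For reference, here is a quick sketch of how this could be called. The server list and instance IDs below are made up for illustration; the only assumption is that each entry in server_info carries the instance_id and can_perform_tasks fields used above.

var server_info = [
    { instance_id : 'i-0a1', can_perform_tasks : 60 },
    { instance_id : 'i-0b2', can_perform_tasks : 60 },
    { instance_id : 'i-0c3', can_perform_tasks : 60 }
];

// Because eachSeries waits for each server's updates to finish, the second
// server's Tasks.find() only runs after the first 60 tasks already have
// task_status 1, so every server receives a distinct batch.
mount_the_tasks(server_info);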

Another option would be to use .skip() so that each server's query skips over the tasks that belong to the previous servers. This should be a little faster than the previous option, assuming your server isn't already bottlenecked by the number of requests being sent simultaneously.

function mount_the_tasks(server_info){
    var skip = 0;
    async.each(server_info, function(single_server, callback1){
        // Reserve this server's slice of the task list synchronously, before the
        // query is issued: async.each starts every iteration up front, so waiting
        // until the batch completes to bump the counter would leave skip at 0 for
        // every server.
        var offset = skip;
        skip += single_server.can_perform_tasks;
        Tasks.find({task_status : 0}).limit(single_server.can_perform_tasks).skip(offset).exec(function(err, tasks_for_server){
            async.each(tasks_for_server, function(single_task, callback2){
                Tasks.findOneAndUpdate({_id : single_task._id}, {task_status : 1, server_id : single_server.instance_id}, function(err, numberAffected){
                    console.log(single_task.task_id+' -> '+single_server.instance_id);
                    callback2();
                });
            }, function(err){
                console.log('Moving to next server!');
                callback1();
            });
        });
    }, function(err){
        console.log('all done!');
    });
}
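
One detail worth noting with either version: the Mongoose errors are ignored, so a failed query or update still logs 'all done!'. The sketch below (shown on the eachSeries variant, not part of the answer above) forwards err through the async callbacks so the final handler can tell success from failure.

function mount_the_tasks(server_info){
    async.eachSeries(server_info, function(single_server, callback1){
        Tasks.find({task_status : 0}).limit(single_server.can_perform_tasks).exec(function(err, tasks_for_server){
            if (err) return callback1(err);            // stop this server on a query error
            async.each(tasks_for_server, function(single_task, callback2){
                Tasks.findOneAndUpdate({_id : single_task._id}, {task_status : 1, server_id : single_server.instance_id}, function(err){
                    callback2(err);                    // forward any update error
                });
            }, callback1);                             // batch done (or failed) -> next server
        });
    }, function(err){
        if (err) return console.error('task assignment failed:', err);
        console.log('all done!');
    });
}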
