
How to delay an operation in a callback in node.js

I am trying to copy many records from Firebird to MongoDB. This is my function:

var queue = 0;
connection.sequentially(sql, (row) => {
    queue++;
    collection.insert(row, (err, result) => {
        if (err)
            return done(err);
        queue--;
        if (queue <= 0)
            return done(null, result);
    });
}, (err) => {
    connection.detach();
    done(err);
});

My problem is memory. Write operations are slower than reads, and after approximately 100,000 reads my memory is full. Is it possible to delay the next read until the value of queue drops below some level?

This is one of many solutions that could help you, using async.cargo; the demo should illustrate the workflow.

// npm install async --save
var async = require('async');

// a simple stand-in for your MongoDB collection
var collection = {
    insert: function (row, cb) {
        console.log('row inserted', row.name);
        cb(null);
    }
};

// create a cargo object with a payload of 1 row;
// the worker inserts every row of the batch, then signals the cargo exactly once
var cargo = async.cargo(function (cargo_rows, callback) {
    async.each(cargo_rows, function (row, cb) {
        console.log('processing: ' + row.name);
        collection.insert(row, cb);
    }, callback); // called once, with an error if any insert failed
}, 1); // payload size: if 2, 2 rows are inserted before callback is called

// add some items
var rows = [{ name: 'item1' }, { name: 'item2' }, { name: 'item3' }];
rows.forEach(function (row) {
    cargo.push(row, function (err) {
        if (err) {
            console.log('error processing: ' + row.name, '\nExiting');
        } else {
            console.log('finished processing: ' + row.name);
        }
    });
});

// Result
// 
// processing: item1
// row inserted item1
// finished processing: item1
// processing: item2
// row inserted item2
// finished processing: item2
// processing: item3
// row inserted item3
// finished processing: item3

And when the payload size is 2, the result is:

// processing: item1
// row inserted item1
// processing: item2
// row inserted item2
// finished processing: item1
// finished processing: item2
// processing: item3
// row inserted item3
// finished processing: item3

So the idea is to add rows sequentially. Your queue counter is not a good solution; you are better off relying on a promise-like flow. Simply put:

start processing row =>  // you can have some validations here
process row =>           // some error checking/logging
finish processing row => // clean up memory, though you probably won't need to
have other rows ? restart worker with new row : done;
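
To tie this back to the question, here is a minimal, untested sketch of how the Firebird loop could push into such a cargo instead of counting with queue. The connection, sql, collection and done names are assumed to exist exactly as in the question, and the batch size of 100 is an arbitrary starting point:

// a sketch wiring async.cargo into the loop from the question;
// connection, sql, collection and done are assumed from the question
var async = require('async');

var cargo = async.cargo(function (rows, callback) {
    // insert the whole batch, then free the cargo for the next one
    async.each(rows, function (row, cb) {
        collection.insert(row, cb);
    }, callback);
}, 100); // batch size: tune it to keep memory bounded

connection.sequentially(sql, (row) => {
    cargo.push(row); // rows wait in the cargo instead of in loose callbacks
}, (err) => {
    connection.detach();
    if (err) return done(err);
    if (cargo.idle()) return done(null); // everything already inserted
    // in async v2 drain is an assignable property; in v3 use cargo.drain(fn)
    cargo.drain = function () {
        done(null); // every queued row has been inserted
    };
});

Note that the cargo alone batches the inserts but does not pause the reads; if the source still outruns the writes, you can check cargo.length() in the row callback, but actually pausing the stream depends on what the Firebird driver supports.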
