Node.js blocks when iterating over 1 million records
/*
 * This function receives a large payload (around 143 MB), splits it into
 * chunks of roughly 30 MB, and performs the next operation on each chunk.
 * It uses the 'object-sizeof' npm module to measure data size during iteration.
 */
const Q = require('q');
const sizeof = require('object-sizeof');
/**
 * Splits `formattedData` into batches of roughly `dataLimit` MB and hands each
 * batch to the downstream processing step.
 *
 * Fixes over the original:
 *  - The `async.forEachLimit` iterator only invoked `callback()` when a batch
 *    was flushed, so iteration stalled forever on under-limit batches (the
 *    reported "block"). The work here is fully synchronous, so a plain loop
 *    is used instead — no `async` library (which was never required anyway).
 *  - `sizeof(finalData)` re-measured the whole accumulated array on every
 *    element (O(n²) over 1M records); the batch size is now tracked
 *    incrementally, one `sizeof(element)` per element.
 *  - The promise now always resolves; the original deferred was left pending
 *    when the trailing batch was empty.
 *  - A fresh array is allocated per batch instead of truncating via
 *    `finalData.length = 0`, so a downstream consumer holding a reference to
 *    a previous batch never sees it clobbered.
 *
 * @param {Array<Object>} formattedData - records to process in chunks
 * @returns {Promise<void>} resolves once every batch has been dispatched
 */
function insertProcess(formattedData) {
  const dataLimit = 30; // batch size limit in MB
  const limitBytes = dataLimit * 1000000;

  // Small enough to send in one shot — no chunking needed.
  if (sizeof(formattedData) <= limitBytes) {
    // some function invoked
    return Promise.resolve();
  }

  let finalData = [];
  let batchBytes = 0;

  for (const element of formattedData) {
    finalData.push(element);
    batchBytes += sizeof(element); // incremental — avoids re-measuring the whole batch
    if (batchBytes >= limitBytes) {
      // passing finalData to another function
      finalData = []; // start a fresh batch; previous array stays intact downstream
      batchBytes = 0;
    }
  }

  // Flush any trailing partial batch.
  if (finalData.length > 0) {
    // passing finalData to another function
  }

  return Promise.resolve();
}
Sources
This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.
Source: Stack Overflow
| Solution | Source |
|---|---|
