Exponential memory growth in Node.js: memory leak?
I'm writing a script that fetches blockchain transactions, but the process's memory usage keeps growing until it reaches around 4 GB and the script crashes. I suspect a memory leak, possibly related to the garbage collection mechanism.
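For reference, heap growth can be confirmed while the script runs by sampling Node's built-in `process.memoryUsage()`. This is a minimal sketch, not part of the original script; the 10-second interval is arbitrary:

```js
// Periodically log heap usage so growth can be observed before the crash.
// process.memoryUsage() is a built-in Node.js API; the interval is arbitrary.
setInterval(() => {
  const { heapUsed, heapTotal, rss } = process.memoryUsage();
  const mb = (n) => (n / 1024 / 1024).toFixed(1) + " MB";
  console.log(`heapUsed=${mb(heapUsed)} heapTotal=${mb(heapTotal)} rss=${mb(rss)}`);
}, 10000).unref(); // unref() so this timer alone does not keep the process alive
```

The script itself: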
```js
async function getTransactions(blockNumber) {
  if (blockNumber < 15000000) {
    let block = await web3.eth.getBlock(blockNumber);
    for (let i = 0; i < block.transactions.length; i++) {
      let transaction = await web3.eth.getTransactionReceipt(block.transactions[i]);
      for (let j = 0; j < transaction.logs.length; j++) {
        try {
          // ERC-20 Transfer event signature
          if (transaction.logs[j].topics[0] === "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef") {
            // Strip the "0x" prefix and leading zero padding from the indexed addresses
            let parsed1 = transaction.logs[j].topics[1].replace("0x", "").replace(/^0+/, "");
            let parsed2 = transaction.logs[j].topics[2].replace("0x", "").replace(/^0+/, "");
            // A new Contract instance is created for every matching log
            let contract = new web3.eth.Contract(erc20_abi, transaction.logs[j].address);
            let decimals = await contract.methods.decimals().call();
            let transfer = {
              from: "0x" + parsed1,
              to: "0x" + parsed2,
              token: transaction.logs[j].address,
              amount: parseInt(transaction.logs[j].data) / (10 ** decimals),
              block: block.number,
              timestamp: block.timestamp,
              hash: transaction.logs[j].transactionHash,
            };
            console.log(transfer);
            await createTransfer(transfer);
          }
        } catch (error) {
          // errors (e.g. contracts without a decimals() method) are swallowed
        }
      }
    }
  }
}

(async () => {
  await connectdb();
  // Resume from the highest block already stored, then walk forward
  for (let i = await findMax(); i < 15000000; i++) {
    await getTransactions(i);
  }
})();
```
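One thing worth noting in the loop above: a new `web3.eth.Contract` instance is constructed for every matching log, and `decimals()` is fetched over the network each time, even for tokens seen thousands of times before. Depending on the web3.js version, contract instances may also be retained by the provider, which would match the observed growth. A common mitigation is to cache decimals per token address so each token's contract object is created at most once. This is a sketch against the code above, relying on the question's `web3` and `erc20_abi`; `decimalsCache` and `getDecimals` are names introduced here, not part of the original:

```js
// Hypothetical cache: token address -> decimals, so each token's
// Contract object is built and queried only once.
const decimalsCache = new Map();

async function getDecimals(tokenAddress) {
  if (!decimalsCache.has(tokenAddress)) {
    const contract = new web3.eth.Contract(erc20_abi, tokenAddress);
    decimalsCache.set(tokenAddress, await contract.methods.decimals().call());
  }
  return decimalsCache.get(tokenAddress);
}

// Inside the log loop, the Contract construction and decimals() call
// would then become:
// let decimals = await getDecimals(transaction.logs[j].address);
```

Whether this is the leak itself or only reduces allocation pressure depends on how web3.js retains contract instances internally, but it removes the per-log object churn either way.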
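If the growth persists, a heap snapshot taken at two points and diffed in Chrome DevTools (Memory tab) usually identifies what is being retained. Node's built-in `v8` module can write snapshots without attaching a debugger; a minimal sketch, where `dumpHeap` is a helper introduced here:

```js
const v8 = require("v8");

// Write a snapshot loadable in Chrome DevTools. Comparing two snapshots
// taken some blocks apart shows which objects are accumulating.
function dumpHeap(label) {
  const file = v8.writeHeapSnapshot(`heap-${label}-${Date.now()}.heapsnapshot`);
  console.log("heap snapshot written to", file);
}

// e.g. call dumpHeap("start") before the block loop and
// dumpHeap("later") after a few thousand blocks.
```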