Delay between requests — pagination requests violate API rate limit (Node.js)

I have a while loop in a function which I use to query an API web service. The loop handles the pagination in the responses from the API server so the function can return all the entries in the database.

I need to adapt to rate limiting from the web service. The best way may be to set up a delay between the requests. How can I add a delay of 1000ms between each of the requests that handle pagination in this function?

// Discogs API client handle used by all pagination requests below.
// `const` instead of `var`: the binding is never reassigned.
const db = new Discogs().database();


/**
 * Fetches every page of an artist's releases from the API.
 *
 * @param {string|number} id - artist id to query.
 * @param {number} [delayMs=1000] - pause between pagination requests, to
 *     respect the service's rate limit (the 1000 ms the question asks for).
 * @returns {Promise<Array>} resolves with one results object per page.
 */
const getALL = async (id, delayMs = 1000) => {

    const releases = [];
    let currentPage = 1;
    let totalPages = null;

    // `totalPages` is unknown until the first response arrives.
    while (totalPages === null || currentPage <= totalPages) {

        // BUG FIX: the original never passed `page`, so every iteration
        // re-fetched page 1; it also used `<`, which skipped the final page.
        const results = await db.getArtistReleases(id, { page: currentPage });

        if (totalPages === null) {
            totalPages = Number.parseInt(results.pagination.pages, 10);
        }

        releases.push(results);
        currentPage++;

        // Throttle: wait between pagination requests (no pause after the last one).
        if (currentPage <= totalPages) {
            await new Promise((resolve) => setTimeout(resolve, delayMs));
        }
    }

    // Results are already awaited, so return the array directly — equivalent
    // to the original's `Promise.all` over already-resolved values.
    return releases;
};

I execute the function in my app's backend API call

    // Kick off the paginated fetch for the requested artist.
    const response = getAll(req.body.artistId);

    response
        .then((results) => {
            // TODO: send `results` back to the client, e.g. res.json(results).
        })
        .catch((error) => {
            // BUG FIX: the original .catch body was empty, silently swallowing
            // every rejection. At minimum, log the failure.
            console.error("getAll failed:", error);
        });


Solution 1:[1]

To adapt to the API's rate limit, I rewrote the function that handles the pagination and the API library calls, following this solution.

Also added a sleep(ms) function to pause execution for a given number of milliseconds when the rate limit is being approached.

    /**
     * Fetches all of an artist's releases page by page, throttling when the
     * per-minute rate limit is nearly exhausted.
     *
     * @param {string|number} id - artist id to query.
     * @returns {Promise<Array<Array>>} resolves with one `releases` array per page.
     */
    const getAll = async (id) => {
        return new Promise((resolve, reject) => {


            let mainReleases = []
            let perMinuteRateLimit = 60;        // requests allowed per minute — TODO confirm against API docs
            let rateLimitThrottleSeconds = 4;   // pause length when near the limit
            let perPage = 75;                   // BUG FIX: must be identical on every request (see below)
            let latestResults;


            db.getArtistReleases(id, {page: 1, per_page: perPage}, (err, results, rateLimit)  => {
                if (err) {
                    // BUG FIX: the original only logged here, then crashed on
                    // `results.releases`. Reject so the caller's .catch() fires.
                    console.log("Discogs API Error: " + err);
                    return reject(err);
                }

                latestResults = results;
                mainReleases.push(latestResults.releases);

                async.whilst(
                    // Keep iterating while the API reports a "next" page URL.
                    (cb) => cb(null, latestResults['pagination'].urls.next),
                    (iteration_cb) => {
                        var q = url.parse(latestResults['pagination'].urls.next, true);

                        console.log("Discogs API - Artist Releases Query Page " + q.query['page'] + " of " + latestResults['pagination'].pages + "...");

                        // BUG FIX: the original omitted per_page here, so later
                        // requests used the library default and the page number
                        // taken from the pagination URL pointed at the wrong slice.
                        db.getArtistReleases(id, {page : q.query['page'], per_page: perPage}, (err, res, rateLimit)  => {
                            // BUG FIX: the original ignored `err` entirely.
                            if (err) {
                                return iteration_cb(err);
                            }

                            latestResults = res;
                            mainReleases.push(latestResults.releases);

                            // Throttle once more than 90% of the per-minute budget is used.
                            if (rateLimit['used'] > perMinuteRateLimit*0.9) {
                                sleep(rateLimitThrottleSeconds * 1000).then(() => {
                                    console.log("near Discogs API rate limit -- throttling requests\n" + JSON.stringify(rateLimit));
                                    iteration_cb(null, res);
                                });
                            } else {
                                iteration_cb(null, res);
                            }
                    });

                    },
                    (err, results) => {
                        // BUG FIX: surface loop errors instead of resolving anyway.
                        if (err) {
                            return reject(err);
                        }
                        resolve(mainReleases);
                    }
                )
            });
        });
    };

    /**
     * Returns a promise that fulfils after the given delay.
     *
     * @param {number} ms - delay in milliseconds.
     * @returns {Promise<void>}
     */
    function sleep(ms) {
      return new Promise((done) => {
        setTimeout(done, ms);
      });
    }
  


Queueing looks like a great solution, but with this approach I was able to get the results I needed quickly — probably at the expense of scalability.

Sources

This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.

Source: Stack Overflow

Solution Source
Solution 1 Max McCarty