简体   繁体   中英

Exceeding request rate limit and custom function max execution time when fetching data from Pipedrive API

I am trying to export my Pipedrive data to a Google Sheet — in particular, to link the results of two of my queries together. So I first wrote this script:

/**
 * Fetches all product ids from the Pipedrive API and, for each product,
 * fetches its deals via GetPipedriveDeals4.
 *
 * @returns {Array[]} one row per product: [productId, dealsForThatProduct].
 */
function GetPipedriveDeals2() {
  // Pipedrive paginates responses: `start` is the offset of the first record
  // and `limit` caps the page size. Next step is to iterate over `start` so
  // the sheet can be filled progressively.
  const url   = "https://laptop.pipedrive.com/v1/products:(id)?start=";
  const limit = "&limit=500";
  const start = 1;
  const token = "&api_token=XXXXXXXXXXXXXXX"; // NOTE(review): move the secret out of source control

  const response = UrlFetchApp.fetch(url + start + limit + token);
  const dataSet = JSON.parse(response.getContentText());

  // Build one row per product id; the second cell holds that product's deals.
  const rows = [];
  for (let i = 0; i < dataSet.data.length; i++) {
    const data = dataSet.data[i];
    rows.push([data.id, GetPipedriveDeals4(data.id)]);
  }

  // Logger.log substitutes %s placeholders; without one the JSON arg is dropped.
  Logger.log('function2 %s', JSON.stringify(rows, null, 8)); // Log transformed data
  return rows;
}

// Standard functions to call the spreadsheet sheet and activesheet
function GetPipedriveDeals4(idNew) {
  let ss = SpreadsheetApp.getActiveSpreadsheet();
  let sheets = ss.getSheets();
  let sheet = ss.getActiveSheet();

   //the way the url is build next step is to iterate between the end because api only allows a fixed number of calls (100) this way i can slowly fill the sheet.
  let url    = "https://laptop.pipedrive.com/v1/products/"+idNew+"/deals:(id,d93b458adf4bf84fefb6dbce477fe77cdf9de675)?start=";
  let limit  = "&limit=500";
  //let filter = "&filter_id=64";
  let pipeline = 1; // put a pipeline id specific to your PipeDrive setup 
  let start  = 1;
  //let end  = start+50;
  let token  = "&api_token=XXXXXXXXXXXXXXXXX"
  

  let response = UrlFetchApp.fetch(url+start+limit+token); //
  let dataAll = JSON.parse(response.getContentText()); 
  let dataSet = dataAll;
   //Logger.log(dataSet)
  //let prices = prices;
  //create array where the data should be put
  let rows = [], data;
  if(dataSet.data === null )return
  else {
    for (let i = 0; i < dataSet.data.length; i++) {
      data = dataSet.data[i];
      let idNew = data.id; 
      rows.push([data.id, data['d93b458adf4bf84fefb6dbce477fe77cdf9de675']]);
    }
  
  Logger.log( 'function4', JSON.stringify(rows,null,2) );   // Log transformed data
  return rows;
  }
}

But it is not optimized at all and takes about 60 seconds to run, while Google Apps Script limits custom function execution to 30 seconds... With help, I wrote this second function:

/**
 * Collects the deal-detail responses for every Pipedrive product.
 *
 * @param {number} apiRequestLimit - page size used for every API call.
 * @returns {Object[]} one HTTPResponse per product, in product order.
 */
function getPipedriveDeals(apiRequestLimit) {
  var start = 0;
  var limit = "&limit=" + apiRequestLimit;
  var token = "&api_token=XXXXXXXXXXX";

  // Initial request: list only the product ids needed for the detail calls.
  var idsListRequest = "https://laptop.pipedrive.com/v1/products:(id)?start=";
  var listResponse = UrlFetchApp.fetch(idsListRequest + start + limit + token);
  var products = JSON.parse(listResponse.getContentText()).data;

  // One detail url per product id.
  var requests = products.map(function (product) {
    var productDetailUrl = "https://laptop.pipedrive.com/v1/products/" + product.id + "/deals:(id,d93b458adf4bf84fefb6dbce477fe77cdf9de675)?start=";
    return productDetailUrl + start + limit + token;
  });

  // Fire every detail request in a single batched fetchAll call.
  return UrlFetchApp.fetchAll(requests);
}

But this time it's the opposite. I reach my request limit imposed by Pipedrive: https://pipedrive.readme.io/docs/core-api-concepts-rate-limiting (80 requests in 2 sec).

I confess I have run out of ideas. I thought of adding OAuth2 to my script to increase my query limit, but it seems really long and complicated, and this is not at all my field.

In summary, I would just like to have a script that doesn't execute requests too fast but without exceeding the 30 seconds imposed by Google Apps Script.

---------------------EDIT---TEST---FOREACH80-------------------------------------

 function getPipedriveProducts(){
  //Make the initial request to get the ids you need for the details.
  var idsListRequest = "https://laptop.pipedrive.com/v1/products:(id)?start=";
  var start  = 0;
  var limit  = "&limit=500";
  var token  = "&api_token=XXXXXXXXXXXXXXXXXXX";
  var response = UrlFetchApp.fetch(idsListRequest+start+limit+token);
  var data = JSON.parse(response.getContentText()).data;
  
  //For every id in the response, construct a url (the detail url) and push to a list of requests
   const batch = new Set;
  let requests = [];
  data.forEach(function(product){
    var productDetailUrl = "https://laptop.pipedrive.com/v1/products/" + product.id + "/deals:(id,d93b458adf4bf84fefb6dbce477fe77cdf9de675)?start=";
    requests.push(productDetailUrl+start+limit+token);
    if(requests.length === 79) {
      batch.add(requests);
      requests = [];
    }
  })
  const allResponses = [...batch].flatMap(requests => {
    Utilities.sleep(2000);
    return UrlFetchApp.fetchAll(requests);
   Logger.log(allResponses) 
  });
}

[screenshot: execution log output]

  • Create Set of 80 requests each

  • Execute each set value using fetchAll

  // Group the request urls into chunks of at most 80 (Pipedrive allows
  // 80 requests per 2-second window).
  const batch = new Set();
  let requests = [];
  data.forEach(function (product) {
    var productDetailUrl = "https://example.com";
    requests.push(productDetailUrl + start + limit + token);
    if (requests.length === 80) {
      batch.add(requests);
      requests = [];
    }
  });
  // Flush the final partial chunk (fewer than 80 urls) — without this,
  // the remainder after the last full chunk is never fetched.
  if (requests.length > 0) {
    batch.add(requests);
  }
  // Execute each chunk with fetchAll, sleeping one rate-limit window first.
  const allResponses = [...batch].flatMap((chunk) => {
    Utilities.sleep(2000);
    return UrlFetchApp.fetchAll(chunk);
  });

Chunking

One of the most important concepts in working with APIs is chunking as you need to avoid rate-limiting, accommodate request scheduling, parallelize CPU-heavy calculations, etc. There are countless ways to split an array in chunks (see half a hundred answers in this canonical Q&A just for JavaScript).

Here is a small configurable utility tailored to the situation where one wants to split a flat array into an array of arrays of a certain size/pattern (which is usually the case with request chunking):

 /** * @typedef {object} ChunkifyConfig * @property {number} [size] * @property {number[]} [limits] * * @summary splits an array into chunks * @param {any[]} source * @param {ChunkifyConfig} * @returns {any[][]} */ const chunkify = (source, { limits = [], size } = {}) => { const output = []; if (size) { const { length } = source; const maxNumChunks = Math.ceil((length || 1) / size); let numChunksLeft = maxNumChunks; while (numChunksLeft) { const chunksProcessed = maxNumChunks - numChunksLeft; const elemsProcessed = chunksProcessed * size; output.push(source.slice(elemsProcessed, elemsProcessed + size)); numChunksLeft--; } return output; } const { length } = limits; if (.length) { return [Object,assign([]; source)]; } let lastSlicedElem = 0. limits,forEach((limit; i) => { const limitPosition = lastSlicedElem + limit. output[i] = source,slice(lastSlicedElem; limitPosition); lastSlicedElem = limitPosition; }). const lastChunk = source;slice(lastSlicedElem). lastChunk.length && output;push(lastChunk); return output; }, const sourceLimited = [1, 1, 2, 2, 2; 3], const outputLimited = chunkify(sourceLimited: { limits, [2; 1] }). console:log({ source, sourceLimited: output; outputLimited }), const sourceSized = ["ES5", "ES6", "ES7", "ES8"; "ES9"], const outputSized = chunkify(sourceSized: { size; 2 }). console:log({ source, sourceSized: output; outputSized });

From there, the only thing you need is to traverse the array while waiting for each chunk to complete to make it applicable to your situation. Please beware that requests can fail for any number of reasons - you should persist last successfully processed chunk.

The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address. For any questions, please contact: yoyou2525@163.com.

 
粤ICP备18138465号  © 2020-2024 STACKOOM.COM