Skip to content

Commit

Permalink
Handle failures in batch size fetching with the default batch size
Browse files Browse the repository at this point in the history
  • Loading branch information
dmsnell committed Mar 22, 2022
1 parent 0f23f16 commit f930527
Showing 1 changed file with 47 additions and 12 deletions.
59 changes: 47 additions & 12 deletions packages/core-data/src/batch/default-processor.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,12 @@ import { chunk } from 'lodash';
*/
import apiFetch from '@wordpress/api-fetch';

/**
 * How long (in milliseconds) to wait to hear back from the server about the
 * supported batch size before falling back to the default.
 */
const BATCH_SIZE_FETCH_TIMEOUT_MS = 1000;

/**
 * Fallback batch size; mirrors the default `maxItems` value shipped with Core.
 */
const DEFAULT_BATCH_SIZE = 25;

/**
* Maximum number of requests to place in a single batch request. Obtained by
* sending a preflight OPTIONS request to /batch/v1/.
Expand All @@ -17,27 +23,56 @@ import apiFetch from '@wordpress/api-fetch';
*/
let maxItems = null;

/**
 * Returns a promise that resolves after the requested delay.
 *
 * @param {number} msDelay Length of the pause, in milliseconds.
 */
const wait = ( msDelay ) => {
	return new Promise( ( resolve ) => {
		setTimeout( resolve, msDelay );
	} );
};

/**
 * Returns the batching API batch size, updated from the server.
 *
 * Falls back to the Core default when the preflight OPTIONS request fails,
 * returns an unexpected shape, or doesn't respond within
 * BATCH_SIZE_FETCH_TIMEOUT_MS.
 *
 * @return {Promise<number>} How many API requests to send in one batch.
 */
const batchSize = async () => {
	// Reuse the cached value once the server has reported its limit.
	if ( null !== maxItems ) {
		return maxItems;
	}

	// Surfaces an error in a new task so we can fall back to the default
	// value without blocking the batched API calls, while still making
	// the failure visible to error reporting.
	const surfaceError = ( e ) => {
		setTimeout( () => {
			throw e;
		}, 0 );
	};

	const fetcher = apiFetch( { path: '/batch/v1', method: 'OPTIONS' } ).then(
		( { endpoints } ) => {
			try {
				maxItems = endpoints[ 0 ].args.requests.maxItems;
			} catch ( e ) {
				// The response didn't have the expected shape.
				surfaceError( e );
			}
		},
		// A rejected preflight (e.g. network failure) must also fall back
		// to the default instead of rejecting the batch processing itself;
		// without this handler the rejection propagates through the race.
		surfaceError
	);

	await Promise.race( [ wait( BATCH_SIZE_FETCH_TIMEOUT_MS ), fetcher ] );

	return maxItems ?? DEFAULT_BATCH_SIZE;
};

/**
* Default batch processor. Sends its input requests to /batch/v1.
*
* @param {Array} requests List of API requests to perform at once.
*
* @return {Promise} Promise that resolves to a list of objects containing
* either `output` (if that request was succesful) or `error`
* (if not ).
* @return {Promise} Resolves to a list of objects containing either
* `output` if that request was successful else `error`.
*/
export default async function defaultProcessor( requests ) {
if ( maxItems === null ) {
const preflightResponse = await apiFetch( {
path: '/batch/v1',
method: 'OPTIONS',
} );
maxItems = preflightResponse.endpoints[ 0 ].args.requests.maxItems;
}

const results = [];

for ( const batchRequests of chunk( requests, maxItems ?? 25 ) ) {
for ( const batchRequests of chunk( requests, await batchSize() ) ) {
const batchResponse = await apiFetch( {
path: '/batch/v1',
method: 'POST',
Expand Down

0 comments on commit f930527

Please sign in to comment.