diff --git a/packages/core-data/src/batch/default-processor.js b/packages/core-data/src/batch/default-processor.js
index 477f67e78106e1..26c38c503864eb 100644
--- a/packages/core-data/src/batch/default-processor.js
+++ b/packages/core-data/src/batch/default-processor.js
@@ -8,6 +8,12 @@ import { chunk } from 'lodash';
  */
 import apiFetch from '@wordpress/api-fetch';
 
+/** How long to wait to hear back from the server for batch size before defaulting. */
+const BATCH_SIZE_FETCH_TIMEOUT_MS = 1000;
+
+/** Default value shipped with Core */
+const DEFAULT_BATCH_SIZE = 25;
+
 /**
  * Maximum number of requests to place in a single batch request. Obtained by
  * sending a preflight OPTIONS request to /batch/v1/.
@@ -17,27 +23,56 @@ import apiFetch from '@wordpress/api-fetch';
  */
 let maxItems = null;
 
+/**
+ * Waits a given number of milliseconds then resolves.
+ *
+ * @param {number} msDelay How many milliseconds to wait before resolving.
+ */
+const wait = ( msDelay ) =>
+	new Promise( ( resolve ) => setTimeout( resolve, msDelay ) );
+
+/**
+ * Returns the batching API batch size, updated from the server.
+ *
+ * @return {Promise} How many API requests to send in one batch.
+ */
+const batchSize = async () => {
+	if ( null !== maxItems ) {
+		return maxItems;
+	}
+
+	const fetcher = apiFetch( { path: '/batch/v1', method: 'OPTIONS' } ).then(
+		( { endpoints } ) => {
+			try {
+				maxItems = endpoints[ 0 ].args.requests.maxItems;
+			} catch ( e ) {
+				// Catching and re-throwing in a new task lets us fall back
+				// to the default value while still surfacing the error.
+				// We do this so that we don't block the batched API calls.
+				setTimeout( () => {
+					throw e;
+				}, 0 );
+			}
+		}
+	);
+
+	await Promise.race( [ wait( BATCH_SIZE_FETCH_TIMEOUT_MS ), fetcher ] );
+
+	return maxItems ?? DEFAULT_BATCH_SIZE;
+};
+
 /**
  * Default batch processor. Sends its input requests to /batch/v1.
  *
  * @param {Array} requests List of API requests to perform at once.
  *
- * @return {Promise} Promise that resolves to a list of objects containing
- *                   either `output` (if that request was succesful) or `error`
- *                   (if not ).
+ * @return {Promise} Resolves to a list of objects containing either
+ *                   `output` if that request was successful else `error`.
  */
 export default async function defaultProcessor( requests ) {
-	if ( maxItems === null ) {
-		const preflightResponse = await apiFetch( {
-			path: '/batch/v1',
-			method: 'OPTIONS',
-		} );
-		maxItems = preflightResponse.endpoints[ 0 ].args.requests.maxItems;
-	}
-
 	const results = [];
 
-	for ( const batchRequests of chunk( requests, maxItems ?? 25 ) ) {
+	for ( const batchRequests of chunk( requests, await batchSize() ) ) {
 		const batchResponse = await apiFetch( {
 			path: '/batch/v1',
 			method: 'POST',