From 4eb8145fb3b503884ea610e813be359127d1a705 Mon Sep 17 00:00:00 2001 From: Maneesh Tewani Date: Thu, 6 Oct 2022 14:53:43 -0500 Subject: [PATCH] Implemented exponential backoff with query (#6653) --- .changeset/large-lamps-reflect.md | 6 + common/api-review/storage.api.md | 6 +- .../storage/src/implementation/backoff.ts | 27 +- .../storage/src/implementation/constants.ts | 5 + packages/storage/src/implementation/error.ts | 11 +- .../storage/src/implementation/request.ts | 51 ++-- .../storage/src/implementation/requests.ts | 13 +- packages/storage/src/implementation/utils.ts | 40 +++ packages/storage/src/service.ts | 6 +- packages/storage/src/task.ts | 42 +++- packages/storage/test/unit/requests.test.ts | 65 +++++ packages/storage/test/unit/task.test.ts | 182 +++++++++++++- packages/storage/test/unit/testshared.ts | 231 +++++++++++++++++- packages/storage/test/unit/utils.test.ts | 40 +++ 14 files changed, 669 insertions(+), 56 deletions(-) create mode 100644 .changeset/large-lamps-reflect.md create mode 100644 packages/storage/src/implementation/utils.ts create mode 100644 packages/storage/test/unit/utils.test.ts diff --git a/.changeset/large-lamps-reflect.md b/.changeset/large-lamps-reflect.md new file mode 100644 index 00000000000..25bbb80bfb7 --- /dev/null +++ b/.changeset/large-lamps-reflect.md @@ -0,0 +1,6 @@ +--- +"@firebase/storage": patch +--- + +Fixed bug where upload status wasn't being checked after an upload failure. +Implemented exponential backoff and max retry strategy. diff --git a/common/api-review/storage.api.md b/common/api-review/storage.api.md index 4715253d510..04df972b727 100644 --- a/common/api-review/storage.api.md +++ b/common/api-review/storage.api.md @@ -83,7 +83,7 @@ export class _FirebaseStorageImpl implements FirebaseStorage { // Warning: (ae-forgotten-export) The symbol "Request" needs to be exported by the entry point index.d.ts // // (undocumented) - _makeRequest(requestInfo: RequestInfo_2, requestFactory: () => Connection, authToken: string | null, appCheckToken: string | null): Request_2; + _makeRequest(requestInfo: RequestInfo_2, requestFactory: () => Connection, authToken: string | null, appCheckToken: string | null, retry?: boolean): Request_2; // (undocumented) makeRequestWithTokens(requestInfo: RequestInfo_2, requestFactory: () => Connection): Promise; _makeStorageReference(loc: _Location): _Reference; @@ -319,11 +319,13 @@ export class _UploadTask { _blob: _FbsBlob; cancel(): boolean; catch(onRejected: (p1: StorageError_2) => T | Promise): Promise; + // (undocumented) + isExponentialBackoffExpired(): boolean; // Warning: (ae-forgotten-export) The symbol "Metadata" needs to be exported by the entry point index.d.ts _metadata: Metadata | null; // Warning: (ae-forgotten-export) The symbol "Unsubscribe" needs to be exported by the entry point index.d.ts // Warning: (ae-forgotten-export) The symbol "Subscribe" needs to be exported by the entry point index.d.ts - on(type: _TaskEvent, nextOrObserver?: StorageObserver | null | ((snapshot: UploadTaskSnapshot) => unknown), error?: ((a: StorageError_2) => unknown) | null, completed?: Unsubscribe_2 | null): Unsubscribe_2 | Subscribe_2; + on(type: _TaskEvent, nextOrObserver?: StorageObserver | null | ((snapshot: UploadTaskSnapshot) => unknown), error?: ((a: StorageError_2) => unknown) | null, completed?: CompleteFn | null): Unsubscribe_2 | Subscribe_2; pause(): boolean; resume(): boolean; get snapshot(): UploadTaskSnapshot; diff --git a/packages/storage/src/implementation/backoff.ts 
b/packages/storage/src/implementation/backoff.ts index 9560870abea..2a44bd85150 100644 --- a/packages/storage/src/implementation/backoff.ts +++ b/packages/storage/src/implementation/backoff.ts @@ -24,15 +24,24 @@ type id = (p1: boolean) => void; export { id }; /** - * @param f May be invoked - * before the function returns. - * @param callback Get all the arguments passed to the function - * passed to f, including the initial boolean. + * Accepts a callback for an action to perform (`doRequest`), + * and then a callback for when the backoff has completed (`backoffCompleteCb`). + * The callback sent to start requires an argument to call (`onRequestComplete`). + * When `start` calls `doRequest`, it passes a callback for when the request has + * completed, `onRequestComplete`. Based on this, the backoff continues, with + * another call to `doRequest` and the above loop continues until the timeout + * is hit, or a successful response occurs. + * @description + * @param doRequest Callback to perform request + * @param backoffCompleteCb Callback to call when backoff has been completed */ export function start( - f: (p1: (success: boolean) => void, canceled: boolean) => void, + doRequest: ( + onRequestComplete: (success: boolean) => void, + canceled: boolean + ) => void, // eslint-disable-next-line @typescript-eslint/no-explicit-any - callback: (...args: any[]) => unknown, + backoffCompleteCb: (...args: any[]) => unknown, timeout: number ): id { // TODO(andysoto): make this code cleaner (probably refactor into an actual @@ -55,14 +64,14 @@ export function start( function triggerCallback(...args: any[]): void { if (!triggeredCallback) { triggeredCallback = true; - callback.apply(null, args); + backoffCompleteCb.apply(null, args); } } function callWithDelay(millis: number): void { retryTimeoutId = setTimeout(() => { retryTimeoutId = null; - f(handler, canceled()); + doRequest(responseHandler, canceled()); }, millis); } @@ -72,7 +81,7 @@ export function start( } } - function handler(success: boolean, ...args: any[]): void { + function responseHandler(success: boolean, ...args: any[]): void { if (triggeredCallback) { clearGlobalTimeout(); return; diff --git a/packages/storage/src/implementation/constants.ts b/packages/storage/src/implementation/constants.ts index d36e7fbf676..48c6db55c11 100644 --- a/packages/storage/src/implementation/constants.ts +++ b/packages/storage/src/implementation/constants.ts @@ -42,6 +42,11 @@ export const DEFAULT_MAX_OPERATION_RETRY_TIME = 2 * 60 * 1000; */ export const DEFAULT_MAX_UPLOAD_RETRY_TIME = 10 * 60 * 1000; +/** + * 1 second + */ +export const DEFAULT_MIN_SLEEP_TIME_MILLIS = 1000; + /** * This is the value of Number.MIN_SAFE_INTEGER, which is not well supported * enough for us to use it directly. diff --git a/packages/storage/src/implementation/error.ts b/packages/storage/src/implementation/error.ts index cdccfc51053..fa12d76d763 100644 --- a/packages/storage/src/implementation/error.ts +++ b/packages/storage/src/implementation/error.ts @@ -34,8 +34,9 @@ export class StorageError extends FirebaseError { * @param code - A StorageErrorCode string to be prefixed with 'storage/' and * added to the end of the message. * @param message - Error message. 
+ * @param status_ - Corresponding HTTP Status Code */ - constructor(code: StorageErrorCode, message: string) { + constructor(code: StorageErrorCode, message: string, private status_ = 0) { super( prependCode(code), `Firebase Storage: ${message} (${prependCode(code)})` @@ -46,6 +47,14 @@ export class StorageError extends FirebaseError { Object.setPrototypeOf(this, StorageError.prototype); } + get status(): number { + return this.status_; + } + + set status(status: number) { + this.status_ = status; + } + /** * Compares a StorageErrorCode against this error's code, filtering out the prefix. */ diff --git a/packages/storage/src/implementation/request.ts b/packages/storage/src/implementation/request.ts index 226b1ea08de..572562d1528 100644 --- a/packages/storage/src/implementation/request.ts +++ b/packages/storage/src/implementation/request.ts @@ -26,6 +26,7 @@ import { ErrorHandler, RequestHandler, RequestInfo } from './requestinfo'; import { isJustDef } from './type'; import { makeQueryString } from './url'; import { Connection, ErrorCode, Headers, ConnectionType } from './connection'; +import { isRetryStatusCode } from './utils'; export interface Request { getPromise(): Promise; @@ -69,7 +70,8 @@ class NetworkRequest implements Request { private errorCallback_: ErrorHandler | null, private timeout_: number, private progressCallback_: ((p1: number, p2: number) => void) | null, - private connectionFactory_: () => Connection + private connectionFactory_: () => Connection, + private retry = true ) { this.promise_ = new Promise((resolve, reject) => { this.resolve_ = resolve as (value?: O | PromiseLike) => void; @@ -93,16 +95,15 @@ class NetworkRequest implements Request { const connection = this.connectionFactory_(); this.pendingConnection_ = connection; - const progressListener: (progressEvent: ProgressEvent) => void = - progressEvent => { - const loaded = progressEvent.loaded; - const total = progressEvent.lengthComputable - ? progressEvent.total - : -1; - if (this.progressCallback_ !== null) { - this.progressCallback_(loaded, total); - } - }; + const progressListener: ( + progressEvent: ProgressEvent + ) => void = progressEvent => { + const loaded = progressEvent.loaded; + const total = progressEvent.lengthComputable ? progressEvent.total : -1; + if (this.progressCallback_ !== null) { + this.progressCallback_(loaded, total); + } + }; if (this.progressCallback_ !== null) { connection.addUploadProgressListener(progressListener); } @@ -118,7 +119,11 @@ class NetworkRequest implements Request { this.pendingConnection_ = null; const hitServer = connection.getErrorCode() === ErrorCode.NO_ERROR; const status = connection.getStatus(); - if (!hitServer || this.isRetryStatusCode_(status)) { + if ( + (!hitServer || + isRetryStatusCode(status, this.additionalRetryCodes_)) && + this.retry + ) { const wasCanceled = connection.getErrorCode() === ErrorCode.ABORT; backoffCallback( false, @@ -196,22 +201,6 @@ class NetworkRequest implements Request { this.pendingConnection_.abort(); } } - - private isRetryStatusCode_(status: number): boolean { - // The codes for which to retry came from this page: - // https://cloud.google.com/storage/docs/exponential-backoff - const isFiveHundredCode = status >= 500 && status < 600; - const extraRetryCodes = [ - // Request Timeout: web server didn't receive full request in time. - 408, - // Too Many Requests: you're getting rate-limited, basically. 
- 429 - ]; - const isExtraRetryCode = extraRetryCodes.indexOf(status) !== -1; - const isRequestSpecificRetryCode = - this.additionalRetryCodes_.indexOf(status) !== -1; - return isFiveHundredCode || isExtraRetryCode || isRequestSpecificRetryCode; - } } /** @@ -271,7 +260,8 @@ export function makeRequest( authToken: string | null, appCheckToken: string | null, requestFactory: () => Connection, - firebaseVersion?: string + firebaseVersion?: string, + retry = true ): Request { const queryPart = makeQueryString(requestInfo.urlParams); const url = requestInfo.url + queryPart; @@ -291,6 +281,7 @@ export function makeRequest( requestInfo.errorHandler, requestInfo.timeout, requestInfo.progressCallback, - requestFactory + requestFactory, + retry ); } diff --git a/packages/storage/src/implementation/requests.ts b/packages/storage/src/implementation/requests.ts index ab3b4e26827..5b32e05a9b5 100644 --- a/packages/storage/src/implementation/requests.ts +++ b/packages/storage/src/implementation/requests.ts @@ -104,7 +104,7 @@ export function sharedErrorHandler( xhr: Connection, err: StorageError ): StorageError { - let newErr; + let newErr: StorageError; if (xhr.getStatus() === 401) { if ( // This exact message string is the only consistent part of the @@ -126,6 +126,7 @@ export function sharedErrorHandler( } } } + newErr.status = xhr.getStatus(); newErr.serverResponse = err.serverResponse; return newErr; } @@ -534,8 +535,14 @@ export function continueResumableUpload( } const startByte = status_.current; const endByte = startByte + bytesToUpload; - const uploadCommand = - bytesToUpload === bytesLeft ? 'upload, finalize' : 'upload'; + let uploadCommand = ''; + if (bytesToUpload === 0) { + uploadCommand = 'finalize'; + } else if (bytesLeft === bytesToUpload) { + uploadCommand = 'upload, finalize'; + } else { + uploadCommand = 'upload'; + } const headers = { 'X-Goog-Upload-Command': uploadCommand, 'X-Goog-Upload-Offset': `${status_.current}` diff --git a/packages/storage/src/implementation/utils.ts b/packages/storage/src/implementation/utils.ts new file mode 100644 index 00000000000..155613dde16 --- /dev/null +++ b/packages/storage/src/implementation/utils.ts @@ -0,0 +1,40 @@ +/** + * @license + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Checks the status code to see if the action should be retried. + * + * @param status Current HTTP status code returned by server. + * @param additionalRetryCodes additional retry codes to check against + */ +export function isRetryStatusCode( + status: number, + additionalRetryCodes: number[] +): boolean { + // The codes for which to retry came from this page: + // https://cloud.google.com/storage/docs/exponential-backoff + const isFiveHundredCode = status >= 500 && status < 600; + const extraRetryCodes = [ + // Request Timeout: web server didn't receive full request in time. + 408, + // Too Many Requests: you're getting rate-limited, basically. 
+ 429 + ]; + const isExtraRetryCode = extraRetryCodes.indexOf(status) !== -1; + const isAdditionalRetryCode = additionalRetryCodes.indexOf(status) !== -1; + return isFiveHundredCode || isExtraRetryCode || isAdditionalRetryCode; +} diff --git a/packages/storage/src/service.ts b/packages/storage/src/service.ts index 3034543e531..6777cb7b659 100644 --- a/packages/storage/src/service.ts +++ b/packages/storage/src/service.ts @@ -302,7 +302,8 @@ export class FirebaseStorageImpl implements FirebaseStorage { requestInfo: RequestInfo, requestFactory: () => Connection, authToken: string | null, - appCheckToken: string | null + appCheckToken: string | null, + retry = true ): Request { if (!this._deleted) { const request = makeRequest( @@ -311,7 +312,8 @@ export class FirebaseStorageImpl implements FirebaseStorage { authToken, appCheckToken, requestFactory, - this._firebaseVersion + this._firebaseVersion, + retry ); this._requests.add(request); // Request removes itself from set when complete. diff --git a/packages/storage/src/task.ts b/packages/storage/src/task.ts index e35ee579b40..2b428d1be16 100644 --- a/packages/storage/src/task.ts +++ b/packages/storage/src/task.ts @@ -22,7 +22,8 @@ import { FbsBlob } from './implementation/blob'; import { canceled, StorageErrorCode, - StorageError + StorageError, + retryLimitExceeded } from './implementation/error'; import { InternalTaskState, @@ -53,6 +54,9 @@ import { } from './implementation/requests'; import { Reference } from './reference'; import { newTextConnection } from './platform/connection'; +import { isRetryStatusCode } from './implementation/utils'; +import { CompleteFn } from '@firebase/util'; +import { DEFAULT_MIN_SLEEP_TIME_MILLIS } from './implementation/constants'; /** * Represents a blob being uploaded. Can be used to pause/resume/cancel the @@ -92,6 +96,14 @@ export class UploadTask { private _reject?: (p1: StorageError) => void = undefined; private _promise: Promise; + private sleepTime: number; + + private maxSleepTime: number; + + isExponentialBackoffExpired(): boolean { + return this.sleepTime > this.maxSleepTime; + } + /** * @param ref - The firebaseStorage.Reference object this task came * from, untyped to avoid cyclic dependencies. @@ -111,6 +123,20 @@ export class UploadTask { this._needToFetchStatus = true; this.completeTransitions_(); } else { + const backoffExpired = this.isExponentialBackoffExpired(); + if (isRetryStatusCode(error.status, [])) { + if (backoffExpired) { + error = retryLimitExceeded(); + } else { + this.sleepTime = Math.max( + this.sleepTime * 2, + DEFAULT_MIN_SLEEP_TIME_MILLIS + ); + this._needToFetchStatus = true; + this.completeTransitions_(); + return; + } + } this._error = error; this._transition(InternalTaskState.ERROR); } @@ -124,6 +150,8 @@ export class UploadTask { this._transition(InternalTaskState.ERROR); } }; + this.sleepTime = 0; + this.maxSleepTime = this._ref.storage.maxUploadRetryTime; this._promise = new Promise((resolve, reject) => { this._resolve = resolve; this._reject = reject; @@ -163,7 +191,9 @@ export class UploadTask { // Happens if we miss the metadata on upload completion. this._fetchMetadata(); } else { - this._continueUpload(); + setTimeout(() => { + this._continueUpload(); + }, this.sleepTime); } } } @@ -283,7 +313,8 @@ export class UploadTask { requestInfo, newTextConnection, authToken, - appCheckToken + appCheckToken, + /*retry=*/ false // Upload requests should not be retried as each retry should be preceded by another query request. Which is handled in this file. 
); this._request = uploadRequest; uploadRequest.getPromise().then((newStatus: ResumableUploadStatus) => { @@ -304,7 +335,7 @@ export class UploadTask { const currentSize = RESUMABLE_UPLOAD_CHUNK_SIZE * this._chunkMultiplier; // Max chunk size is 32M. - if (currentSize < 32 * 1024 * 1024) { + if (currentSize * 2 < 32 * 1024 * 1024) { this._chunkMultiplier *= 2; } } @@ -491,8 +522,9 @@ export class UploadTask { | null | ((snapshot: UploadTaskSnapshot) => unknown), error?: ((a: StorageError) => unknown) | null, - completed?: Unsubscribe | null + completed?: CompleteFn | null ): Unsubscribe | Subscribe { + // Note: `type` isn't being used. Its type is also incorrect. TaskEvent should not be a string. const observer = new Observer( (nextOrObserver as | StorageObserverInternal diff --git a/packages/storage/test/unit/requests.test.ts b/packages/storage/test/unit/requests.test.ts index 8246a507e7d..dd42b1e84d0 100644 --- a/packages/storage/test/unit/requests.test.ts +++ b/packages/storage/test/unit/requests.test.ts @@ -632,6 +632,71 @@ describe('Firebase Storage > Requests', () => { return assertBodyEquals(requestInfo.body, smallBlobString); }); + it('populates requestInfo with the upload command when more chunks are left', () => { + const url = + 'https://this.is.totally.a.real.url.com/hello/upload?whatsgoingon'; + const requestInfo = continueResumableUpload( + locationNormal, + storageService, + url, + bigBlob, + RESUMABLE_UPLOAD_CHUNK_SIZE, + mappings + ); + assertObjectIncludes( + { + url, + method: 'POST', + urlParams: {}, + headers: { + 'X-Goog-Upload-Command': 'upload', + 'X-Goog-Upload-Offset': '0' + } + }, + requestInfo + ); + + assert.deepEqual( + requestInfo.body, + bigBlob.slice(0, RESUMABLE_UPLOAD_CHUNK_SIZE)!.uploadData() + ); + }); + it('populates requestInfo with just the finalize command command when no more data needs to be uploaded', () => { + const url = + 'https://this.is.totally.a.real.url.com/hello/upload?whatsgoingon'; + const blobSize = bigBlob.size(); + const requestInfo = continueResumableUpload( + locationNormal, + storageService, + url, + bigBlob, + RESUMABLE_UPLOAD_CHUNK_SIZE, + mappings, + { + current: blobSize, + total: blobSize, + finalized: false, + metadata: null + } + ); + assertObjectIncludes( + { + url, + method: 'POST', + urlParams: {}, + headers: { + 'X-Goog-Upload-Command': 'finalize', + 'X-Goog-Upload-Offset': blobSize.toString() + } + }, + requestInfo + ); + + assert.deepEqual( + requestInfo.body, + bigBlob.slice(blobSize, blobSize)!.uploadData() + ); + }); it('continueResumableUpload handler', () => { const url = 'https://this.is.totally.a.real.url.com/hello/upload?whatsgoingon'; diff --git a/packages/storage/test/unit/task.test.ts b/packages/storage/test/unit/task.test.ts index cdd57152cb0..16849e13998 100644 --- a/packages/storage/test/unit/task.test.ts +++ b/packages/storage/test/unit/task.test.ts @@ -14,22 +14,35 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import { assert } from 'chai'; +import { assert, expect } from 'chai'; import { FbsBlob } from '../../src/implementation/blob'; import { Location } from '../../src/implementation/location'; import { Unsubscribe } from '../../src/implementation/observer'; import { TaskEvent, TaskState } from '../../src/implementation/taskenums'; import { Reference } from '../../src/reference'; import { UploadTask } from '../../src/task'; -import { fakeServerHandler, storageServiceWithHandler } from './testshared'; +import { + fake503ForFinalizeServerHandler, + fakeOneShot503ServerHandler, + fake503ForUploadServerHandler, + fakeServerHandler, + RequestHandler, + storageServiceWithHandler +} from './testshared'; import { injectTestConnection } from '../../src/platform/connection'; +import { Deferred } from '@firebase/util'; +import { retryLimitExceeded } from '../../src/implementation/error'; +import { SinonFakeTimers, useFakeTimers } from 'sinon'; const testLocation = new Location('bucket', 'object'); const smallBlob = new FbsBlob(new Uint8Array([97])); const bigBlob = new FbsBlob(new ArrayBuffer(1024 * 1024)); describe('Firebase Storage > Upload Task', () => { - after(() => injectTestConnection(null)); + let clock: SinonFakeTimers; + after(() => { + injectTestConnection(null); + }); it('Works for a small upload w/ an observer', done => { const storageService = storageServiceWithHandler(fakeServerHandler()); @@ -151,6 +164,12 @@ describe('Firebase Storage > Upload Task', () => { }); }); + /** + * Takes a blob, uploads the blob, and tracks the events in the `events` array. It asserts in the `onComplete` callback itself. + * + * @param blob Blob to upload + * @returns resolved/rejected promise + */ function runNormalUploadTest(blob: FbsBlob): Promise { const storageService = storageServiceWithHandler(fakeServerHandler()); const task = new UploadTask( @@ -272,6 +291,7 @@ describe('Firebase Storage > Upload Task', () => { fixedAssertTrue(allTotalsTheSame); fixedAssertTrue(lastIsAll); + // serves as the cancellation task. It will cancel immediately and upon completion, will check that an error occurred const task2 = new UploadTask( new Reference(storageService, testLocation), blob @@ -284,6 +304,7 @@ describe('Firebase Storage > Upload Task', () => { TaskEvent.STATE_CHANGED, snapshot => { const state = snapshot.state; + // TODO: This status update should probably be extracted out in a helper function. if ( lastState !== TaskState.RUNNING && state === TaskState.RUNNING @@ -298,9 +319,11 @@ describe('Firebase Storage > Upload Task', () => { lastState = state; }, () => { + // TODO: These assertions should be moved down. Adding them here makes it unclear what the assertions are. events2.push('failure'); fixedAssertEquals(events2.length, 2); fixedAssertEquals(events2[0], 'resume'); + // This is not enough. Simply asserting that there was some failure doesn't validate that the *right* error was thrown. 
fixedAssertEquals(events2[1], 'failure'); resolve(null); }, @@ -316,6 +339,79 @@ describe('Firebase Storage > Upload Task', () => { return promise; } + enum StateType { + RESUME = 'resume', + PAUSE = 'pause', + ERROR = 'error', + COMPLETE = 'complete' + } + interface State { + type: StateType; + data?: Error; + } + interface Progress { + bytesTransferred: number; + totalBytes: number; + } + interface TotalState { + events: State[]; + progress: Progress[]; + } + + function handleStateChange( + requestHandler: RequestHandler, + blob: FbsBlob + ): Promise { + const storageService = storageServiceWithHandler(requestHandler); + const task = new UploadTask( + new Reference(storageService, testLocation), + blob + ); + + const deferred = new Deferred(); + let lastState: TaskState; + + const events: State[] = []; + const progress: Progress[] = []; + + task.on( + TaskEvent.STATE_CHANGED, + snapshot => { + const { state } = snapshot; + if (lastState !== TaskState.RUNNING && state === TaskState.RUNNING) { + events.push({ type: StateType.RESUME }); + } else if ( + lastState !== TaskState.PAUSED && + state === TaskState.PAUSED + ) { + events.push({ type: StateType.PAUSE }); + } + const p = { + bytesTransferred: snapshot.bytesTransferred, + totalBytes: snapshot.totalBytes + }; + progress.push(p); + + lastState = state; + }, + function onError(e) { + events.push({ type: StateType.ERROR, data: e }); + deferred.resolve({ + events, + progress + }); + }, + function onComplete() { + events.push({ type: StateType.COMPLETE }); + deferred.resolve({ + events, + progress + }); + } + ); + + return deferred.promise; + } it('Calls callback sequences for small uploads correctly', () => { return runNormalUploadTest(smallBlob); @@ -323,4 +419,84 @@ describe('Firebase Storage > Upload Task', () => { it('Calls callback sequences for big uploads correctly', () => { return runNormalUploadTest(bigBlob); }); + it('properly times out if large blobs returns a 503 when finalizing', async () => { + clock = useFakeTimers(); + // Kick off upload + const promise = handleStateChange( + fake503ForFinalizeServerHandler(), + bigBlob + ); + // Run all timers + await clock.runAllAsync(); + const { events, progress } = await promise; + expect(events.length).to.equal(2); + expect(events[0]).to.deep.equal({ type: 'resume' }); + expect(events[1].type).to.deep.equal('error'); + const retryLimitError = retryLimitExceeded(); + expect(events[1].data!.name).to.deep.equal(retryLimitError.name); + expect(events[1].data!.message).to.deep.equal(retryLimitError.message); + const blobSize = bigBlob.size(); + expect(progress.length).to.equal(4); + expect(progress[0]).to.deep.equal({ + bytesTransferred: 0, + totalBytes: blobSize + }); + expect(progress[1]).to.deep.equal({ + bytesTransferred: 262144, + totalBytes: blobSize + }); + // Upon continueUpload the multiplier becomes * 2, so chunkSize becomes 2 * DEFAULT_CHUNK_SIZE(256 * 1024) + expect(progress[2]).to.deep.equal({ + bytesTransferred: 786432, + totalBytes: blobSize + }); + // Chunk size is smaller here since there are less bytes left to upload than the chunkSize. 
+ expect(progress[3]).to.deep.equal({ + bytesTransferred: 1048576, + totalBytes: blobSize + }); + clock.restore(); + }); + it('properly times out if large blobs returns a 503 when uploading', async () => { + clock = useFakeTimers(); + // Kick off upload + const promise = handleStateChange(fake503ForUploadServerHandler(), bigBlob); + // Run all timers + await clock.runAllAsync(); + const { events, progress } = await promise; + expect(events.length).to.equal(2); + expect(events[0]).to.deep.equal({ type: 'resume' }); + expect(events[1].type).to.deep.equal('error'); + const retryLimitError = retryLimitExceeded(); + expect(events[1].data!.name).to.deep.equal(retryLimitError.name); + expect(events[1].data!.message).to.deep.equal(retryLimitError.message); + const blobSize = bigBlob.size(); + expect(progress.length).to.equal(1); + expect(progress[0]).to.deep.equal({ + bytesTransferred: 0, + totalBytes: blobSize + }); + clock.restore(); + }); + it('tests if small requests that respond with 500 retry correctly', async () => { + clock = useFakeTimers(); + // Kick off upload + const promise = handleStateChange(fakeOneShot503ServerHandler(), smallBlob); + // Run all timers + await clock.runAllAsync(); + const { events, progress } = await promise; + expect(events.length).to.equal(2); + expect(events[0]).to.deep.equal({ type: 'resume' }); + expect(events[1].type).to.deep.equal('error'); + const retryLimitError = retryLimitExceeded(); + expect(events[1].data!.name).to.deep.equal(retryLimitError.name); + expect(events[1].data!.message).to.deep.equal(retryLimitError.message); + const blobSize = smallBlob.size(); + expect(progress.length).to.equal(1); + expect(progress[0]).to.deep.equal({ + bytesTransferred: 0, + totalBytes: blobSize + }); + clock.restore(); + }); }); diff --git a/packages/storage/test/unit/testshared.ts b/packages/storage/test/unit/testshared.ts index 1aa6a53e32a..c6b07f62c32 100644 --- a/packages/storage/test/unit/testshared.ts +++ b/packages/storage/test/unit/testshared.ts @@ -197,7 +197,7 @@ interface Response { body: string; headers: Headers; } -type RequestHandler = ( +export type RequestHandler = ( url: string, method: string, body?: ArrayBufferView | Blob | string | null, @@ -353,3 +353,232 @@ export function fakeServerHandler( } return handler; } + +/** + * Responds with a 503 for finalize. + * @param fakeMetadata metadata to respond with for query + * @returns a handler for requests + */ +export function fake503ForFinalizeServerHandler( + fakeMetadata: Partial = defaultFakeMetadata +): RequestHandler { + const stats: { + [num: number]: { + currentSize: number; + finalSize: number; + }; + } = {}; + + let nextId: number = 0; + + function statusHeaders(status: string, existing?: Headers): Headers { + if (existing) { + existing['X-Goog-Upload-Status'] = status; + return existing; + } else { + return { 'X-Goog-Upload-Status': status }; + } + } + + function handler( + url: string, + method: string, + content?: ArrayBufferView | Blob | string | null, + headers?: Headers + ): Response { + method = method || 'GET'; + content = content || ''; + headers = headers || {}; + + // const contentLength = + // (content as Blob).size || (content as string).length || 0; + if (headers['X-Goog-Upload-Protocol'] === 'resumable') { + const thisId = nextId; + nextId++; + stats[thisId] = { + currentSize: 0, + finalSize: +headers['X-Goog-Upload-Header-Content-Length'] + }; + + return { + status: 200, + body: '', + headers: statusHeaders('active', { + 'X-Goog-Upload-URL': 'http://example.com?' 
+ thisId + }) + }; + } + + const matches = url.match(/^http:\/\/example\.com\?([0-9]+)$/); + if (matches === null) { + return { status: 400, body: '', headers: {} }; + } + + const id = +matches[1]; + if (!stats[id]) { + return { status: 400, body: 'Invalid upload id', headers: {} }; + } + + if (headers['X-Goog-Upload-Command'] === 'query') { + return { + status: 200, + body: '', + headers: statusHeaders('active', { + 'X-Goog-Upload-Size-Received': stats[id].finalSize.toString() + }) + }; + } + + const commands = (headers['X-Goog-Upload-Command'] as string) + .split(',') + .map(str => { + return str.trim(); + }); + const isFinalize = commands.indexOf('finalize') !== -1; + + if (isFinalize) { + return { + status: 503, + body: JSON.stringify(fakeMetadata), + headers: statusHeaders('final') + }; + } else { + return { + status: 200, + body: JSON.stringify(fakeMetadata), + headers: statusHeaders('active') + }; + } + } + return handler; +} + +/** + * Responds with a 503 for upload. + * @param fakeMetadata metadata to respond with for query + * @returns a handler for requests + */ +export function fake503ForUploadServerHandler( + fakeMetadata: Partial = defaultFakeMetadata +): RequestHandler { + const stats: { + [num: number]: { + currentSize: number; + finalSize: number; + }; + } = {}; + + let nextId: number = 0; + + function statusHeaders(status: string, existing?: Headers): Headers { + if (existing) { + existing['X-Goog-Upload-Status'] = status; + return existing; + } else { + return { 'X-Goog-Upload-Status': status }; + } + } + + function handler( + url: string, + method: string, + content?: ArrayBufferView | Blob | string | null, + headers?: Headers + ): Response { + method = method || 'GET'; + content = content || ''; + headers = headers || {}; + + // const contentLength = + // (content as Blob).size || (content as string).length || 0; + if (headers['X-Goog-Upload-Protocol'] === 'resumable') { + const thisId = nextId; + nextId++; + stats[thisId] = { + currentSize: 0, + finalSize: +headers['X-Goog-Upload-Header-Content-Length'] + }; + + return { + status: 200, + body: '', + headers: statusHeaders('active', { + 'X-Goog-Upload-URL': 'http://example.com?' 
+ thisId + }) + }; + } + + const matches = url.match(/^http:\/\/example\.com\?([0-9]+)$/); + if (matches === null) { + return { status: 400, body: '', headers: {} }; + } + + const id = +matches[1]; + if (!stats[id]) { + return { status: 400, body: 'Invalid upload id', headers: {} }; + } + + if (headers['X-Goog-Upload-Command'] === 'query') { + return { + status: 200, + body: '', + headers: statusHeaders('active', { + 'X-Goog-Upload-Size-Received': stats[id].currentSize.toString() + }) + }; + } + + const commands = (headers['X-Goog-Upload-Command'] as string) + .split(',') + .map(str => { + return str.trim(); + }); + const isUpload = commands.indexOf('upload') !== -1; + + if (isUpload) { + return { + status: 503, + body: JSON.stringify(fakeMetadata), + headers: statusHeaders('active') + }; + } else { + return { + status: 200, + body: JSON.stringify(fakeMetadata), + headers: statusHeaders('final') + }; + } + } + return handler; +} + +export function fakeOneShot503ServerHandler( + fakeMetadata: Partial = defaultFakeMetadata +): RequestHandler { + function statusHeaders(status: string, existing?: Headers): Headers { + if (existing) { + existing['X-Goog-Upload-Status'] = status; + return existing; + } else { + return { 'X-Goog-Upload-Status': status }; + } + } + + function handler( + url: string, + method: string, + content?: ArrayBufferView | Blob | string | null, + headers?: Headers + ): Response { + method = method || 'GET'; + content = content || ''; + headers = headers || {}; + + return { + status: 503, + body: JSON.stringify(fakeMetadata), + headers: statusHeaders('final') + }; + } + return handler; +} diff --git a/packages/storage/test/unit/utils.test.ts b/packages/storage/test/unit/utils.test.ts new file mode 100644 index 00000000000..e02393d13f9 --- /dev/null +++ b/packages/storage/test/unit/utils.test.ts @@ -0,0 +1,40 @@ +/** + * @license + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { expect } from 'chai'; +import { isRetryStatusCode } from '../../src/implementation/utils'; + +describe('Storage Utils', () => { + it('does not deem 400 as a retry status code', () => { + const isRetry = isRetryStatusCode(400, []); + expect(isRetry).to.be.false; + }); + it('deems extra retry codes as retryable ', () => { + let isRetry = isRetryStatusCode(408, []); + expect(isRetry).to.be.true; + isRetry = isRetryStatusCode(429, []); + expect(isRetry).to.be.true; + }); + it('deems error codes beyond 500 as retryable', () => { + const isRetry = isRetryStatusCode(503, []); + expect(isRetry).to.be.true; + }); + it('deems additional error codes as retryable', () => { + const isRetry = isRetryStatusCode(400, [400]); + expect(isRetry).to.be.true; + }); +});