From fb733b79ac2159710a326ab0c05c8e5681484862 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 1 May 2017 09:14:35 -0700 Subject: [PATCH 1/2] Adding support for resumable uploads of unknown size. --- google/resumable_media/_upload.py | 152 +++++++++++++++++--- google/resumable_media/requests/__init__.py | 1 + google/resumable_media/requests/upload.py | 28 +++- tests/unit/test__upload.py | 132 ++++++++++++++--- 4 files changed, 272 insertions(+), 41 deletions(-) diff --git a/google/resumable_media/_upload.py b/google/resumable_media/_upload.py index 74a23fa1..3bbe0843 100644 --- a/google/resumable_media/_upload.py +++ b/google/resumable_media/_upload.py @@ -38,6 +38,8 @@ _CONTENT_TYPE_HEADER = u'content-type' _CONTENT_RANGE_TEMPLATE = u'bytes {:d}-{:d}/{:d}' +_RANGE_UNKNOWN_TEMPLATE = u'bytes {:d}-{:d}/*' +_EMPTY_RANGE_TEMPLATE = u'bytes */{:d}' _BOUNDARY_WIDTH = len(repr(sys.maxsize - 1)) _BOUNDARY_FORMAT = u'==============={{:0{:d}d}}=='.format(_BOUNDARY_WIDTH) _MULTIPART_SEP = b'--' @@ -51,6 +53,9 @@ u'Bytes stream is in unexpected state. ' u'The local stream has had {:d} bytes read from it while ' u'{:d} bytes have already been updated (they should match).') +_STREAM_READ_PAST_TEMPLATE = ( + u'{:d} bytes have been read from the stream, which exceeds ' + u'the expected total {:d}.') _POST = u'POST' _PUT = u'PUT' @@ -127,6 +132,18 @@ def _get_headers(response): """ raise NotImplementedError(u'This implementation is virtual.') + @staticmethod + def _get_body(response): + """Access the response body from an HTTP response. + + Args: + response (object): The HTTP response object. + + Raises: + NotImplementedError: Always, since virtual. + """ + raise NotImplementedError(u'This implementation is virtual.') + class SimpleUpload(UploadBase): """Upload a resource to a Google API. @@ -336,10 +353,20 @@ def bytes_uploaded(self): @property def total_bytes(self): - """Optional[int]: The total number of bytes to be uploaded.""" + """Optional[int]: The total number of bytes to be uploaded. + + If this upload is initiated (via :meth:`initiate`) with + ``stream_final=True``, this value will be populated based on the size + of the ``stream`` being uploaded. (By default ``stream_final=True``.) + + If this upload is initiated with ``stream_final=False``, + :attr:`total_bytes` will be :data:`None` since it cannot be + determined from the stream. + """ return self._total_bytes - def _prepare_initiate_request(self, stream, metadata, content_type): + def _prepare_initiate_request(self, stream, metadata, content_type, + total_bytes=None, stream_final=True): """Prepare the contents of HTTP request to initiate upload. This is everything that must be done before a request that doesn't @@ -354,6 +381,13 @@ def _prepare_initiate_request(self, stream, metadata, content_type): ACL list. content_type (str): The content type of the resource, e.g. a JPEG image has content type ``image/jpeg``. + total_bytes (Optional[int]): The total number of bytes to be + uploaded. If specified, the upload size **will not** be + determined from the stream (even if ``stream_final=True``). + stream_final (Optional[bool]): Indicates if the ``stream`` is + "final" (i.e. no more bytes will be added to it). In this case + we determine the upload size from the size of the stream. If + ``total_bytes`` is passed, this argument will be ignored. 
         Returns:
             Tuple[str, str, bytes, Mapping[str, str]]: The quadruple
@@ -376,12 +410,20 @@ def _prepare_initiate_request(self, stream, metadata, content_type):
 
         self._stream = stream
         self._content_type = content_type
-        self._total_bytes = get_total_bytes(stream)
         headers = {
             _CONTENT_TYPE_HEADER: u'application/json; charset=UTF-8',
             u'x-upload-content-type': content_type,
-            u'x-upload-content-length': u'{:d}'.format(self._total_bytes),
         }
+        # Set the total bytes if possible.
+        if total_bytes is not None:
+            self._total_bytes = total_bytes
+        elif stream_final:
+            self._total_bytes = get_total_bytes(stream)
+        # Add the total bytes to the headers if set.
+        if self._total_bytes is not None:
+            content_length = u'{:d}'.format(self._total_bytes)
+            headers[u'x-upload-content-length'] = content_length
+
         headers.update(self._headers)
         payload = json.dumps(metadata).encode(u'utf-8')
         return _POST, self.upload_url, payload, headers
@@ -405,9 +447,21 @@ def _process_initiate_response(self, response):
         self._resumable_url = _helpers.header_required(
             response, u'location', self._get_headers)
 
-    def initiate(self, transport, stream, metadata, content_type):
+    def initiate(self, transport, stream, metadata, content_type,
+                 total_bytes=None, stream_final=True):
         """Initiate a resumable upload.
 
+        By default, this method assumes your ``stream`` is in a "final"
+        state ready to transmit. However, ``stream_final=False`` can be used
+        to indicate that the size of the resource is not known. This can happen
+        if bytes are being dynamically fed into ``stream``, e.g. if the stream
+        is attached to application logs.
+
+        If ``stream_final=False`` is used, :attr:`chunk_size` bytes will be
+        read from the stream every time :meth:`transmit_next_chunk` is called.
+        If one of those reads produces strictly fewer bytes than the chunk
+        size, the upload will be concluded.
+
         Args:
             transport (object): An object which can make authenticated
                 requests.
@@ -418,6 +472,13 @@ def initiate(self, transport, stream, metadata, content_type):
                 ACL list.
             content_type (str): The content type of the resource, e.g. a JPEG
                 image has content type ``image/jpeg``.
+            total_bytes (Optional[int]): The total number of bytes to be
+                uploaded. If specified, the upload size **will not** be
+                determined from the stream (even if ``stream_final=True``).
+            stream_final (Optional[bool]): Indicates if the ``stream`` is
+                "final" (i.e. no more bytes will be added to it). In this case
+                we determine the upload size from the size of the stream. If
+                ``total_bytes`` is passed, this argument will be ignored.
 
         Raises:
             NotImplementedError: Always, since virtual.
@@ -464,15 +525,13 @@ def _prepare_request(self):
                 u'This upload has not been initiated. 
Please call ' u'initiate() before beginning to transmit chunks.') - start_byte, end_byte, payload = get_next_chunk( - self._stream, self._chunk_size) + start_byte, end_byte, payload, content_range = get_next_chunk( + self._stream, self._chunk_size, self._total_bytes) if start_byte != self.bytes_uploaded: msg = _STREAM_ERROR_TEMPLATE.format( start_byte, self.bytes_uploaded) raise ValueError(msg) - content_range = _CONTENT_RANGE_TEMPLATE.format( - start_byte, end_byte, self._total_bytes) headers = { _CONTENT_TYPE_HEADER: self._content_type, _helpers.CONTENT_RANGE_HEADER: content_range, @@ -511,7 +570,8 @@ def _process_response(self, response): response, (http_client.OK, resumable_media.PERMANENT_REDIRECT), self._get_status_code, callback=self._make_invalid) if status_code == http_client.OK: - self._bytes_uploaded = self._total_bytes + json_response = json.loads(self._get_body(response)) + self._bytes_uploaded = int(json_response[u'size']) # Tombstone the current upload so it cannot be used again. self._finished = True else: @@ -529,6 +589,11 @@ def _process_response(self, response): def transmit_next_chunk(self, transport): """Transmit the next chunk of the resource to be uploaded. + If the current upload was initiated with ``stream_final=False``, + this method will dynamically determine if the upload has completed. + The upload will be considered complete if the stream produces + fewer than :attr:`chunk_size` bytes when a chunk is read from it. + Args: transport (object): An object which can make authenticated requests. @@ -695,7 +760,7 @@ def get_total_bytes(stream): return end_position -def get_next_chunk(stream, chunk_size): +def get_next_chunk(stream, chunk_size, total_bytes): """Get a chunk from an I/O stream. The ``stream`` may have fewer bytes remaining than ``chunk_size`` @@ -703,21 +768,72 @@ def get_next_chunk(stream, chunk_size): ``end_byte == start_byte + chunk_size - 1``. Args: - stream (IO[bytes]): The stream (i.e. file-like object). + stream (IO[bytes]): The stream (i.e. file-like object). + chunk_size (int): The size of the chunk to be read from the ``stream``. + total_bytes (Optional[int]): The (expected) total number of bytes + in the ``stream``. Returns: - Tuple[int, int, bytes]: Triple of the start byte index, the end byte - index and the content in between those bytes. + Tuple[int, int, bytes, str]: Quadruple of: + + * the start byte index + * the end byte index + * the content in between the start and end bytes (inclusive) + * content range header for the chunk (slice) that has been read Raises: ValueError: If there is no data left to consume. This corresponds exactly to the case ``end_byte < start_byte``, which can only occur if ``end_byte == start_byte - 1``. + ValueError: If the stream has been read past ``total_bytes`` (this + is in the case that ``total_bytes`` is not :data:`None`). """ start_byte = stream.tell() payload = stream.read(chunk_size) end_byte = stream.tell() - 1 - if end_byte < start_byte: - raise ValueError( - u'Stream is already exhausted. There is no content remaining.') - return start_byte, end_byte, payload + + num_bytes_read = len(payload) + if total_bytes is None: + if num_bytes_read < chunk_size: + # We now **KNOW** the total number of bytes. + total_bytes = end_byte + 1 + else: + if num_bytes_read == 0: + raise ValueError( + u'Stream is already exhausted. 
There is no content remaining.')
+
+        if end_byte >= total_bytes:
+            msg = _STREAM_READ_PAST_TEMPLATE.format(end_byte + 1, total_bytes)
+            raise ValueError(msg)
+
+    content_range = get_content_range(start_byte, end_byte, total_bytes)
+    return start_byte, end_byte, payload, content_range
+
+
+def get_content_range(start_byte, end_byte, total_bytes):
+    """Convert start, end and total into content range header.
+
+    If ``total_bytes`` is not known, uses "bytes {start}-{end}/*".
+    If we are dealing with an empty range (i.e. ``end_byte < start_byte``)
+    then "bytes */{total}" is used.
+
+    This function **ASSUMES** that if the size is not known, the caller will
+    not also pass an empty range.
+
+    Args:
+        start_byte (int): The start (inclusive) of the byte range.
+        end_byte (int): The end (inclusive) of the byte range.
+        total_bytes (Optional[int]): The number of bytes in the byte
+            range (if known).
+
+    Returns:
+        str: The content range header.
+    """
+    if total_bytes is None:
+        return _RANGE_UNKNOWN_TEMPLATE.format(
+            start_byte, end_byte)
+    elif end_byte < start_byte:
+        return _EMPTY_RANGE_TEMPLATE.format(total_bytes)
+    else:
+        return _CONTENT_RANGE_TEMPLATE.format(
+            start_byte, end_byte, total_bytes)
diff --git a/google/resumable_media/requests/__init__.py b/google/resumable_media/requests/__init__.py
index 64004b2b..1258973d 100644
--- a/google/resumable_media/requests/__init__.py
+++ b/google/resumable_media/requests/__init__.py
@@ -617,6 +617,7 @@ def dont_sleep(seconds):
         payload = {
             u'bucket': bucket,
             u'name': blob_name,
+            u'size': u'{:d}'.format(len(data)),
         }
         fake_response2._content = json.dumps(payload).encode(u'utf-8')
 
diff --git a/google/resumable_media/requests/upload.py b/google/resumable_media/requests/upload.py
index 235c6c5e..54bca79e 100644
--- a/google/resumable_media/requests/upload.py
+++ b/google/resumable_media/requests/upload.py
@@ -121,9 +121,21 @@ class ResumableUpload(_helpers.RequestsMixin, _upload.ResumableUpload):
             :data:`.UPLOAD_CHUNK_SIZE`.
     """
 
-    def initiate(self, transport, stream, metadata, content_type):
+    def initiate(self, transport, stream, metadata, content_type,
+                 total_bytes=None, stream_final=True):
         """Initiate a resumable upload.
 
+        By default, this method assumes your ``stream`` is in a "final"
+        state ready to transmit. However, ``stream_final=False`` can be used
+        to indicate that the size of the resource is not known. This can happen
+        if bytes are being dynamically fed into ``stream``, e.g. if the stream
+        is attached to application logs.
+
+        If ``stream_final=False`` is used, :attr:`chunk_size` bytes will be
+        read from the stream every time :meth:`transmit_next_chunk` is called.
+        If one of those reads produces strictly fewer bytes than the chunk
+        size, the upload will be concluded.
+
         Args:
             transport (~requests.Session): A ``requests`` object which can
                 make authenticated requests.
@@ -134,12 +146,19 @@ def initiate(self, transport, stream, metadata, content_type):
                 ACL list.
             content_type (str): The content type of the resource, e.g. a JPEG
                 image has content type ``image/jpeg``.
+            total_bytes (Optional[int]): The total number of bytes to be
+                uploaded. If specified, the upload size **will not** be
+                determined from the stream (even if ``stream_final=True``).
+            stream_final (Optional[bool]): Indicates if the ``stream`` is
+                "final" (i.e. no more bytes will be added to it). In this case
+                we determine the upload size from the size of the stream. If
+                ``total_bytes`` is passed, this argument will be ignored.
 
         Returns:
             ~requests.Response: The HTTP response returned by ``transport``.
         """
         method, url, payload, headers = self._prepare_initiate_request(
-            stream, metadata, content_type)
+            stream, metadata, content_type, total_bytes=total_bytes,
+            stream_final=stream_final)
         result = _helpers.http_request(
             transport, method, url, data=payload, headers=headers)
         self._process_initiate_response(result)
@@ -148,6 +167,11 @@ def transmit_next_chunk(self, transport):
         """Transmit the next chunk of the resource to be uploaded.
 
+        If the current upload was initiated with ``stream_final=False``,
+        this method will dynamically determine if the upload has completed.
+        The upload will be considered complete if the stream produces
+        fewer than :attr:`chunk_size` bytes when a chunk is read from it.
+
         In the case of failure, an exception is thrown that preserves the
         failed response:
 
diff --git a/tests/unit/test__upload.py b/tests/unit/test__upload.py
index 970b226e..f7d2a18d 100644
--- a/tests/unit/test__upload.py
+++ b/tests/unit/test__upload.py
@@ -110,6 +110,12 @@ def test__get_headers(self):
 
         exc_info.match(u'virtual')
 
+    def test__get_body(self):
+        with pytest.raises(NotImplementedError) as exc_info:
+            _upload.UploadBase._get_body(None)
+
+        exc_info.match(u'virtual')
+
 
 class TestSimpleUpload(object):
 
@@ -316,7 +322,8 @@ def test_total_bytes_property(self):
         upload._total_bytes = 8192
         assert upload.total_bytes == 8192
 
-    def _prepare_initiate_request_helper(self, upload_headers=None):
+    def _prepare_initiate_request_helper(self, upload_headers=None,
+                                         **method_kwargs):
         data = b'some really big big data.'
         stream = io.BytesIO(data)
         metadata = {u'name': u'big-data-file.txt'}
@@ -330,12 +337,15 @@ def _prepare_initiate_request_helper(self, upload_headers=None):
         assert upload._total_bytes is None
         # Call the method and check the output.
         method, url, payload, headers = upload._prepare_initiate_request(
-            stream, metadata, BASIC_CONTENT)
+            stream, metadata, BASIC_CONTENT, **method_kwargs)
         assert payload == b'{"name": "big-data-file.txt"}'
         # Make sure the ``upload``-s state was updated.
         assert upload._stream == stream
         assert upload._content_type == BASIC_CONTENT
-        assert upload._total_bytes == len(data)
+        if method_kwargs == {u'stream_final': False}:
+            assert upload._total_bytes is None
+        else:
+            assert upload._total_bytes == len(data)
         # Make sure headers are untouched.
         assert headers is not upload._headers
         assert upload._headers == orig_headers
@@ -368,6 +378,27 @@ def test__prepare_initiate_request_with_headers(self):
         }
         assert new_headers == expected_headers
 
+    def test__prepare_initiate_request_known_size(self):
+        total_bytes = 25
+        data, headers = self._prepare_initiate_request_helper(
+            total_bytes=total_bytes)
+        assert len(data) == total_bytes
+        expected_headers = {
+            u'content-type': u'application/json; charset=UTF-8',
+            u'x-upload-content-length': u'{:d}'.format(total_bytes),
+            u'x-upload-content-type': BASIC_CONTENT,
+        }
+        assert headers == expected_headers
+
+    def test__prepare_initiate_request_unknown_size(self):
+        _, headers = self._prepare_initiate_request_helper(
+            stream_final=False)
+        expected_headers = {
+            u'content-type': u'application/json; charset=UTF-8',
+            u'x-upload-content-type': BASIC_CONTENT,
+        }
+        assert headers == expected_headers
+
     def test__prepare_initiate_request_already_initiated(self):
         upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB)
         # Fake that the upload has been started. 
@@ -425,6 +456,7 @@ def test_initiate(self): def test__prepare_request_already_finished(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) + assert not upload.invalid upload._finished = True with pytest.raises(ValueError) as exc_info: upload._prepare_request() @@ -443,7 +475,8 @@ def test__prepare_request_invalid(self): def test__prepare_request_not_initiated(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) - assert not upload._finished + assert not upload.finished + assert not upload.invalid assert upload._resumable_url is None with pytest.raises(ValueError) as exc_info: upload._prepare_request() @@ -536,15 +569,18 @@ def test__process_response_success(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) _fix_up_virtual(upload) - upload._total_bytes = mock.sentinel.total_bytes + total_bytes = 158 + response_body = u'{{"size": "{:d}"}}'.format(total_bytes) # Check status before. assert upload._bytes_uploaded == 0 assert not upload._finished - response = _make_response() + response = mock.Mock( + content=response_body, status_code=http_client.OK, + spec=[u'content', u'status_code']) ret_val = upload._process_response(response) assert ret_val is None # Check status after. - assert upload._bytes_uploaded is mock.sentinel.total_bytes + assert upload._bytes_uploaded == total_bytes assert upload._finished def test__process_response_partial_no_range(self): @@ -806,26 +842,75 @@ def test_get_total_bytes(): class Test_get_next_chunk(object): - def test_exhausted(self): + def test_exhausted_known_size(self): data = b'the end' stream = io.BytesIO(data) stream.seek(len(data)) - with pytest.raises(ValueError): - _upload.get_next_chunk(stream, 1) + with pytest.raises(ValueError) as exc_info: + _upload.get_next_chunk(stream, 1, len(data)) + + exc_info.match( + u'Stream is already exhausted. 
There is no content remaining.')
+
+    def test_read_past_known_size(self):
+        data = b'more content than we expected'
+        stream = io.BytesIO(data)
+        chunk_size = len(data)
+        total_bytes = chunk_size - 3
+
+        with pytest.raises(ValueError) as exc_info:
+            _upload.get_next_chunk(stream, chunk_size, total_bytes)
+
+        exc_info.match(u'bytes have been read from the stream')
+        exc_info.match(u'exceeds the expected total')
 
-    def test_success(self):
-        stream = io.BytesIO(b'0123456789')
+    def test_success_known_size(self):
+        data = b'0123456789'
+        stream = io.BytesIO(data)
+        total_bytes = len(data)
         chunk_size = 3
         # Splits into 4 chunks: 012, 345, 678, 9
-        result0 = _upload.get_next_chunk(stream, chunk_size)
-        result1 = _upload.get_next_chunk(stream, chunk_size)
-        result2 = _upload.get_next_chunk(stream, chunk_size)
-        result3 = _upload.get_next_chunk(stream, chunk_size)
-        assert result0 == (0, 2, b'012')
-        assert result1 == (3, 5, b'345')
-        assert result2 == (6, 8, b'678')
-        assert result3 == (9, 9, b'9')
-        assert stream.tell() == 10
+        result0 = _upload.get_next_chunk(stream, chunk_size, total_bytes)
+        result1 = _upload.get_next_chunk(stream, chunk_size, total_bytes)
+        result2 = _upload.get_next_chunk(stream, chunk_size, total_bytes)
+        result3 = _upload.get_next_chunk(stream, chunk_size, total_bytes)
+        assert result0 == (0, 2, b'012', u'bytes 0-2/10')
+        assert result1 == (3, 5, b'345', u'bytes 3-5/10')
+        assert result2 == (6, 8, b'678', u'bytes 6-8/10')
+        assert result3 == (9, 9, b'9', u'bytes 9-9/10')
+        assert stream.tell() == total_bytes
+
+    def test_success_unknown_size(self):
+        data = b'abcdefghij'
+        stream = io.BytesIO(data)
+        chunk_size = 6
+        # Splits into 2 chunks: abcdef, ghij
+        result0 = _upload.get_next_chunk(stream, chunk_size, None)
+        result1 = _upload.get_next_chunk(stream, chunk_size, None)
+        assert result0 == (0, chunk_size - 1, b'abcdef', u'bytes 0-5/*')
+        assert result1 == (chunk_size, len(data) - 1, b'ghij', u'bytes 6-9/10')
+        assert stream.tell() == len(data)
+
+        # Do the same when the chunk size evenly divides len(data)
+        stream.seek(0)
+        chunk_size = len(data)
+        # Splits into 2 chunks: `data` and empty string
+        result0 = _upload.get_next_chunk(stream, chunk_size, None)
+        result1 = _upload.get_next_chunk(stream, chunk_size, None)
+        assert result0 == (0, len(data) - 1, data, u'bytes 0-9/*')
+        assert result1 == (len(data), len(data) - 1, b'', u'bytes */10')
+        assert stream.tell() == len(data)
+
+
+class Test_get_content_range(object):
+
+    def test_known_size(self):
+        result = _upload.get_content_range(5, 10, 40)
+        assert result == u'bytes 5-10/40'
+
+    def test_unknown_size(self):
+        result = _upload.get_content_range(1000, 10000, None)
+        assert result == u'bytes 1000-10000/*'
 
 
 def _make_response(status_code=http_client.OK, headers=None):
@@ -843,6 +928,11 @@ def _get_headers(response):
     return response.headers
 
 
+def _get_body(response):
+    return response.content
+
+
 def _fix_up_virtual(upload):
     upload._get_status_code = _get_status_code
     upload._get_headers = _get_headers
+    upload._get_body = _get_body

From 412acefc91778367c0b1b3127361b7af7370d4fd Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Mon, 1 May 2017 09:15:09 -0700
Subject: [PATCH 2/2] Updating docs after adding support for unknown size upload. 
--- .../google.resumable_media.requests.html | 28 ++++++++++++++++++- ...oogle.resumable_media.requests.upload.html | 28 ++++++++++++++++++- docs/latest/searchindex.js | 2 +- 3 files changed, 55 insertions(+), 3 deletions(-) diff --git a/docs/latest/google.resumable_media.requests.html b/docs/latest/google.resumable_media.requests.html index 87c24c32..e994d61f 100644 --- a/docs/latest/google.resumable_media.requests.html +++ b/docs/latest/google.resumable_media.requests.html @@ -808,8 +808,17 @@

Resumable Uploads
-initiate(transport, stream, metadata, content_type)
+initiate(transport, stream, metadata, content_type, total_bytes=None, stream_final=True)

Initiate a resumable upload.

+

By default, this method assumes your stream is in a “final” +state ready to transmit. However, stream_final=False can be used +to indicate that the size of the resource is not known. This can happen +if bytes are being dynamically fed into stream, e.g. if the stream +is attached to application logs.

+

If stream_final=False is used, chunk_size bytes will be
+read from the stream every time transmit_next_chunk() is called.
+If one of those reads produces strictly fewer bytes than the chunk
+size, the upload will be concluded.

@@ -824,6 +833,13 @@

Resumable Uploadsstr) – The content type of the resource, e.g. a JPEG image has content type image/jpeg. +
  • total_bytes (Optional [ int ]) – The total number of bytes to be +uploaded. If specified, the upload size will not be +determined from the stream (even if stream_final=True).
  • +
  • stream_final (Optional [ bool ]) – Indicates if the stream is +“final” (i.e. no more bytes will be added to it). In this case +we determine the upload size from the size of the stream. If +total_bytes is passed, this argument will be ignored.
  • @@ -879,12 +895,22 @@

    Resumable Uploads total_bytes

    Optional [ int ] – The total number of bytes to be uploaded.

    +

    If this upload is initiated (via initiate()) with +stream_final=True, this value will be populated based on the size +of the stream being uploaded. (By default stream_final=True.)

    +

    If this upload is initiated with stream_final=False, +total_bytes will be None since it cannot be +determined from the stream.

    transmit_next_chunk(transport)

    Transmit the next chunk of the resource to be uploaded.

    +

    If the current upload was initiated with stream_final=False, +this method will dynamically determine if the upload has completed. +The upload will be considered complete if the stream produces +fewer than chunk_size bytes when a chunk is read from it.

    In the case of failure, an exception is thrown that preserves the failed response:

    >>> error = None
    diff --git a/docs/latest/google.resumable_media.requests.upload.html b/docs/latest/google.resumable_media.requests.upload.html
    index dda4c58d..8046a4aa 100644
    --- a/docs/latest/google.resumable_media.requests.upload.html
    +++ b/docs/latest/google.resumable_media.requests.upload.html
    @@ -295,8 +295,17 @@
     
     
    -initiate(transport, stream, metadata, content_type)
    +initiate(transport, stream, metadata, content_type, total_bytes=None, stream_final=True)

    Initiate a resumable upload.

    +

    By default, this method assumes your stream is in a “final” +state ready to transmit. However, stream_final=False can be used +to indicate that the size of the resource is not known. This can happen +if bytes are being dynamically fed into stream, e.g. if the stream +is attached to application logs.

    +

If stream_final=False is used, chunk_size bytes will be
+read from the stream every time transmit_next_chunk() is called.
+If one of those reads produces strictly fewer bytes than the chunk
+size, the upload will be concluded.

    @@ -311,6 +320,13 @@ ACL list.
  • content_type (str) – The content type of the resource, e.g. a JPEG image has content type image/jpeg.
  • +
  • total_bytes (Optional [ int ]) – The total number of bytes to be +uploaded. If specified, the upload size will not be +determined from the stream (even if stream_final=True).
  • +
  • stream_final (Optional [ bool ]) – Indicates if the stream is +“final” (i.e. no more bytes will be added to it). In this case +we determine the upload size from the size of the stream. If +total_bytes is passed, this argument will be ignored.
  • @@ -366,12 +382,22 @@
    total_bytes

    Optional [ int ] – The total number of bytes to be uploaded.

    +

    If this upload is initiated (via initiate()) with +stream_final=True, this value will be populated based on the size +of the stream being uploaded. (By default stream_final=True.)

    +

    If this upload is initiated with stream_final=False, +total_bytes will be None since it cannot be +determined from the stream.

    transmit_next_chunk(transport)

    Transmit the next chunk of the resource to be uploaded.

    +

    If the current upload was initiated with stream_final=False, +this method will dynamically determine if the upload has completed. +The upload will be considered complete if the stream produces +fewer than chunk_size bytes when a chunk is read from it.

    In the case of failure, an exception is thrown that preserves the failed response:

    >>> error = None
    diff --git a/docs/latest/searchindex.js b/docs/latest/searchindex.js
    index e9459c10..da8692e2 100644
    --- a/docs/latest/searchindex.js
    +++ b/docs/latest/searchindex.js
    @@ -1 +1 @@
    -Search.setIndex({docnames:["google.resumable_media.constants","google.resumable_media.exceptions","google.resumable_media.requests","google.resumable_media.requests.download","google.resumable_media.requests.upload","index"],envversion:51,filenames:["google.resumable_media.constants.rst","google.resumable_media.exceptions.rst","google.resumable_media.requests.rst","google.resumable_media.requests.download.rst","google.resumable_media.requests.upload.rst","index.rst"],objects:{"google.resumable_media":{constants:[0,0,0,"-"],exceptions:[1,0,0,"-"],requests:[2,0,0,"-"]},"google.resumable_media.constants":{PERMANENT_REDIRECT:[0,1,1,""],TOO_MANY_REQUESTS:[0,1,1,""],UPLOAD_CHUNK_SIZE:[0,1,1,""]},"google.resumable_media.exceptions":{InvalidResponse:[1,2,1,""]},"google.resumable_media.exceptions.InvalidResponse":{args:[1,3,1,""],response:[1,3,1,""],with_traceback:[1,4,1,""]},"google.resumable_media.requests":{ChunkedDownload:[2,5,1,""],Download:[2,5,1,""],MultipartUpload:[2,5,1,""],ResumableUpload:[2,5,1,""],SimpleUpload:[2,5,1,""],download:[3,0,0,"-"],upload:[4,0,0,"-"]},"google.resumable_media.requests.ChunkedDownload":{bytes_downloaded:[2,3,1,""],chunk_size:[2,3,1,""],consume_next_chunk:[2,4,1,""],end:[2,3,1,""],finished:[2,3,1,""],invalid:[2,3,1,""],media_url:[2,3,1,""],start:[2,3,1,""],total_bytes:[2,3,1,""]},"google.resumable_media.requests.Download":{consume:[2,4,1,""],end:[2,3,1,""],finished:[2,3,1,""],media_url:[2,3,1,""],start:[2,3,1,""]},"google.resumable_media.requests.MultipartUpload":{finished:[2,3,1,""],transmit:[2,4,1,""],upload_url:[2,3,1,""]},"google.resumable_media.requests.ResumableUpload":{bytes_uploaded:[2,3,1,""],chunk_size:[2,3,1,""],finished:[2,3,1,""],initiate:[2,4,1,""],invalid:[2,3,1,""],recover:[2,4,1,""],resumable_url:[2,3,1,""],total_bytes:[2,3,1,""],transmit_next_chunk:[2,4,1,""],upload_url:[2,3,1,""]},"google.resumable_media.requests.SimpleUpload":{finished:[2,3,1,""],transmit:[2,4,1,""],upload_url:[2,3,1,""]},"google.resumable_media.requests.download":{ChunkedDownload:[3,5,1,""],Download:[3,5,1,""]},"google.resumable_media.requests.download.ChunkedDownload":{bytes_downloaded:[3,3,1,""],chunk_size:[3,3,1,""],consume_next_chunk:[3,4,1,""],end:[3,3,1,""],finished:[3,3,1,""],invalid:[3,3,1,""],media_url:[3,3,1,""],start:[3,3,1,""],total_bytes:[3,3,1,""]},"google.resumable_media.requests.download.Download":{consume:[3,4,1,""],end:[3,3,1,""],finished:[3,3,1,""],media_url:[3,3,1,""],start:[3,3,1,""]},"google.resumable_media.requests.upload":{MultipartUpload:[4,5,1,""],ResumableUpload:[4,5,1,""],SimpleUpload:[4,5,1,""]},"google.resumable_media.requests.upload.MultipartUpload":{finished:[4,3,1,""],transmit:[4,4,1,""],upload_url:[4,3,1,""]},"google.resumable_media.requests.upload.ResumableUpload":{bytes_uploaded:[4,3,1,""],chunk_size:[4,3,1,""],finished:[4,3,1,""],initiate:[4,4,1,""],invalid:[4,3,1,""],recover:[4,4,1,""],resumable_url:[4,3,1,""],total_bytes:[4,3,1,""],transmit_next_chunk:[4,4,1,""],upload_url:[4,3,1,""]},"google.resumable_media.requests.upload.SimpleUpload":{finished:[4,3,1,""],transmit:[4,4,1,""],upload_url:[4,3,1,""]},google:{resumable_media:[5,0,0,"-"]}},objnames:{"0":["py","module","Python module"],"1":["py","data","Python data"],"2":["py","exception","Python exception"],"3":["py","attribute","Python attribute"],"4":["py","method","Python method"],"5":["py","class","Python 
class"]},objtypes:{"0":"py:module","1":"py:data","2":"py:exception","3":"py:attribute","4":"py:method","5":"py:class"},terms:{"1gb":2,"1mb":2,"4pb4caq":2,"50mb":2,"byte":[2,3,4],"case":[2,4],"class":[1,2,3,4],"default":[2,3],"final":2,"import":2,"int":[0,2,3,4],"public":5,"return":[1,2,3,4],"short":2,"true":2,"try":[2,4],For:[0,2],GCS:2,QPS:2,The:[1,2,3,4],These:[2,4],Useful:0,Using:[2,4],__traceback__:1,_download:[2,3],_helper:[2,3,4],_upload:[2,4],abcdef189xy_super_seri:2,abl:[2,3],about:2,accept:2,access:2,achiev:2,acl:[2,4],actual:2,added:0,addit:2,after:2,again:[2,4],all:[0,2,4],allow:2,along:2,alreadi:[2,3],also:[2,4],alt:2,among:2,ani:2,api:[2,3,4],arg:1,argument:[1,2],assign:[2,4],associ:2,assum:2,auth:2,authent:[2,3,4],authorizedsess:2,avoid:2,base:[0,1,2,3,4],basic:2,been:[0,2,3,4],befor:[2,4],begin:[2,3,4],being:2,best:2,big:2,blob_nam:2,bool:[2,3,4],both:[2,4],bucket:2,bytes_download:[2,3],bytes_upload:[2,4],bytesio:2,call:[2,3,4],caller:2,can:[0,2,3,4],cannot:2,caught_exc:[2,4],caus:[1,2],chang:2,check:2,chunk:[0,3,4],chunk_siz:[2,3,4],chunkeddownload:[2,3],client:[0,2],cloud:2,code:[0,2,4],color:2,com:2,come:[0,2],complet:[0,2,3,4],concaten:[2,3],connect:2,consid:2,construct:2,consum:[2,3],consume_next_chunk:[2,3],contain:[2,3,4,5],content:4,content_typ:[2,4],contenttyp:2,contrast:2,correct:1,cours:2,creat:2,credenti:2,current:[2,3,4],custom:1,data:[2,3,4],determin:[2,4],deviat:2,devstorag:2,disk:2,doesn:2,done:[2,4],download:[0,1,5],drop:2,due:[2,4],each:[2,3,4,5],enclos:2,encrypt:[2,3,4],end:[2,3],error:[1,2,4],essenti:2,even:2,except:[2,4,5],expect:[2,3,4],extra:[2,3,4],fail:[2,3,4],failur:[1,2,4],fals:2,file:[2,3,4],finish:[2,3,4],first:[2,3],fit:2,flag:[2,3,4],fly:2,format:2,from:[2,3,4],fulfil:[2,4],gener:[0,2,5],get:[2,4],googleapi:2,grurpl:2,gupload:2,has:[0,2,3,4,5],have:[2,3,4],header:[2,3,4],helper:[2,3],here:4,how:2,howev:[2,3],http:[0,1,2,3,4],httpstatu:[2,4],imag:[2,4],implement:2,indic:[0,2,3,4],inform:0,initi:[2,4],input:2,instanc:2,intend:2,interfac:[2,5],internet:2,invalid:[2,3,4],invalidrespons:[1,2,4],jpeg:[2,4],json:2,json_respons:2,kei:[2,3],know:2,known:2,larg:2,last:[2,3],latenc:2,len:2,length:2,let:2,librari:[2,5],like:[2,3,4],limit:[0,2],list:[2,4],live:2,locat:2,m0xlesx9:2,mai:2,major:5,make:[2,3,4],manag:[2,3],map:[2,3,4],md5hash:2,media:[0,2,3,4,5],media_url:[2,3],memori:2,metadata:[2,4],method:[2,4],modul:5,more:[0,2],much:2,multi:2,multipart:4,multipartupload:[2,4],multipl:[0,2,4],must:[0,2,4],name:2,need:0,neg:[2,3],neither:[2,3],next:[2,3,4],none:[1,2,3,4],nor:[2,3],number:[2,3,4],object:[1,2,3,4],obtain:2,occur:[2,3,4],onc:2,one:[2,4],onli:2,option:[2,3,4],other:2,out:2,over:2,packag:5,parallel:2,paramet:[1,2,3,4],part:2,pass:1,payload:4,perman:0,permanent_redirect:0,pip:5,plain:2,poor:2,portion:2,posit:[1,2],practic:2,preserv:[2,4],progress:[2,4],provid:[2,3],purpos:5,python:0,rais:[2,3,4],rang:[2,3],rare:2,rate:0,rather:2,raw:2,read:2,read_onli:2,recov:[2,4],redirect:0,reduc:2,request:5,requestsmixin:[2,3,4],requir:2,resouc:2,resourc:[2,3,4],respons:[1,2,3,4],response0:2,response1:2,response2:2,resum:[0,4,5],resumable_url:[2,4],resumableupload:[2,4],retriev:[2,3],rfc:0,ro_scop:2,same:2,scope:2,second:2,see:[0,2],seek:2,self:1,send:[2,4],sens:2,sent:[2,3,4],seri:2,server:[2,4],servic:0,session:[2,3,4],set:1,should:[2,3,4],similar:2,simpl:4,simplest:2,simpleupload:[2,4],singl:[2,4],size:[2,4],slice:[2,3],small:[2,4],smdii:2,some:[0,2,5],specif:5,specifi:[2,3],speed:2,start:[2,3],state:[1,2,3,4],statu:[0,2,4],storag:2,str:[2,3,4],stream:[2,3,4]
,sub:2,support:[2,3,4],sure:[2,4],tailor:5,take:2,task:2,tell:[2,4],text:2,than:2,thei:[2,3],thi:[2,3,4,5],thing:2,three:2,thrown:[2,4],too:2,too_many_request:0,total:[2,3,4],total_byt:[2,3,4],tr_request:2,transmit:[2,4],transmit_next_chunk:[2,4],transport:[3,4,5],tupl:1,two:2,type:[1,2,3,4],typic:[1,2,3],unknown:2,until:[2,4],upgrad:5,upload:[0,1,5],upload_chunk_s:[0,2,4],upload_id:2,upload_url:[2,4],uploadid:2,uploadtyp:2,url:[2,3,4],url_templ:2,usag:[2,3],use:2,used:[0,2,3,4],user:[2,4],uses:[2,5],using:2,util:[2,5],valid:[2,4],valueerror:[2,3,4],veri:2,verifi:[2,4],well:2,when:[2,4],where:[2,4],which:[1,2,3,4],with_traceback:1,without:2,would:2,write:[2,3],written:2,www:2},titles:["google.resumable_media.constants module","google.resumable_media.exceptions module","google.resumable_media.requests package","google.resumable_media.requests.download module","google.resumable_media.requests.upload module","google.resumable_media"],titleterms:{author:2,chunk:2,constant:0,content:2,download:[2,3],except:1,googl:[0,1,2,3,4,5],instal:5,modul:[0,1,2,3,4],multipart:2,packag:2,request:[2,3,4],resum:2,resumable_media:[0,1,2,3,4,5],simpl:2,submodul:2,subpackag:5,transport:2,upload:[2,4]}})
    \ No newline at end of file
    +Search.setIndex({docnames:["google.resumable_media.constants","google.resumable_media.exceptions","google.resumable_media.requests","google.resumable_media.requests.download","google.resumable_media.requests.upload","index"],envversion:51,filenames:["google.resumable_media.constants.rst","google.resumable_media.exceptions.rst","google.resumable_media.requests.rst","google.resumable_media.requests.download.rst","google.resumable_media.requests.upload.rst","index.rst"],objects:{"google.resumable_media":{constants:[0,0,0,"-"],exceptions:[1,0,0,"-"],requests:[2,0,0,"-"]},"google.resumable_media.constants":{PERMANENT_REDIRECT:[0,1,1,""],TOO_MANY_REQUESTS:[0,1,1,""],UPLOAD_CHUNK_SIZE:[0,1,1,""]},"google.resumable_media.exceptions":{InvalidResponse:[1,2,1,""]},"google.resumable_media.exceptions.InvalidResponse":{args:[1,3,1,""],response:[1,3,1,""],with_traceback:[1,4,1,""]},"google.resumable_media.requests":{ChunkedDownload:[2,5,1,""],Download:[2,5,1,""],MultipartUpload:[2,5,1,""],ResumableUpload:[2,5,1,""],SimpleUpload:[2,5,1,""],download:[3,0,0,"-"],upload:[4,0,0,"-"]},"google.resumable_media.requests.ChunkedDownload":{bytes_downloaded:[2,3,1,""],chunk_size:[2,3,1,""],consume_next_chunk:[2,4,1,""],end:[2,3,1,""],finished:[2,3,1,""],invalid:[2,3,1,""],media_url:[2,3,1,""],start:[2,3,1,""],total_bytes:[2,3,1,""]},"google.resumable_media.requests.Download":{consume:[2,4,1,""],end:[2,3,1,""],finished:[2,3,1,""],media_url:[2,3,1,""],start:[2,3,1,""]},"google.resumable_media.requests.MultipartUpload":{finished:[2,3,1,""],transmit:[2,4,1,""],upload_url:[2,3,1,""]},"google.resumable_media.requests.ResumableUpload":{bytes_uploaded:[2,3,1,""],chunk_size:[2,3,1,""],finished:[2,3,1,""],initiate:[2,4,1,""],invalid:[2,3,1,""],recover:[2,4,1,""],resumable_url:[2,3,1,""],total_bytes:[2,3,1,""],transmit_next_chunk:[2,4,1,""],upload_url:[2,3,1,""]},"google.resumable_media.requests.SimpleUpload":{finished:[2,3,1,""],transmit:[2,4,1,""],upload_url:[2,3,1,""]},"google.resumable_media.requests.download":{ChunkedDownload:[3,5,1,""],Download:[3,5,1,""]},"google.resumable_media.requests.download.ChunkedDownload":{bytes_downloaded:[3,3,1,""],chunk_size:[3,3,1,""],consume_next_chunk:[3,4,1,""],end:[3,3,1,""],finished:[3,3,1,""],invalid:[3,3,1,""],media_url:[3,3,1,""],start:[3,3,1,""],total_bytes:[3,3,1,""]},"google.resumable_media.requests.download.Download":{consume:[3,4,1,""],end:[3,3,1,""],finished:[3,3,1,""],media_url:[3,3,1,""],start:[3,3,1,""]},"google.resumable_media.requests.upload":{MultipartUpload:[4,5,1,""],ResumableUpload:[4,5,1,""],SimpleUpload:[4,5,1,""]},"google.resumable_media.requests.upload.MultipartUpload":{finished:[4,3,1,""],transmit:[4,4,1,""],upload_url:[4,3,1,""]},"google.resumable_media.requests.upload.ResumableUpload":{bytes_uploaded:[4,3,1,""],chunk_size:[4,3,1,""],finished:[4,3,1,""],initiate:[4,4,1,""],invalid:[4,3,1,""],recover:[4,4,1,""],resumable_url:[4,3,1,""],total_bytes:[4,3,1,""],transmit_next_chunk:[4,4,1,""],upload_url:[4,3,1,""]},"google.resumable_media.requests.upload.SimpleUpload":{finished:[4,3,1,""],transmit:[4,4,1,""],upload_url:[4,3,1,""]},google:{resumable_media:[5,0,0,"-"]}},objnames:{"0":["py","module","Python module"],"1":["py","data","Python data"],"2":["py","exception","Python exception"],"3":["py","attribute","Python attribute"],"4":["py","method","Python method"],"5":["py","class","Python 
class"]},objtypes:{"0":"py:module","1":"py:data","2":"py:exception","3":"py:attribute","4":"py:method","5":"py:class"},terms:{"1gb":2,"1mb":2,"4pb4caq":2,"50mb":2,"byte":[2,3,4],"case":[2,4],"class":[1,2,3,4],"default":[2,3,4],"final":[2,4],"import":2,"int":[0,2,3,4],"public":5,"return":[1,2,3,4],"short":2,"true":[2,4],"try":[2,4],For:[0,2],GCS:2,QPS:2,The:[1,2,3,4],These:[2,4],Useful:0,Using:[2,4],__traceback__:1,_download:[2,3],_helper:[2,3,4],_upload:[2,4],abcdef189xy_super_seri:2,abl:[2,3],about:2,accept:2,access:2,achiev:2,acl:[2,4],actual:2,added:[0,2,4],addit:2,after:2,again:[2,4],all:[0,2,4],allow:2,along:2,alreadi:[2,3],also:[2,4],alt:2,among:2,ani:2,api:[2,3,4],applic:[2,4],arg:1,argument:[1,2,4],assign:[2,4],associ:2,assum:[2,4],attach:[2,4],auth:2,authent:[2,3,4],authorizedsess:2,avoid:2,base:[0,1,2,3,4],basic:2,been:[0,2,3,4],befor:[2,4],begin:[2,3,4],being:[2,4],best:2,big:2,bite:[2,4],blob_nam:2,bool:[2,3,4],both:[2,4],bucket:2,bytes_download:[2,3],bytes_upload:[2,4],bytesio:2,call:[2,3,4],caller:2,can:[0,2,3,4],cannot:[2,4],caught_exc:[2,4],caus:[1,2],chang:2,check:2,chunk:[0,3,4],chunk_siz:[2,3,4],chunkeddownload:[2,3],client:[0,2],cloud:2,code:[0,2,4],color:2,com:2,come:[0,2],complet:[0,2,3,4],concaten:[2,3],conclud:[2,4],connect:2,consid:[2,4],construct:2,consum:[2,3],consume_next_chunk:[2,3],contain:[2,3,4,5],content:4,content_typ:[2,4],contenttyp:2,contrast:2,correct:1,cours:2,creat:2,credenti:2,current:[2,3,4],custom:1,data:[2,3,4],determin:[2,4],deviat:2,devstorag:2,disk:2,doesn:2,done:[2,4],download:[0,1,5],drop:2,due:[2,4],dynam:[2,4],each:[2,3,4,5],enclos:2,encrypt:[2,3,4],end:[2,3],error:[1,2,4],essenti:2,even:[2,4],everi:[2,4],except:[2,4,5],expect:[2,3,4],extra:[2,3,4],fail:[2,3,4],failur:[1,2,4],fals:[2,4],fed:[2,4],fewer:[2,4],file:[2,3,4],finish:[2,3,4],first:[2,3],fit:2,flag:[2,3,4],fly:2,format:2,from:[2,3,4],fulfil:[2,4],gener:[0,2,5],get:[2,4],googleapi:2,grurpl:2,gupload:2,happen:[2,4],has:[0,2,3,4,5],have:[2,3,4],header:[2,3,4],helper:[2,3],here:4,how:2,howev:[2,3,4],http:[0,1,2,3,4],httpstatu:[2,4],ignor:[2,4],imag:[2,4],implement:2,indic:[0,2,3,4],inform:0,initi:[2,4],input:2,instanc:2,intend:2,interfac:[2,5],internet:2,invalid:[2,3,4],invalidrespons:[1,2,4],jpeg:[2,4],json:2,json_respons:2,kei:[2,3],know:2,known:[2,4],larg:2,last:[2,3],latenc:2,len:2,length:2,let:2,librari:[2,5],like:[2,3,4],limit:[0,2],list:[2,4],live:2,locat:2,log:[2,4],m0xlesx9:2,mai:2,major:5,make:[2,3,4],manag:[2,3],map:[2,3,4],md5hash:2,media:[0,2,3,4,5],media_url:[2,3],memori:2,metadata:[2,4],method:[2,4],modul:5,more:[0,2,4],much:2,multi:2,multipart:4,multipartupload:[2,4],multipl:[0,2,4],must:[0,2,4],name:2,need:0,neg:[2,3],neither:[2,3],next:[2,3,4],none:[1,2,3,4],nor:[2,3],number:[2,3,4],object:[1,2,3,4],obtain:2,occur:[2,3,4],onc:2,one:[2,4],onli:2,option:[2,3,4],other:2,out:2,over:2,packag:5,parallel:2,paramet:[1,2,3,4],part:2,pass:[1,2,4],payload:4,perman:0,permanent_redirect:0,pip:5,plain:2,poor:2,popul:[2,4],portion:2,posit:[1,2],practic:2,preserv:[2,4],produc:[2,4],progress:[2,4],provid:[2,3],purpos:5,python:0,rais:[2,3,4],rang:[2,3],rare:2,rate:0,rather:2,raw:2,read:[2,4],read_onli:2,readi:[2,4],recov:[2,4],redirect:0,reduc:2,request:5,requestsmixin:[2,3,4],requir:2,resouc:2,resourc:[2,3,4],respons:[1,2,3,4],response0:2,response1:2,response2:2,resum:[0,4,5],resumable_url:[2,4],resumableupload:[2,4],retriev:[2,3],rfc:0,ro_scop:2,same:2,scope:2,second:2,see:[0,2],seek:2,self:1,send:[2,4],sens:2,sent:[2,3,4],seri:2,server:[2,4],servic:0,session:[2,3,4],set:1,should:[2,
3,4],similar:2,simpl:4,simplest:2,simpleupload:[2,4],sinc:[2,4],singl:[2,4],size:[2,4],slice:[2,3],small:[2,4],smdii:2,some:[0,2,5],specif:5,specifi:[2,3,4],speed:2,start:[2,3],state:[1,2,3,4],statu:[0,2,4],storag:2,str:[2,3,4],stream:[2,3,4],stream_fin:[2,4],strictli:[2,4],sub:2,support:[2,3,4],sure:[2,4],tailor:5,take:2,task:2,tell:[2,4],text:2,than:[2,4],thei:[2,3],thi:[2,3,4,5],thing:2,those:[2,4],three:2,thrown:[2,4],time:[2,4],too:2,too_many_request:0,total:[2,3,4],total_byt:[2,3,4],tr_request:2,transmit:[2,4],transmit_next_chunk:[2,4],transport:[3,4,5],tupl:1,two:2,type:[1,2,3,4],typic:[1,2,3],unknown:2,until:[2,4],upgrad:5,upload:[0,1,5],upload_chunk_s:[0,2,4],upload_id:2,upload_url:[2,4],uploadid:2,uploadtyp:2,url:[2,3,4],url_templ:2,usag:[2,3],use:2,used:[0,2,3,4],user:[2,4],uses:[2,5],using:2,util:[2,5],valid:[2,4],valu:[2,4],valueerror:[2,3,4],veri:2,verifi:[2,4],via:[2,4],well:2,when:[2,4],where:[2,4],which:[1,2,3,4],with_traceback:1,without:2,would:2,write:[2,3],written:2,www:2,your:[2,4]},titles:["google.resumable_media.constants module","google.resumable_media.exceptions module","google.resumable_media.requests package","google.resumable_media.requests.download module","google.resumable_media.requests.upload module","google.resumable_media"],titleterms:{author:2,chunk:2,constant:0,content:2,download:[2,3],except:1,googl:[0,1,2,3,4,5],instal:5,modul:[0,1,2,3,4],multipart:2,packag:2,request:[2,3,4],resum:2,resumable_media:[0,1,2,3,4,5],simpl:2,submodul:2,subpackag:5,transport:2,upload:[2,4]}})
    \ No newline at end of file
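
Usage sketch: a minimal example of how the unknown-size flow introduced above is meant to be driven, assuming an already-authorized transport (e.g. google.auth's AuthorizedSession) and the standard GCS resumable-upload initiation URL; the bucket name, object name and the in-memory "log" stream are placeholders, not part of the patches.

    import io

    import google.auth
    from google.auth.transport.requests import AuthorizedSession
    from google.resumable_media.requests import ResumableUpload

    # Assumed setup: application default credentials and a placeholder bucket.
    credentials, _ = google.auth.default()
    transport = AuthorizedSession(credentials)
    upload_url = (
        u'https://www.googleapis.com/upload/storage/v1/b/'
        u'my-bucket/o?uploadType=resumable')
    chunk_size = 256 * 1024  # Chunk sizes must be a multiple of 256 KB.

    # Stand-in for a stream whose final size is not known up front,
    # e.g. application logs that are still being written.
    stream = io.BytesIO(b'log line 1\nlog line 2\n')
    metadata = {u'name': u'application-logs.txt'}

    upload = ResumableUpload(upload_url, chunk_size)
    # ``stream_final=False`` means no ``x-upload-content-length`` header is
    # sent and ``total_bytes`` stays ``None`` until a short read occurs.
    upload.initiate(
        transport, stream, metadata, u'text/plain', stream_final=False)

    # Each call reads up to ``chunk_size`` bytes; the first read that comes
    # back short concludes the upload (see ``get_next_chunk`` above).
    while not upload.finished:
        upload.transmit_next_chunk(transport)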