Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

GCS uploading: disable chunked transfer encoding if contentLength is set #853

Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
67 changes: 41 additions & 26 deletions lib/common/util.js
Original file line number Diff line number Diff line change
Expand Up @@ -197,57 +197,57 @@ util.parseApiResp = parseApiResp;
* outgoing writable stream.
*
* @param {Duplexify} dup - Duplexify stream.
* @param {object} options - Configuration object.
* @param {module:common/connection} options.connection - A connection instance,
* @param {object} config - Configuration object.
* @param {module:common/connection} config.connection - A connection instance,
* used to get a token with and send the request through.
* @param {object} options.metadata - Metadata to send at the head of the
* @param {object} config.metadata - Metadata to send at the head of the
* request.
* @param {object} options.request - Request object, in the format of a standard
* @param {object} config.request - Request object, in the format of a standard
* Node.js http.request() object.
* @param {string=} options.request.method - Default: "POST".
* @param {string=} options.request.qs.uploadType - Default: "multipart".
* @param {string=} options.streamContentType - Default:
* @param {string=} config.request.method - Default: "POST".
* @param {string=} config.request.qs.uploadType - Default: "multipart".
* @param {string=} config.streamContentType - Default:
* "application/octet-stream".
* @param {function} onComplete - Callback, executed after the writable Request
* stream has completed.
*/
function makeWritableStream(dup, options, onComplete) {
function makeWritableStream(dup, config, onComplete) {
onComplete = onComplete || noop;
options = options || {};

var writeStream = through();
dup.setWritable(writeStream);

var metadata = config.metadata || {};
var simpleUpload = config.simpleUpload;
var contentType = metadata.contentType || 'application/octet-stream';

var defaultReqOpts = {
method: 'POST',
qs: {
uploadType: 'multipart'
uploadType: simpleUpload ? 'media' : 'multipart'
}
};

var metadata = options.metadata || {};

var reqOpts = extend(true, defaultReqOpts, options.request, {
multipart: [
{
'Content-Type': 'application/json',
body: JSON.stringify(metadata)
},
{
'Content-Type': metadata.contentType || 'application/octet-stream',
body: writeStream
}
]
});
if (simpleUpload) {
defaultReqOpts.headers = {
'Content-Type': contentType
};

options.makeAuthorizedRequest(reqOpts, {
if (metadata.contentLength) {
defaultReqOpts.headers['Content-Length'] = metadata.contentLength;
}
}

var reqOpts = extend(true, defaultReqOpts, config.request);

config.makeAuthorizedRequest(reqOpts, {
onAuthorized: function(err, authorizedReqOpts) {
if (err) {
dup.destroy(err);
return;
}

request(authorizedReqOpts, function(err, resp, body) {
var req = request(authorizedReqOpts, function(err, resp, body) {
util.handleResp(err, resp, body, function(err, data) {
if (err) {
dup.destroy(err);
Expand All @@ -257,6 +257,21 @@ function makeWritableStream(dup, options, onComplete) {
onComplete(data);
});
});

if (simpleUpload) {
writeStream.pipe(req);
} else {
req.multipart([
{
'Content-Type': 'application/json',
body: JSON.stringify(metadata || {})
},
{
'Content-Type': contentType,
body: writeStream
}
]);
}
}
});
}
Expand Down
1 change: 1 addition & 0 deletions lib/storage/bucket.js
Original file line number Diff line number Diff line change
Expand Up @@ -996,6 +996,7 @@ Bucket.prototype.upload = function(localPath, options, callback) {
}

resumable = fd.size > RESUMABLE_THRESHOLD;
metadata.contentLength = fd.size;

upload();
});
Expand Down
61 changes: 50 additions & 11 deletions lib/storage/file.js
Original file line number Diff line number Diff line change
Expand Up @@ -620,17 +620,25 @@ File.prototype.createResumableUpload = function(metadata, callback) {
* NOTE: Writable streams will emit the `complete` event when the file is fully
* uploaded.
*
* @resource [Upload Options (Simple or Resumable)]{@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads}
* @resource [Upload Options (Simple, Multipart, or Resumable)]{@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads}
* @resource [Objects: insert API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert}
*
* @param {object=} options - Configuration object.
* @param {boolean} options.gzip - Automatically gzip the file. This will set
* `options.metadata.contentEncoding` to `gzip`.
* @param {object} options.metadata - Set the metadata for this file.
* @param {number} options.metadata.contentLength - The length of the content
* being uploaded.
* @param {string} options.metadata.contentType - The type of content that
* represents the file being uploaded. Default: `application/octet-stream`.
* @param {object} options.metadata.metadata - Custom metadata to set for this
* file.
* @param {boolean} options.gzip - Automatically gzip the file. This will set
* @param {boolean} options.resumable - Force a resumable upload. NOTE: When
* working with streams, the file format and size is unknown until it's
* completely consumed. Because of this, it's best for you to be explicit
* for what makes sense given your input.
 * @param {boolean} options.simple - Use a simple upload technique. Metadata
 *     properties other than `metadata.contentLength` and
 *     `metadata.contentType` will be ignored when using simple uploads.
* @param {string|boolean} options.validation - Possible values: `"md5"`,
* `"crc32c"`, or `false`. By default, data integrity is validated with an
* MD5 checksum for maximum reliability. CRC32c will provide better
Expand All @@ -648,7 +656,7 @@ File.prototype.createResumableUpload = function(metadata, callback) {
* var fs = require('fs');
* var image = myBucket.file('image.png');
*
* fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')
* fs.createReadStream('panda.jpg')
* .pipe(image.createWriteStream())
* .on('error', function(err) {})
* .on('finish', function() {
Expand All @@ -661,7 +669,7 @@ File.prototype.createResumableUpload = function(metadata, callback) {
* var fs = require('fs');
* var htmlFile = myBucket.file('index.html');
*
* fs.createReadStream('/Users/stephen/site/index.html')
* fs.createReadStream('site/index.html')
* .pipe(htmlFile.createWriteStream({ gzip: true }))
* .on('error', function(err) {})
* .on('finish', function() {
Expand All @@ -684,7 +692,7 @@ File.prototype.createResumableUpload = function(metadata, callback) {
* var fs = require('fs');
* var image = myBucket.file('image.png');
*
* fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')
* fs.createReadStream('panda.jpg')
* .pipe(image.createWriteStream({
* metadata: {
* contentType: 'image/jpeg',
Expand All @@ -697,6 +705,35 @@ File.prototype.createResumableUpload = function(metadata, callback) {
* .on('finish', function() {
* // The file upload is complete.
* });
*
* //-
* // <h4>Uploading a File using the Simple Upload Technique</h4>
* //
* // By default, streams will use a multipart upload with chunked transfer-
* // encoding when being piped to the remote endpoint. This can be convenient,
* // but unnecessary when you're only transferring small files, don't need to
* // set any metadata, and you know the size of the content ahead of time. You
* // can use a simple upload technique by setting `simple: true` and disable
* // chunked transfer-encoding by setting `metadata.contentLength`.
* //
* // *Metadata properties other than `metadata.contentLength` and
* // `metadata.contentType` will be ignored when using simple uploads.*
* //-
* var fs = require('fs');
* var image = myBucket.file('image.png');
*
* fs.createReadStream('panda.jpg')
* .pipe(image.createWriteStream({
* simple: true,
* metadata: {
* contentLength: 134888,
* contentType: 'image/jpg'
* }
* }))
* .on('error', function(err) {})
* .on('finish', function() {
* // The file upload is complete.
* });
*/
File.prototype.createWriteStream = function(options) {
options = options || {};
Expand Down Expand Up @@ -736,8 +773,8 @@ File.prototype.createWriteStream = function(options) {

// Wait until we've received data to determine what upload technique to use.
stream.on('writing', function() {
if (options.resumable === false) {
self.startSimpleUpload_(fileWriteStream, metadata);
if (options.resumable === false || options.simple === true) {
self.startSimpleUpload_(fileWriteStream, metadata, options);
} else {
self.startResumableUpload_(fileWriteStream, metadata);
}
Expand Down Expand Up @@ -1397,14 +1434,15 @@ File.prototype.startResumableUpload_ = function(dup, metadata) {
/**
* Takes a readable stream and pipes it to a remote file. Unlike
* `startResumableUpload_`, which uses the resumable upload technique, this
* method uses a simple upload (all or nothing).
* method uses a simple (or multipart) upload (all or nothing).
*
* @param {Duplexify} dup - Duplexify stream of data to pipe to the file.
* @param {object=} metadata - Optional metadata to set on the file.
* @param {object} metadata - Optional metadata to set on the file.
* @param {object} options - The original options passed to `createWriteStream`.
*
* @private
*/
File.prototype.startSimpleUpload_ = function(dup, metadata) {
File.prototype.startSimpleUpload_ = function(dup, metadata, options) {
var self = this;

var reqOpts = {
Expand All @@ -1424,7 +1462,8 @@ File.prototype.startSimpleUpload_ = function(dup, metadata) {
util.makeWritableStream(dup, {
makeAuthorizedRequest: self.bucket.storage.makeAuthorizedRequest_,
metadata: metadata,
request: reqOpts
request: reqOpts,
simpleUpload: options.simple
}, function(data) {
self.metadata = data;
dup.emit('complete');
Expand Down
25 changes: 25 additions & 0 deletions system-test/storage.js
Original file line number Diff line number Diff line change
Expand Up @@ -590,6 +590,31 @@ describe('storage', function() {
});
});

it('should write without chunked transfer-encoding', function(done) {
var file = bucket.file('LargeFile');

fs.stat(files.big.path, function(err, fd) {
var metadata = {
contentLength: fd.size,
contentType: 'image/png'
};

var ws = file.createWriteStream({
simple: true,
metadata: metadata
});

fs.createReadStream(files.big.path)
.pipe(ws)
.on('error', done)
.on('finish', function() {
assert.equal(file.metadata.size, metadata.contentLength);
assert.equal(file.metadata.contentType, metadata.contentType);
file.delete(done);
});
});
});

it('should write metadata', function(done) {
var options = {
metadata: { contentType: 'image/png' },
Expand Down