This repository has been archived by the owner on Feb 12, 2021. It is now read-only.

Commit

Merge pull request #76 from kacole2/fixForNoContentLength
fix for no content-length in header for download
faceleg committed Oct 9, 2015
2 parents 8e9cd92 + 0b7ecef commit 6dca5b8
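A brief note for context (not part of the commit message): downloadFile previously took the response's Content-Length header as both the progress total and the expected byte count for the integrity check, so a response that omitted the header could not be downloaded. The diff below adds a fallback that writes httpData chunks straight to the local file and grows progressTotal as bytes arrive. A minimal usage sketch against the package's documented createClient/downloadFile API follows; the bucket, key, and local path are placeholders:

// Usage sketch only: bucket, key, and local path below are placeholders.
var s3 = require('s3');

var client = s3.createClient({
  s3Options: {
    accessKeyId: process.env.S3_KEY,
    secretAccessKey: process.env.S3_SECRET,
  },
});

var downloader = client.downloadFile({
  localFile: '/tmp/example.bin',
  s3Params: { Bucket: 'my-bucket', Key: 'example.bin' },
});

downloader.on('error', function(err) {
  console.error('download failed:', err);
});
downloader.on('progress', function() {
  // With this fix, progressTotal starts at 0 and grows with each chunk when the
  // response carries no Content-Length header; otherwise it is the header value.
  console.log(downloader.progressAmount + '/' + downloader.progressTotal);
});
downloader.on('end', function() {
  console.log('download finished');
});
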
Showing 2 changed files with 58 additions and 36 deletions.
93 changes: 57 additions & 36 deletions lib/index.js
@@ -460,47 +460,68 @@ Client.prototype.downloadFile = function(params) {
       if (statusCode >= 300) {
         handleError(new Error("http status code " + statusCode));
         return;
       }
-      var contentLength = parseInt(headers['content-length'], 10);
-      downloader.progressTotal = contentLength;
-      downloader.progressAmount = 0;
-      downloader.emit('progress');
-      downloader.emit('httpHeaders', statusCode, headers, resp);
-      var eTag = cleanETag(headers.etag);
-      var eTagCount = getETagCount(eTag);
+      if (headers['content-length'] == undefined) {
+        var outStream = fs.createWriteStream(localFile);
+        outStream.on('error', handleError);
+        downloader.progressTotal = 0
+        downloader.progressAmount = -1;
+        request.on('httpData', function(chunk) {
+          downloader.progressTotal += chunk.length;
+          downloader.progressAmount += chunk.length;
+          downloader.emit('progress');
+          outStream.write(chunk);
+        })
+
+        request.on('httpDone', function() {
+          if (errorOccurred) return;
+          downloader.progressAmount += 1;
+          downloader.emit('progress');
+          outStream.end();
+          cb();
+        })
+      } else {
+        var contentLength = parseInt(headers['content-length'], 10);
+        downloader.progressTotal = contentLength;
+        downloader.progressAmount = 0;
+        downloader.emit('progress');
+        downloader.emit('httpHeaders', statusCode, headers, resp);
+        var eTag = cleanETag(headers.etag);
+        var eTagCount = getETagCount(eTag);
 
-      var outStream = fs.createWriteStream(localFile);
-      var multipartETag = new MultipartETag({size: contentLength, count: eTagCount});
-      var httpStream = resp.httpResponse.createUnbufferedStream();
+        var outStream = fs.createWriteStream(localFile);
+        var multipartETag = new MultipartETag({size: contentLength, count: eTagCount});
+        var httpStream = resp.httpResponse.createUnbufferedStream();
 
-      httpStream.on('error', handleError);
-      outStream.on('error', handleError);
+        httpStream.on('error', handleError);
+        outStream.on('error', handleError);
 
-      hashCheckPend.go(function(cb) {
-        multipartETag.on('end', function() {
-          if (multipartETag.bytes !== contentLength) {
-            handleError(new Error("Downloaded size does not match Content-Length"));
-            return;
-          }
-          if (eTagCount === 1 && !multipartETag.anyMatch(eTag)) {
-            handleError(new Error("ETag does not match MD5 checksum"));
-            return;
-          }
-          cb();
-        });
-      });
-      multipartETag.on('progress', function() {
-        downloader.progressAmount = multipartETag.bytes;
-        downloader.emit('progress');
-      });
-      outStream.on('close', function() {
-        if (errorOccurred) return;
-        hashCheckPend.wait(cb);
-      });
+        hashCheckPend.go(function(cb) {
+          multipartETag.on('end', function() {
+            if (multipartETag.bytes !== contentLength) {
+              handleError(new Error("Downloaded size does not match Content-Length"));
+              return;
+            }
+            if (eTagCount === 1 && !multipartETag.anyMatch(eTag)) {
+              handleError(new Error("ETag does not match MD5 checksum"));
+              return;
+            }
+            cb();
+          });
+        });
+        multipartETag.on('progress', function() {
+          downloader.progressAmount = multipartETag.bytes;
+          downloader.emit('progress');
+        });
+        outStream.on('close', function() {
+          if (errorOccurred) return;
+          hashCheckPend.wait(cb);
+        });
 
-      httpStream.pipe(multipartETag);
-      httpStream.pipe(outStream);
-      multipartETag.resume();
+        httpStream.pipe(multipartETag);
+        httpStream.pipe(outStream);
+        multipartETag.resume();
+      }
     });
 
     request.send(handleError);
1 change: 1 addition & 0 deletions test/test.js
@@ -36,6 +36,7 @@ function createClient() {
     s3Options: {
       accessKeyId: process.env.S3_KEY,
       secretAccessKey: process.env.S3_SECRET,
+      endpoint: process.env.S3_ENDPOINT,
     },
   });
 }
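
A note on this test change (the intent is inferred, not stated in the commit): createClient passes s3Options to the AWS SDK's S3 constructor, so an endpoint entry lets the suite target an S3-compatible service rather than AWS itself. A minimal sketch, with a made-up local endpoint value:

// Sketch only: the endpoint URL is a hypothetical S3-compatible server.
process.env.S3_ENDPOINT = 'http://localhost:9444';

var s3 = require('s3');
var client = s3.createClient({
  s3Options: {
    accessKeyId: process.env.S3_KEY,
    secretAccessKey: process.env.S3_SECRET,
    endpoint: process.env.S3_ENDPOINT,  // forwarded to the AWS SDK S3 client
  },
});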

0 comments on commit 6dca5b8
