Skip to content

Commit

Permalink
cli: replace request package
Browse files Browse the repository at this point in the history
`requestretry` behaved unreliably and forced us to buffer downloads in memory,
which drastically slowed down the whole script.

This commit replaces `requestretry` by `node-fetch` which better handles
node streams.

Signed-off-by: Paul Maréchal <[email protected]>
  • Loading branch information
paul-marechal committed Apr 27, 2020
1 parent bea6be6 commit 1b009d5
Show file tree
Hide file tree
Showing 3 changed files with 65 additions and 138 deletions.
3 changes: 2 additions & 1 deletion dev-packages/cli/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -34,16 +34,17 @@
"@types/chai": "^4.2.7",
"@types/mkdirp": "^0.5.2",
"@types/mocha": "^5.2.7",
"@types/node-fetch": "^2.5.7",
"@types/puppeteer": "^2.0.0",
"@types/requestretry": "^1.12.3",
"@types/tar": "^4.0.3",
"chai": "^4.2.0",
"colors": "^1.4.0",
"mkdirp": "^0.5.0",
"mocha": "^7.0.0",
"node-fetch": "^2.6.0",
"puppeteer": "^2.0.0",
"puppeteer-to-istanbul": "^1.2.2",
"requestretry": "^3.1.0",
"tar": "^4.0.0",
"unzip-stream": "^0.3.0",
"yargs": "^11.1.0"
Expand Down
176 changes: 40 additions & 136 deletions dev-packages/cli/src/download-plugins.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,25 +16,21 @@

/* eslint-disable @typescript-eslint/no-explicit-any */

import fetch, { Response } from 'node-fetch';
import * as fs from 'fs';
import * as http from 'http';
import * as mkdirp from 'mkdirp';
import * as path from 'path';
import * as process from 'process';
import * as request from 'requestretry';
import * as stream from 'stream';
import * as tar from 'tar';
import * as zlib from 'zlib';

import { green, red, bold } from 'colors/safe';
import { green, red } from 'colors/safe';

import { promisify } from 'util';
const mkdirpAsPromised = promisify<string, mkdirp.Made>(mkdirp);

const unzip = require('unzip-stream');

type RetryResponse = http.IncomingMessage & { attempts: number };

/**
* Available options when downloading.
*/
Expand All @@ -51,7 +47,7 @@ export default async function downloadPlugins(options: DownloadPluginsOptions =
packed = false,
} = options;

console.log('--- downloading plugins ---');
console.warn('--- fetching plugins ---');

// Resolve the `package.json` at the current working directory.
const pck = require(path.resolve(process.cwd(), 'package.json'));
Expand All @@ -73,62 +69,49 @@ export default async function downloadPlugins(options: DownloadPluginsOptions =
} else if (pluginUrl.endsWith('vsix')) {
fileExt = '.vsix';
} else {
console.error(bold(red(`error: '${plugin}' has an unsupported file type: '${pluginUrl}'`)));
console.error(red(`error: '${plugin}' has an unsupported file type: '${pluginUrl}'`));
return;
}

const targetPath = path.join(process.cwd(), pluginsDir, `${plugin}${packed === true ? fileExt : ''}`);

// Skip plugins which have previously been downloaded.
if (isDownloaded(targetPath)) {
console.log('- ' + plugin + ': already downloaded - skipping');
console.warn('- ' + plugin + ': already downloaded - skipping');
return;
}

// requestretry makes our life difficult: it supposedly hands back a readable stream,
// but if we try to use it later it will be too late and somehow the stream will already
// be consumed. Since we cannot handle said stream later, we'll buffer it to be able
// to replay it once we know everything went ok with the download.
const bufferingStream = new BufferingStream();

let download!: { res: RetryResponse, body: string };
try {
download = await new Promise<typeof download>((resolve, reject) => {
const req = request({
...pck.requestOptions,
url: pluginUrl,
maxAttempts: 5,
retryDelay: 2000,
retryStrategy: request.RetryStrategies.HTTPOrNetworkError,
}, (err: any, _res: any, body: string) => {
const res: RetryResponse = _res;
if (err) {
reject({ res, err });
} else {
if (typeof res.statusCode !== 'number' || res.statusCode < 200 || res.statusCode > 299) {
reject({ res, err });
} else {
resolve({ res, body });
}
}
});
// Buffer the stream right away:
req.pipe(bufferingStream);
});
} catch (object) {
const { err, res } = object as { err?: Error, res?: RetryResponse };
const status: string = res ? buildStatusStr(res.statusCode, res.statusMessage) : '';
console.error(bold(red(`x ${plugin}: failed to download ${res && res.attempts > 1 ? `(after ${res.attempts} attempts)` : ''} ${status}`)));
if (err) {
console.error(err);
const maxAttempts = 5;
const retryDelay = 2000;

let response!: Response;
let attempts: number;
let lastError: Error | undefined;

for (attempts = 0; attempts < maxAttempts; attempts++, lastError = undefined) {
if (attempts > 0) {
await new Promise(resolve => setTimeout(resolve, retryDelay));
}
try {
response = await fetch(pluginUrl);
} catch (error) {
lastError = error;
continue;
}
const retry = response.status === 439 || response.status >= 500;
if (!retry) {
break;
}
}
if (lastError) {
console.error(red(`x ${plugin}: failed to download, last error:`));
console.error(lastError);
return;
}
if (!response || response.status !== 200) {
console.error(red(`x ${plugin}: failed to download with: ${response.status} ${response.statusText}`));
return;
}

console.log(green(`+ ${plugin}: downloaded successfully ${download.res.attempts > 1 ? `(after ${download.res.attempts} attempts)` : ''}`));

// Get ready to re-stream downloaded data:
const replayStream = bufferingStream.replay();

if (fileExt === '.tar.gz') {
// Decompress .tar.gz files.
Expand All @@ -138,22 +121,24 @@ export default async function downloadPlugins(options: DownloadPluginsOptions =
flush: zlib.Z_SYNC_FLUSH
});
const untar = tar.x({ cwd: targetPath });
replayStream.pipe(gunzip).pipe(untar);
response.body.pipe(gunzip).pipe(untar);
} else {
if (packed === true) {
// Download .vsix without decompressing.
const file = fs.createWriteStream(targetPath);
replayStream.pipe(file);
response.body.pipe(file);
} else {
// Decompress .vsix.
replayStream.pipe(unzip.Extract({ path: targetPath }));
response.body.pipe(unzip.Extract({ path: targetPath }));
}
}

await new Promise((resolve, reject) => {
replayStream.on('end', resolve);
replayStream.on('error', reject);
response.body.on('end', resolve);
response.body.on('error', reject);
});

console.warn(green(`+ ${plugin}: downloaded successfully ${attempts > 1 ? `(after ${attempts} attempts)` : ''}`));
}));
}

Expand All @@ -166,84 +151,3 @@ export default async function downloadPlugins(options: DownloadPluginsOptions =
/**
 * Tell whether a plugin archive is already present at `filePath`,
 * in which case the download can be skipped.
 */
function isDownloaded(filePath: string): boolean {
    const alreadyThere = fs.existsSync(filePath);
    return alreadyThere;
}

/**
 * Format a human-readable summary of a response's status.
 * @param code the status code of the response, if known.
 * @param message the status message of the response, if known.
 * @returns a `{ ... }` summary including whichever parts are known,
 *          or the empty string when neither is.
 */
function buildStatusStr(code: number | undefined, message: string | undefined): string {
    const parts: string[] = [];
    if (code) {
        parts.push(`statusCode: ${code}`);
    }
    if (message) {
        parts.push(`statusMessage: ${message}`);
    }
    return parts.length > 0 ? `{ ${parts.join(', ')} }` : '';
}

/**
 * Writable stream that stores everything written into it.
 * A readable stream replaying the buffered data can then be created;
 * once the replay stream is obtained, this instance rejects further writes.
 */
class BufferingStream extends stream.Writable {

    protected _buffer: Buffer = Buffer.alloc(0);
    protected _replay: ReplayStream | undefined;

    /** Lazily create (and cache) the readable replay of the buffered data. */
    replay(): ReplayStream {
        if (this._replay === undefined) {
            this._replay = new ReplayStream(this._buffer);
        }
        return this._replay;
    }

    _write(chunk: Buffer | string, encoding: any, callback: Function): void {
        // Once a replay has started the buffer must not change anymore.
        if (this._replay !== undefined) {
            callback(new Error('unexpected write: replay is ongoing'));
            return;
        }
        let incoming: Buffer;
        if (Buffer.isBuffer(chunk)) {
            incoming = chunk;
        } else if (typeof chunk === 'string' && Buffer.isEncoding(encoding)) {
            incoming = Buffer.from(chunk, encoding);
        } else {
            callback(new TypeError('cannot get a buffer from chunk'));
            return;
        }
        this._buffer = Buffer.concat([this._buffer, incoming], this._buffer.length + incoming.length);
        // eslint-disable-next-line no-null/no-null
        callback(null);
    }

}

/**
 * Readable stream that serves the contents of a pre-existing buffer.
 */
class ReplayStream extends stream.Readable {

    protected _buffer: Buffer;
    protected _head = 0;

    constructor(buffer: Buffer) {
        super();
        this._buffer = buffer;
    }

    _read(size: number): void {
        // Past the last byte: signal end-of-stream.
        if (this._head >= this._buffer.length) {
            // eslint-disable-next-line no-null/no-null
            this.push(null);
            return;
        }
        const next = this._buffer.slice(this._head, this._head + size);
        this._head += size;
        this.push(next);
    }

}
24 changes: 23 additions & 1 deletion yarn.lock
Original file line number Diff line number Diff line change
Expand Up @@ -1290,6 +1290,14 @@
resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-5.2.7.tgz#315d570ccb56c53452ff8638738df60726d5b6ea"
integrity sha512-NYrtPht0wGzhwe9+/idPaBB+TqkY9AhTvOLMkThm0IoEfLaiVQZwBwyJ5puCkO3AUCWrmcoePjp2mbFocKy4SQ==

"@types/node-fetch@^2.5.7":
version "2.5.7"
resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.5.7.tgz#20a2afffa882ab04d44ca786449a276f9f6bbf3c"
integrity sha512-o2WVNf5UhWRkxlf6eq+jMZDu7kjgpgJfl4xVNlvryc95O/6F2ld8ztKX+qu+Rjyet93WAWm5LjeX9H5FGkODvw==
dependencies:
"@types/node" "*"
form-data "^3.0.0"

"@types/node@*", "@types/node@^10.12.18", "@types/node@^10.14.22", "@types/node@~10.3.6":
version "10.3.6"
resolved "https://registry.yarnpkg.com/@types/node/-/node-10.3.6.tgz#ea8aab9439b59f40d19ec5f13b44642344872b11"
Expand Down Expand Up @@ -3857,7 +3865,7 @@ columnify@^1.5.4:
strip-ansi "^3.0.0"
wcwidth "^1.0.0"

combined-stream@^1.0.6, combined-stream@~1.0.6:
combined-stream@^1.0.6, combined-stream@^1.0.8, combined-stream@~1.0.6:
version "1.0.8"
resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f"
integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==
Expand Down Expand Up @@ -5989,6 +5997,15 @@ form-data@^2.5.0:
combined-stream "^1.0.6"
mime-types "^2.1.12"

form-data@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.0.tgz#31b7e39c85f1355b7139ee0c647cf0de7f83c682"
integrity sha512-CKMFDglpbMi6PyN+brwB9Q/GOw0eAnsrEZDgcsH5Krhz5Od/haKHAX0NmQfha2zPPz0JpWzA7GJHGSnvCRLWsg==
dependencies:
asynckit "^0.4.0"
combined-stream "^1.0.8"
mime-types "^2.1.12"

form-data@~2.3.2:
version "2.3.3"
resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6"
Expand Down Expand Up @@ -8893,6 +8910,11 @@ [email protected]:
object.getownpropertydescriptors "^2.0.3"
semver "^5.7.0"

node-fetch@^2.6.0:
version "2.6.0"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd"
integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==

node-gyp@^3.6.0:
version "3.8.0"
resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-3.8.0.tgz#540304261c330e80d0d5edce253a68cb3964218c"
Expand Down

0 comments on commit 1b009d5

Please sign in to comment.