From 5323ead1f36183b325a72c348ce73a6e9ed09a50 Mon Sep 17 00:00:00 2001
From: kohta ito
Date: Sun, 23 Sep 2018 03:11:21 +0900
Subject: [PATCH] child_process: use non-infinite maxBuffer defaults

Set the default maxBuffer size to 204,800 bytes for execSync, execFileSync,
and spawnSync. APIs that return the child output as a string should have
non-infinite defaults for maxBuffer sizes to avoid out-of-memory error
conditions.

A non-infinite default used to be the documented behaviour for all relevant
APIs, but the implemented behaviour for execSync, execFileSync and spawnSync
was to have no maxBuffer limits.
---
 doc/api/child_process.md                      |  6 +--
 lib/child_process.js                          | 10 +++-
 .../test-async-wrap-pop-id-during-load.js     |  3 +-
 .../test-child-process-exec-maxbuf.js         | 24 +++++++++
 ...st-child-process-execfilesync-maxBuffer.js | 50 +++++++++++++++++++
 .../test-child-process-execfilesync-maxbuf.js | 20 +++++---
 .../test-child-process-execsync-maxbuf.js     | 22 ++++++++
 .../test-child-process-spawnsync-maxbuf.js    | 17 ++++++-
 .../parallel/test-tick-processor-arguments.js |  2 +-
 9 files changed, 138 insertions(+), 16 deletions(-)
 create mode 100644 test/parallel/test-child-process-execfilesync-maxBuffer.js

diff --git a/doc/api/child_process.md b/doc/api/child_process.md
index 4fdf6d77b3f44e..b2208c006ccaeb 100644
--- a/doc/api/child_process.md
+++ b/doc/api/child_process.md
@@ -721,7 +721,7 @@ changes:
     process will be killed. **Default:** `'SIGTERM'`.
   * `maxBuffer` {number} Largest amount of data in bytes allowed on stdout or
     stderr. If exceeded, the child process is terminated. See caveat at
-    [`maxBuffer` and Unicode][]. **Default:** `Infinity`.
+    [`maxBuffer` and Unicode][]. **Default:** `200 * 1024`.
   * `encoding` {string} The encoding used for all stdio inputs and outputs.
     **Default:** `'buffer'`.
   * `windowsHide` {boolean} Hide the subprocess console window that would
@@ -788,7 +788,7 @@ changes:
   * `maxBuffer` {number} Largest amount of data in bytes allowed on stdout or
     stderr. If exceeded, the child process is terminated and any output is
     truncated. See caveat at [`maxBuffer` and Unicode][].
-    **Default:** `Infinity`.
+    **Default:** `200 * 1024`.
   * `encoding` {string} The encoding used for all stdio inputs and outputs.
     **Default:** `'buffer'`.
   * `windowsHide` {boolean} Hide the subprocess console window that would
@@ -852,7 +852,7 @@ changes:
   * `maxBuffer` {number} Largest amount of data in bytes allowed on stdout or
     stderr. If exceeded, the child process is terminated and any output is
     truncated. See caveat at [`maxBuffer` and Unicode][].
-    **Default:** `Infinity`.
+    **Default:** `200 * 1024`.
   * `encoding` {string} The encoding used for all stdio inputs and outputs.
     **Default:** `'buffer'`.
   * `shell` {boolean|string} If `true`, runs `command` inside of a shell. Uses
diff --git a/lib/child_process.js b/lib/child_process.js
index 7d0b692141a86d..9e8067784a7da1 100644
--- a/lib/child_process.js
+++ b/lib/child_process.js
@@ -46,6 +46,8 @@ const {
   ChildProcess
 } = child_process;
 
+const MAX_BUFFER = 200 * 1024;
+
 exports.ChildProcess = ChildProcess;
 
 function stdioStringToArray(option) {
@@ -206,7 +208,7 @@ exports.execFile = function execFile(file /* , args, options, callback */) {
   options = {
     encoding: 'utf8',
     timeout: 0,
-    maxBuffer: 200 * 1024,
+    maxBuffer: MAX_BUFFER,
     killSignal: 'SIGTERM',
     cwd: null,
     env: null,
@@ -560,7 +562,11 @@ var spawn = exports.spawn = function spawn(file, args, options) {
 
 function spawnSync(file, args, options) {
   const opts = normalizeSpawnArguments(file, args, options);
 
-  options = opts.options;
+  const defaults = {
+    maxBuffer: MAX_BUFFER,
+    ...opts.options
+  };
+  options = opts.options = Object.assign(defaults, opts.options);
 
   debug('spawnSync', opts.args, options);
diff --git a/test/parallel/test-async-wrap-pop-id-during-load.js b/test/parallel/test-async-wrap-pop-id-during-load.js
index cff7e85fdffef4..1ab98104796557 100644
--- a/test/parallel/test-async-wrap-pop-id-during-load.js
+++ b/test/parallel/test-async-wrap-pop-id-during-load.js
@@ -16,7 +16,8 @@ const { spawnSync } = require('child_process');
 
 const ret = spawnSync(
   process.execPath,
-  ['--stack_size=150', __filename, 'async']
+  ['--stack_size=150', __filename, 'async'],
+  { maxBuffer: Infinity }
 );
 assert.strictEqual(ret.status, 0,
                    `EXIT CODE: ${ret.status}, STDERR:\n${ret.stderr}`);
diff --git a/test/parallel/test-child-process-exec-maxbuf.js b/test/parallel/test-child-process-exec-maxbuf.js
index 1a47cbee3c2cf1..2291227b5ef2b4 100644
--- a/test/parallel/test-child-process-exec-maxbuf.js
+++ b/test/parallel/test-child-process-exec-maxbuf.js
@@ -56,6 +56,30 @@ function runChecks(err, stdio, streamName, expected) {
   );
 }
 
+// Default maxBuffer size is 200 * 1024; exceeding it should error.
+{
+  const cmd = `"${process.execPath}" -e "console.log('a'.repeat(200 * 1024))"`;
+
+  cp.exec(
+    cmd,
+    common.mustCall((err, stdout, stderr) => {
+      runChecks(err, { stdout, stderr }, 'stdout', 'a'.repeat(200 * 1024));
+    })
+  );
+}
+
+// Default maxBuffer size is 200 * 1024; staying below it should succeed.
+{
+  const cmd =
+    `"${process.execPath}" -e "console.log('a'.repeat(200 * 1024 - 1))"`;
+
+  cp.exec(cmd, common.mustCall((err, stdout, stderr) => {
+    assert.ifError(err);
+    assert.strictEqual(stdout.trim(), 'a'.repeat(200 * 1024 - 1));
+    assert.strictEqual(stderr, '');
+  }));
+}
+
 const unicode = '中文测试'; // length = 4, byte length = 12
 
 {
diff --git a/test/parallel/test-child-process-execfilesync-maxBuffer.js b/test/parallel/test-child-process-execfilesync-maxBuffer.js
new file mode 100644
index 00000000000000..2145849f0a5c9f
--- /dev/null
+++ b/test/parallel/test-child-process-execfilesync-maxBuffer.js
@@ -0,0 +1,50 @@
+'use strict';
+require('../common');
+
+// This test checks that the maxBuffer option for child_process.execFileSync()
+// works as expected.
+
+const assert = require('assert');
+const { execFileSync } = require('child_process');
+const msgOut = 'this is stdout';
+const msgOutBuf = Buffer.from(`${msgOut}\n`);
+
+const args = [
+  '-e',
+  `console.log("${msgOut}");`
+];
+
+// Verify that an error is returned if maxBuffer is surpassed.
+{
+  assert.throws(() => {
+    execFileSync(process.execPath, args, { maxBuffer: 1 });
+  }, (e) => {
+    assert.ok(e, 'maxBuffer should error');
+    assert.strictEqual(e.errno, 'ENOBUFS');
+    // We can have buffers larger than maxBuffer because underneath we alloc 64k
+    // that matches our read sizes.
+    assert.deepStrictEqual(e.stdout, msgOutBuf);
+    return true;
+  });
+}
+
+// Verify that a maxBuffer size of Infinity works.
+{
+  const ret = execFileSync(process.execPath, args, { maxBuffer: Infinity });
+
+  assert.deepStrictEqual(ret, msgOutBuf);
+}
+
+// Default maxBuffer size is 200 * 1024.
+{
+  assert.throws(() => {
+    execFileSync(
+      process.execPath,
+      ['-e', "console.log('a'.repeat(200 * 1024))"]
+    );
+  }, (e) => {
+    assert.ok(e, 'maxBuffer should error');
+    assert.strictEqual(e.errno, 'ENOBUFS');
+    return true;
+  });
+}
diff --git a/test/parallel/test-child-process-execfilesync-maxbuf.js b/test/parallel/test-child-process-execfilesync-maxbuf.js
index 7ea7a0645d65cb..8b576654a1fd07 100644
--- a/test/parallel/test-child-process-execfilesync-maxbuf.js
+++ b/test/parallel/test-child-process-execfilesync-maxbuf.js
@@ -34,13 +34,19 @@ const args = [
   assert.deepStrictEqual(ret, msgOutBuf);
 }
 
-// maxBuffer size is Infinity at default.
+// The default maxBuffer size is 200 * 1024.
 {
-  const ret = execFileSync(
-    process.execPath,
-    ['-e', "console.log('a'.repeat(200 * 1024))"],
-    { encoding: 'utf-8' }
+  assert.throws(
+    () => {
+      execFileSync(
+        process.execPath,
+        ['-e', "console.log('a'.repeat(200 * 1024))"],
+        { encoding: 'utf-8' }
+      );
+    }, (e) => {
+      assert.ok(e, 'maxBuffer should error');
+      assert.strictEqual(e.errno, 'ENOBUFS');
+      return true;
+    }
   );
-
-  assert.ifError(ret.error);
 }
diff --git a/test/parallel/test-child-process-execsync-maxbuf.js b/test/parallel/test-child-process-execsync-maxbuf.js
index 0d2fa306939b59..67ccf0c7bb9d47 100644
--- a/test/parallel/test-child-process-execsync-maxbuf.js
+++ b/test/parallel/test-child-process-execsync-maxbuf.js
@@ -21,6 +21,8 @@ const args = [
   }, (e) => {
     assert.ok(e, 'maxBuffer should error');
     assert.strictEqual(e.errno, 'ENOBUFS');
+    // We can have buffers larger than maxBuffer because underneath we alloc 64k
+    // that matches our read sizes.
     assert.deepStrictEqual(e.stdout, msgOutBuf);
     return true;
   });
@@ -35,3 +37,23 @@
 
   assert.deepStrictEqual(ret, msgOutBuf);
 }
+
+// Default maxBuffer size is 200 * 1024.
+{
+  assert.throws(() => {
+    execSync(`"${process.execPath}" -e "console.log('a'.repeat(200 * 1024))"`);
+  }, (e) => {
+    assert.ok(e, 'maxBuffer should error');
+    assert.strictEqual(e.errno, 'ENOBUFS');
+    return true;
+  });
+}
+
+// Default maxBuffer size is 200 * 1024.
+{
+  const ret = execSync(
+    `"${process.execPath}" -e "console.log('a'.repeat(200 * 1024 - 1))"`
+  );
+
+  assert.deepStrictEqual(ret.toString().trim(), 'a'.repeat(200 * 1024 - 1));
+}
diff --git a/test/parallel/test-child-process-spawnsync-maxbuf.js b/test/parallel/test-child-process-spawnsync-maxbuf.js
index aec42b555bcd47..0808dfd56f4b24 100644
--- a/test/parallel/test-child-process-spawnsync-maxbuf.js
+++ b/test/parallel/test-child-process-spawnsync-maxbuf.js
@@ -33,10 +33,23 @@ const args = [
   assert.deepStrictEqual(ret.stdout, msgOutBuf);
 }
 
-// maxBuffer size is Infinity at default.
+// Default maxBuffer size is 200 * 1024.
 {
   const args = ['-e', "console.log('a'.repeat(200 * 1024))"];
-  const ret = spawnSync(process.execPath, args, { encoding: 'utf-8' });
+  const ret = spawnSync(process.execPath, args);
+
+  assert.ok(ret.error, 'maxBuffer should error');
+  assert.strictEqual(ret.error.errno, 'ENOBUFS');
+}
+
+// Default maxBuffer size is 200 * 1024.
+{
+  const args = ['-e', "console.log('a'.repeat(200 * 1024 - 1))"];
+  const ret = spawnSync(process.execPath, args);
 
   assert.ifError(ret.error);
+  assert.deepStrictEqual(
+    ret.stdout.toString().trim(),
+    'a'.repeat(200 * 1024 - 1)
+  );
 }
diff --git a/test/parallel/test-tick-processor-arguments.js b/test/parallel/test-tick-processor-arguments.js
index 3f0ac73596e677..28c1c68fe3704e 100644
--- a/test/parallel/test-tick-processor-arguments.js
+++ b/test/parallel/test-tick-processor-arguments.js
@@ -26,7 +26,7 @@ assert(logfile);
 const { stdout } = spawnSync(
   process.execPath,
   [ '--prof-process', '--preprocess', logfile ],
-  { cwd: tmpdir.path, encoding: 'utf8' });
+  { cwd: tmpdir.path, encoding: 'utf8', maxBuffer: Infinity });
 
 // Make sure that the result is valid JSON.
 JSON.parse(stdout);
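
With this change, execSync(), execFileSync() and spawnSync() enforce the same
200 * 1024 byte maxBuffer default that exec() and execFile() already used, and
callers that need unbounded output must opt in explicitly, as the updated
test-async-wrap-pop-id-during-load.js and test-tick-processor-arguments.js do.
The snippet below is not part of the patch; it is a minimal usage sketch that
assumes a Node.js build including this change and relies on the ENOBUFS errno
exercised by the tests above:

    'use strict';
    const assert = require('assert');
    const { execSync } = require('child_process');

    // The child prints 200 * 1024 'a' characters plus a newline, which is
    // just over the default maxBuffer of 200 * 1024 bytes, so execSync()
    // is expected to throw.
    const cmd =
      `"${process.execPath}" -e "console.log('a'.repeat(200 * 1024))"`;

    assert.throws(() => execSync(cmd), (e) => e.errno === 'ENOBUFS');

    // Passing maxBuffer: Infinity explicitly restores the old behaviour.
    const out = execSync(cmd, { maxBuffer: Infinity });
    assert.strictEqual(out.toString().trim(), 'a'.repeat(200 * 1024));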