Update tools to support Node 10 (#536)
* Updated Travis, build, and CLI test fixture for Node 10.
* Updated AppVeyor to use Node 10.
* Updated CHANGELOG for build.
* Converted 'new Buffer(' to 'Buffer.from(', as the constructor is deprecated in Node 10 (a short sketch of the migration follows the file summary).
usergenic authored Jun 22, 2018
1 parent 11a1f07 commit dd1c8bb
Showing 16 changed files with 55 additions and 47 deletions.
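For context on the largest group of edits below: Node 10 turns the `new Buffer(...)` constructor into a runtime deprecation (DEP0005), and `Buffer.from(...)` is the drop-in replacement for the string/encoding and copy forms used in this repository. A minimal sketch of the migration (the literal values are made up for illustration):

```ts
// Before: deprecated constructor, warns at runtime on Node 10.
// const contents = new Buffer('<html></html>', 'utf-8');

// After: same arguments, same resulting bytes, no deprecation warning.
const contents: Buffer = Buffer.from('<html></html>', 'utf-8');

// Buffer.from also covers the copy form used in a few of the files below.
const copy: Buffer = Buffer.from(contents);
```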
5 changes: 2 additions & 3 deletions .travis.yml
@@ -35,8 +35,7 @@ matrix:
env:
- TEST_COMMAND="npm run test:unit"

# TODO(https://github.com/Polymer/tools/issues/280): update to node 10
- node_js: '9'
- node_js: '10'
env:
- TEST_COMMAND="npm run test:unit"

@@ -47,7 +46,7 @@ matrix:
env:
- TEST_COMMAND="xvfb-run npm run test:integration"

- node_js: '9'
- node_js: '10'
addons:
chrome: stable
firefox: latest
2 changes: 1 addition & 1 deletion appveyor.yml
@@ -1,5 +1,5 @@
environment:
nodejs_version: "9"
nodejs_version: "10"

branches:
only:
5 changes: 4 additions & 1 deletion packages/build/CHANGELOG.md
@@ -5,7 +5,10 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).

<!-- ## Unreleased -->
## Unreleased
* Service Worker generation uses consistent spacing for anonymous
  functions (i.e. a space between the keyword and the parentheses: `function ()`),
  ensuring that Node 8 and Node 10 output are identical.
<!-- Add new, unreleased changes here. -->

## [3.0.2] - 2018-06-19
2 changes: 1 addition & 1 deletion packages/build/src/base-tag-updater.ts
@@ -48,7 +48,7 @@ export class BaseTagUpdater extends AsyncTransformStream<File, File> {
dom5.setAttribute(base, 'href', this.newHref);
dom5.removeFakeRootElements(parsed);
const updatedFile = file.clone();
updatedFile.contents = new Buffer(parse5.serialize(parsed), 'utf-8');
updatedFile.contents = Buffer.from(parse5.serialize(parsed), 'utf-8');
yield updatedFile;
}
}
2 changes: 1 addition & 1 deletion packages/build/src/bundle.ts
@@ -111,7 +111,7 @@ export class BuildBundler extends AsyncTransformStream<File, File> {
path: pathFromUrl(
this.config.root as LocalFsPath,
this._bundler.analyzer.urlResolver.relative(url)),
contents: new Buffer(document.content),
contents: Buffer.from(document.content),
}));
}
}
2 changes: 1 addition & 1 deletion packages/build/src/custom-elements-es5-adapter.ts
@@ -39,7 +39,7 @@ export class CustomElementsEs5AdapterInjector extends
yield file;
} else {
const updatedFile = file.clone();
updatedFile.contents = new Buffer(updatedContents, 'utf-8');
updatedFile.contents = Buffer.from(updatedContents, 'utf-8');
yield updatedFile;
}
}
6 changes: 3 additions & 3 deletions packages/build/src/html-splitter.ts
@@ -196,7 +196,7 @@ class HtmlSplitTransform extends AsyncTransformStream<File, File> {
cwd: file.cwd,
base: file.base,
path: childPath,
contents: new Buffer(source),
contents: Buffer.from(source),
});
scriptFile.fromHtmlSplitter = true;
scriptFile.isModule = typeAttribute === 'module';
@@ -210,7 +210,7 @@
cwd: file.cwd,
base: file.base,
path: filePath,
contents: new Buffer(splitContents),
contents: Buffer.from(splitContents),
});
yield newFile;
}
@@ -291,7 +291,7 @@ class HtmlRejoinTransform extends AsyncTransformStream<File, File> {
cwd: file.cwd,
base: file.base,
path: filePath,
contents: new Buffer(joinedContents),
contents: Buffer.from(joinedContents),
});
}
}
2 changes: 1 addition & 1 deletion packages/build/src/inject-babel-helpers.ts
@@ -39,7 +39,7 @@ export class BabelHelpersInjector extends AsyncTransformStream<File, File> {
const contents = await getFileContents(file);
const transformed = htmlTransform(contents, {injectBabelHelpers: 'full'});
const newFile = file.clone();
newFile.contents = new Buffer(transformed, 'utf-8');
newFile.contents = Buffer.from(transformed, 'utf-8');
return newFile;
}
}
2 changes: 1 addition & 1 deletion packages/build/src/optimize-streams.ts
@@ -94,7 +94,7 @@ export class GenericOptimizeTransform extends Transform {
try {
let contents = file.contents.toString();
contents = this.optimizer(contents, file);
file.contents = new Buffer(contents);
file.contents = Buffer.from(contents);
} catch (error) {
logger.warn(
`${this.optimizerName}: Unable to optimize ${file.path}`,
2 changes: 1 addition & 1 deletion packages/build/src/prefetch-links.ts
@@ -111,7 +111,7 @@ export class AddPrefetchLinks extends AsyncTransformStream<File, File> {
const filePath = pathFromUrl(
this._config.root as LocalFsPath,
this._analyzer.urlResolver.relative(documentUrl));
yield new File({contents: new Buffer(html, 'utf-8'), path: filePath});
yield new File({contents: Buffer.from(html, 'utf-8'), path: filePath});
}
}
}
2 changes: 1 addition & 1 deletion packages/build/src/push-manifest.ts
@@ -207,7 +207,7 @@ export class AddPushManifest extends AsyncTransformStream<File, File> {
// Push the new push manifest into the stream.
yield new File({
path: this.outPath,
contents: new Buffer(pushManifestContents),
contents: Buffer.from(pushManifestContents),
});
}

8 changes: 7 additions & 1 deletion packages/build/src/service-worker.ts
@@ -188,7 +188,13 @@ export async function generateServiceWorker(options: AddServiceWorkerOptions):
if (err || fileContents == null) {
reject(err || 'No file contents provided.');
} else {
resolve(new Buffer(fileContents));
// Note: Node 10 Function.prototype.toString() produces output
// like `function() { }` where earlier versions produce
// `function () { }` (note the space between the `function`
// keyword and the parentheses). To ensure the output is consistent
// across versions, we insert the missing space here.
fileContents = fileContents.replace(/\bfunction\(/g, 'function (');
resolve(Buffer.from(fileContents));
}
});
}));
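The regex added above is the whole compatibility shim: per the comment, Node 10's `Function.prototype.toString()` returns `function(` where Node 8 returned `function (`, so the generator rewrites whatever spacing it receives into the spaced form and both versions emit identical service workers. A standalone sketch of the same normalization, using a made-up snippet rather than real sw-precache output:

```ts
// Input as Node 10 might serialize an anonymous handler (no space).
const generated = 'self.addEventListener("fetch", function(event) {});';

// Same normalization as above: add a space after every bare `function(`.
const normalized = generated.replace(/\bfunction\(/g, 'function (');

console.log(normalized);
// -> self.addEventListener("fetch", function (event) {});
```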
4 changes: 2 additions & 2 deletions packages/build/src/test/bundle_test.ts
@@ -122,10 +122,10 @@ suite('BuildBundler', () => {
const addHeaders = new FileTransform((stream, file) => {
if (path.extname(file.path) === '.html') {
file.contents =
new Buffer(`<!-- ${path.basename(file.path)} -->${file.contents}`);
Buffer.from(`<!-- ${path.basename(file.path)} -->${file.contents}`);
} else if (path.extname(file.path).match(/^\.(js|css)$/)) {
file.contents =
new Buffer(`/* ${path.basename(file.path)} */${file.contents}`);
Buffer.from(`/* ${path.basename(file.path)} */${file.contents}`);
}
stream.push(file);
});
2 changes: 1 addition & 1 deletion packages/build/src/test/html-splitter_test.ts
@@ -105,7 +105,7 @@ suite('HtmlSplitter', () => {
cwd: root,
base: root,
path: filepath,
contents: new Buffer(source),
contents: Buffer.from(source),
});

sourceStream.pipe(htmlSplitter.split())
@@ -65,7 +65,7 @@ var cleanResponse = function (originalResponse) {
Promise.resolve(originalResponse.body) :
originalResponse.blob();

return bodyPromise.then(function(body) {
return bodyPromise.then(function (body) {
// new Response() is happy when passed either a stream or a Blob.
return new Response(body, {
headers: originalResponse.headers,
@@ -99,7 +99,7 @@ var isPathWhitelisted = function (whitelist, absoluteUrlString) {

// Otherwise compare each path regex to the path of the URL passed in.
var path = (new URL(absoluteUrlString)).pathname;
return whitelist.some(function(whitelistedPathRegex) {
return whitelist.some(function (whitelistedPathRegex) {
return path.match(whitelistedPathRegex);
});
};
@@ -112,15 +112,15 @@ var stripIgnoredUrlParameters = function (originalUrl,

url.search = url.search.slice(1) // Exclude initial '?'
.split('&') // Split into an array of 'key=value' strings
.map(function(kv) {
.map(function (kv) {
return kv.split('='); // Split each 'key=value' string into a [key, value] array
})
.filter(function(kv) {
return ignoreUrlParametersMatching.every(function(ignoredRegex) {
.filter(function (kv) {
return ignoreUrlParametersMatching.every(function (ignoredRegex) {
return !ignoredRegex.test(kv[0]); // Return true iff the key doesn't match any of the regexes.
});
})
.map(function(kv) {
.map(function (kv) {
return kv.join('='); // Join each [key, value] array into a 'key=value' string
})
.join('&'); // Join the array of 'key=value' strings into a string with '&' in between each
@@ -131,7 +131,7 @@ var stripIgnoredUrlParameters = function (originalUrl,

var hashParamName = '_sw-precache';
var urlsToCacheKeys = new Map(
precacheConfig.map(function(item) {
precacheConfig.map(function (item) {
var relativeUrl = item[0];
var hash = item[1];
var absoluteUrl = new URL(relativeUrl, self.location);
@@ -141,41 +141,41 @@ var urlsToCacheKeys = new Map(
);

function setOfCachedUrls(cache) {
return cache.keys().then(function(requests) {
return requests.map(function(request) {
return cache.keys().then(function (requests) {
return requests.map(function (request) {
return request.url;
});
}).then(function(urls) {
}).then(function (urls) {
return new Set(urls);
});
}

self.addEventListener('install', function(event) {
self.addEventListener('install', function (event) {
event.waitUntil(
caches.open(cacheName).then(function(cache) {
return setOfCachedUrls(cache).then(function(cachedUrls) {
caches.open(cacheName).then(function (cache) {
return setOfCachedUrls(cache).then(function (cachedUrls) {
return Promise.all(
Array.from(urlsToCacheKeys.values()).map(function(cacheKey) {
Array.from(urlsToCacheKeys.values()).map(function (cacheKey) {
// If we don't have a key matching url in the cache already, add it.
if (!cachedUrls.has(cacheKey)) {
var request = new Request(cacheKey, {credentials: 'same-origin'});
return fetch(request).then(function(response) {
return fetch(request).then(function (response) {
// Bail out of installation unless we get back a 200 OK for
// every request.
if (!response.ok) {
throw new Error('Request for ' + cacheKey + ' returned a ' +
'response with status ' + response.status);
}

return cleanResponse(response).then(function(responseToCache) {
return cleanResponse(response).then(function (responseToCache) {
return cache.put(cacheKey, responseToCache);
});
});
}
})
);
});
}).then(function() {
}).then(function () {

// Force the SW to transition from installing -> active state
return self.skipWaiting();
@@ -184,21 +184,21 @@ self.addEventListener('install', function(event) {
);
});

self.addEventListener('activate', function(event) {
self.addEventListener('activate', function (event) {
var setOfExpectedUrls = new Set(urlsToCacheKeys.values());

event.waitUntil(
caches.open(cacheName).then(function(cache) {
return cache.keys().then(function(existingRequests) {
caches.open(cacheName).then(function (cache) {
return cache.keys().then(function (existingRequests) {
return Promise.all(
existingRequests.map(function(existingRequest) {
existingRequests.map(function (existingRequest) {
if (!setOfExpectedUrls.has(existingRequest.url)) {
return cache.delete(existingRequest);
}
})
);
});
}).then(function() {
}).then(function () {

return self.clients.claim();

Expand All @@ -207,7 +207,7 @@ self.addEventListener('activate', function(event) {
});


self.addEventListener('fetch', function(event) {
self.addEventListener('fetch', function (event) {
if (event.request.method === 'GET') {
// Should we call event.respondWith() inside this fetch event handler?
// This needs to be determined synchronously, which will give other fetch
@@ -242,14 +242,14 @@ self.addEventListener('fetch', function(event) {
// event.respondWith(), using the appropriate cache key.
if (shouldRespond) {
event.respondWith(
caches.open(cacheName).then(function(cache) {
return cache.match(urlsToCacheKeys.get(url)).then(function(response) {
caches.open(cacheName).then(function (cache) {
return cache.match(urlsToCacheKeys.get(url)).then(function (response) {
if (response) {
return response;
}
throw Error('The cached response that was expected is missing.');
});
}).catch(function(e) {
}).catch(function (e) {
// Fall back to just fetch()ing the request if some unexpected error
// prevented the cached response from being valid.
console.warn('Couldn\'t serve response for "%s" from cache: %O', event.request.url, e);
4 changes: 2 additions & 2 deletions packages/polyserve/src/transform-middleware.ts
@@ -45,7 +45,7 @@ export function transformResponse(transformer: ResponseTransformer):

if (shouldTransform()) {
const buffer = (typeof chunk === 'string') ?
new Buffer(chunk, cbOrEncoding as string) :
Buffer.from(chunk, cbOrEncoding as string) :
chunk;
chunks.push(buffer);
return true;
@@ -68,7 +68,7 @@ export function transformResponse(transformer: ResponseTransformer):
if (Buffer.isBuffer(cbOrChunk)) {
chunks.push(cbOrChunk);
} else if (typeof cbOrChunk === 'string') {
chunks.push(new Buffer(cbOrChunk, cbOrEncoding as string));
chunks.push(Buffer.from(cbOrChunk, cbOrEncoding as string));
}
const body = Buffer.concat(chunks).toString('utf8');
let newBody = body;
