From d5123ca04b552479a8875ba098026935e311f8de Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Wed, 19 May 2021 15:50:14 -0700 Subject: [PATCH 01/32] report: encode data even if very large --- build/build-treemap.js | 1 + .../report/html/html-report-assets.js | 1 + .../report/html/renderer/base64.js | 68 +++++++++++++++++++ .../html/renderer/report-ui-features.js | 19 +----- .../test/report/html/renderer/base64-test.js | 29 ++++++++ lighthouse-treemap/app/src/main.js | 16 +---- lighthouse-treemap/types/treemap.d.ts | 2 + types/html-renderer.d.ts | 2 + 8 files changed, 108 insertions(+), 30 deletions(-) create mode 100644 lighthouse-core/report/html/renderer/base64.js create mode 100644 lighthouse-core/test/report/html/renderer/base64-test.js diff --git a/build/build-treemap.js b/build/build-treemap.js index 5e6f334b27be..8743c02d28cf 100644 --- a/build/build-treemap.js +++ b/build/build-treemap.js @@ -57,6 +57,7 @@ async function run() { /* eslint-enable max-len */ buildStrings(), {path: '../../lighthouse-core/report/html/renderer/i18n.js'}, + {path: '../../lighthouse-core/report/html/renderer/base64.js'}, {path: 'src/**/*'}, ], assets: [ diff --git a/lighthouse-core/report/html/html-report-assets.js b/lighthouse-core/report/html/html-report-assets.js index 32863013a395..a176e5be4472 100644 --- a/lighthouse-core/report/html/html-report-assets.js +++ b/lighthouse-core/report/html/html-report-assets.js @@ -23,6 +23,7 @@ const REPORT_JAVASCRIPT = [ fs.readFileSync(__dirname + '/renderer/pwa-category-renderer.js', 'utf8'), fs.readFileSync(__dirname + '/renderer/report-renderer.js', 'utf8'), fs.readFileSync(__dirname + '/renderer/i18n.js', 'utf8'), + fs.readFileSync(__dirname + '/renderer/base64.js', 'utf8'), ].join(';\n'); const REPORT_CSS = fs.readFileSync(__dirname + '/report-styles.css', 'utf8'); const REPORT_TEMPLATES = fs.readFileSync(__dirname + '/templates.html', 'utf8'); diff --git a/lighthouse-core/report/html/renderer/base64.js b/lighthouse-core/report/html/renderer/base64.js new file mode 100644 index 000000000000..89211d33895c --- /dev/null +++ b/lighthouse-core/report/html/renderer/base64.js @@ -0,0 +1,68 @@ +/** + * @license Copyright 2020 The Lighthouse Authors. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + */ +'use strict'; + +/* global self btoa atob */ + +const encode = typeof btoa !== 'undefined' ? + btoa : + /** @param {string} str */ + (str) => Buffer.from(str).toString('base64'); +const decode = typeof btoa !== 'undefined' ? 
+ atob : + /** @param {string} str */ + (str) => Buffer.from(str, 'base64').toString(); + +/** + * @param {string} string + */ +function toBinary(string) { + // const codePoints = [...string].map(c => c.codePointAt(0) || 0); + // return encode(String.fromCharCode(...new Uint8Array(codePoints))); + + const chunkSize = 10000; + let str = ''; + for (let i = 0; i < string.length; i += chunkSize) { + const codeUnits = new Uint16Array(Math.min(chunkSize, string.length - i)); + for (let i = 0; i < codeUnits.length; i++) { + codeUnits[i] = string.charCodeAt(i); + } + str += String.fromCharCode(...new Uint8Array(codeUnits.buffer)); + } + + return encode(str); +} + +/** + * @param {string} encoded + */ +function fromBinary(encoded) { + // const binary = decode(encoded); + // const bytes = new Uint8Array(binary.length); + // for (let i = 0; i < bytes.length; i++) { + // bytes[i] = binary.charCodeAt(i); + // } + // return String.fromCodePoint(...new Uint16Array(bytes.buffer)); + + const chunkSize = 10000; + let str = ''; + const decoded = decode(encoded); + for (let i = 0; i < decoded.length; i += chunkSize) { + const bytes = new Uint8Array(Math.min(chunkSize, decoded.length - i)); + for (let j = 0; j < bytes.length; j++) { + bytes[j] = decoded.charCodeAt(i + j); + } + str += String.fromCharCode(...new Uint16Array(bytes.buffer)); + } + + return str; +} + +if (typeof module !== 'undefined' && module.exports) { + module.exports = {toBinary, fromBinary}; +} else { + self.Base64 = {toBinary, fromBinary}; +} diff --git a/lighthouse-core/report/html/renderer/report-ui-features.js b/lighthouse-core/report/html/renderer/report-ui-features.js index 9debb07f5844..1d79c5a2a56b 100644 --- a/lighthouse-core/report/html/renderer/report-ui-features.js +++ b/lighthouse-core/report/html/renderer/report-ui-features.js @@ -23,7 +23,7 @@ * the report. */ -/* globals getFilenamePrefix Util ElementScreenshotRenderer */ +/* globals getFilenamePrefix Util Base64 ElementScreenshotRenderer */ /** @typedef {import('./dom')} DOM */ @@ -573,6 +573,7 @@ class ReportUIFeatures { const url = getAppsOrigin() + '/treemap/'; const windowName = `treemap-${json.requestedUrl}`; + method= 'url'; if (method === 'postMessage') { ReportUIFeatures.openTabAndSendData(treemapOptions, url, windowName); } else { @@ -603,7 +604,6 @@ class ReportUIFeatures { } }); - // The popup's window.name is keyed by version+url+fetchTime, so we reuse/select tabs correctly const popup = window.open(url, windowName); } @@ -616,21 +616,8 @@ class ReportUIFeatures { */ static openTabWithUrlData(data, url_, windowName) { const url = new URL(url_); - url.hash = toBinary(JSON.stringify(data)); - - // The popup's window.name is keyed by version+url+fetchTime, so we reuse/select tabs correctly + url.hash = Base64.toBinary(JSON.stringify(data)); window.open(url.toString(), windowName); - - /** - * @param {string} string - */ - function toBinary(string) { - const codeUnits = new Uint16Array(string.length); - for (let i = 0; i < codeUnits.length; i++) { - codeUnits[i] = string.charCodeAt(i); - } - return btoa(String.fromCharCode(...new Uint8Array(codeUnits.buffer))); - } } /** diff --git a/lighthouse-core/test/report/html/renderer/base64-test.js b/lighthouse-core/test/report/html/renderer/base64-test.js new file mode 100644 index 000000000000..ad22f06b2449 --- /dev/null +++ b/lighthouse-core/test/report/html/renderer/base64-test.js @@ -0,0 +1,29 @@ +/** + * @license Copyright 2021 The Lighthouse Authors. All Rights Reserved. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + */ +'use strict'; + +const Base64 = require('../../../../report/html/renderer/base64.js'); + +/* eslint-env jest */ + +describe('base64', () => { + /** @type {string} */ + function test(str) { + const roundtrip = Base64.fromBinary(Base64.toBinary(str)); + expect(roundtrip.length).toEqual(str.length); + expect(roundtrip).toEqual(str); + } + + it('works', () => { + test(''); + test('hello'); + test('{åß∂œ∑´}'); + test('Some examples of emoji are 😃, 🧘🏻‍♂️, 🌍, 🍞, 🚗, 📞, 🎉, ♥️, 🍆, and 🏁.'); + test('.'.repeat(125183)); + test('😃'.repeat(125183)); + test(JSON.stringify(require('../../../../../lighthouse-treemap/app/debug.json'))); + }); +}); diff --git a/lighthouse-treemap/app/src/main.js b/lighthouse-treemap/app/src/main.js index 28329a2bc63b..9712dd18fc5e 100644 --- a/lighthouse-treemap/app/src/main.js +++ b/lighthouse-treemap/app/src/main.js @@ -7,7 +7,7 @@ /* eslint-env browser */ -/* globals I18n webtreemap strings TreemapUtil Tabulator Cell Row */ +/* globals I18n webtreemap strings TreemapUtil Base64 Tabulator Cell Row */ const DUPLICATED_MODULES_IGNORE_THRESHOLD = 1024; const DUPLICATED_MODULES_IGNORE_ROOT_RATIO = 0.01; @@ -734,24 +734,12 @@ function showError(message) { document.body.textContent = message; } -/** - * @param {string} encoded - */ -function fromBinary(encoded) { - const binary = atob(encoded); - const bytes = new Uint8Array(binary.length); - for (let i = 0; i < bytes.length; i++) { - bytes[i] = binary.charCodeAt(i); - } - return String.fromCharCode(...new Uint16Array(bytes.buffer)); -} - async function main() { /** @type {Record} */ let params = {}; if (Object.fromEntries) { const queryParams = new URLSearchParams(window.location.search); - const hashParams = location.hash ? JSON.parse(fromBinary(location.hash.substr(1))) : {}; + const hashParams = location.hash ? 
JSON.parse(Base64.fromBinary(location.hash.substr(1))) : {}; params = { ...Object.fromEntries(queryParams.entries()), ...hashParams, diff --git a/lighthouse-treemap/types/treemap.d.ts b/lighthouse-treemap/types/treemap.d.ts index 6043d599dc64..17fbbb7acecb 100644 --- a/lighthouse-treemap/types/treemap.d.ts +++ b/lighthouse-treemap/types/treemap.d.ts @@ -1,4 +1,5 @@ import _TreemapUtil = require('../app/src/util.js'); +import _Base64 = require('../../lighthouse-core/report/html/renderer/base64.js'); export type Strings = Record; @@ -33,6 +34,7 @@ declare global { sort(data: any): void; }; var TreemapUtil: typeof _TreemapUtil; + var Base64: typeof _Base64; var strings: Strings; interface Window { diff --git a/types/html-renderer.d.ts b/types/html-renderer.d.ts index daf4c75ed4ff..c0a10ca39325 100644 --- a/types/html-renderer.d.ts +++ b/types/html-renderer.d.ts @@ -16,6 +16,7 @@ import _PwaCategoryRenderer = require('../lighthouse-core/report/html/renderer/p import _ReportRenderer = require('../lighthouse-core/report/html/renderer/report-renderer.js'); import _ReportUIFeatures = require('../lighthouse-core/report/html/renderer/report-ui-features.js'); import _Util = require('../lighthouse-core/report/html/renderer/util.js'); +import _Base64 = require('../lighthouse-core/report/html/renderer/base64.js'); import _prepareLabData = require('../lighthouse-core/report/html/renderer/psi.js'); import _FileNamer = require('../lighthouse-core/lib/file-namer.js'); @@ -33,6 +34,7 @@ declare global { var ReportRenderer: typeof _ReportRenderer; var ReportUIFeatures: typeof _ReportUIFeatures; var Util: typeof _Util; + var Base64: typeof _Base64; var prepareLabData: typeof _prepareLabData; interface Window { From 267d5783ee7146b35127c962282745392cab172a Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Wed, 19 May 2021 16:21:06 -0700 Subject: [PATCH 02/32] ok --- .../report/html/renderer/base64.js | 38 +++++-------------- .../test/report/html/renderer/base64-test.js | 1 + lighthouse-treemap/app/src/main.js | 15 +++----- 3 files changed, 17 insertions(+), 37 deletions(-) diff --git a/lighthouse-core/report/html/renderer/base64.js b/lighthouse-core/report/html/renderer/base64.js index 89211d33895c..b14e2cae989a 100644 --- a/lighthouse-core/report/html/renderer/base64.js +++ b/lighthouse-core/report/html/renderer/base64.js @@ -20,45 +20,27 @@ const decode = typeof btoa !== 'undefined' ? 
* @param {string} string */ function toBinary(string) { - // const codePoints = [...string].map(c => c.codePointAt(0) || 0); - // return encode(String.fromCharCode(...new Uint8Array(codePoints))); - + const bytes = new TextEncoder().encode(string); + let bytesAsString = ''; const chunkSize = 10000; - let str = ''; - for (let i = 0; i < string.length; i += chunkSize) { - const codeUnits = new Uint16Array(Math.min(chunkSize, string.length - i)); - for (let i = 0; i < codeUnits.length; i++) { - codeUnits[i] = string.charCodeAt(i); - } - str += String.fromCharCode(...new Uint8Array(codeUnits.buffer)); + for (let i = 0; i < bytes.length; i += chunkSize) { + bytesAsString += String.fromCharCode(...new Uint8Array(bytes.buffer.slice(i, i + chunkSize))); } - return encode(str); + const encoded = encode(bytesAsString); + return encoded; } /** * @param {string} encoded */ function fromBinary(encoded) { - // const binary = decode(encoded); - // const bytes = new Uint8Array(binary.length); - // for (let i = 0; i < bytes.length; i++) { - // bytes[i] = binary.charCodeAt(i); - // } - // return String.fromCodePoint(...new Uint16Array(bytes.buffer)); - - const chunkSize = 10000; - let str = ''; const decoded = decode(encoded); - for (let i = 0; i < decoded.length; i += chunkSize) { - const bytes = new Uint8Array(Math.min(chunkSize, decoded.length - i)); - for (let j = 0; j < bytes.length; j++) { - bytes[j] = decoded.charCodeAt(i + j); - } - str += String.fromCharCode(...new Uint16Array(bytes.buffer)); + const bytes = new Uint8Array(decoded.length); + for (let i = 0; i < bytes.length; i++) { + bytes[i] = decoded.charCodeAt(i); } - - return str; + return new TextDecoder().decode(bytes); } if (typeof module !== 'undefined' && module.exports) { diff --git a/lighthouse-core/test/report/html/renderer/base64-test.js b/lighthouse-core/test/report/html/renderer/base64-test.js index ad22f06b2449..2b9cfdf2cfaa 100644 --- a/lighthouse-core/test/report/html/renderer/base64-test.js +++ b/lighthouse-core/test/report/html/renderer/base64-test.js @@ -20,6 +20,7 @@ describe('base64', () => { it('works', () => { test(''); test('hello'); + test('😃'); test('{åß∂œ∑´}'); test('Some examples of emoji are 😃, 🧘🏻‍♂️, 🌍, 🍞, 🚗, 📞, 🎉, ♥️, 🍆, and 🏁.'); test('.'.repeat(125183)); diff --git a/lighthouse-treemap/app/src/main.js b/lighthouse-treemap/app/src/main.js index 9712dd18fc5e..f27f54398e9c 100644 --- a/lighthouse-treemap/app/src/main.js +++ b/lighthouse-treemap/app/src/main.js @@ -735,16 +735,13 @@ function showError(message) { } async function main() { + const queryParams = new URLSearchParams(window.location.search); + const hashParams = location.hash ? JSON.parse(Base64.fromBinary(location.hash.substr(1))) : {}; /** @type {Record} */ - let params = {}; - if (Object.fromEntries) { - const queryParams = new URLSearchParams(window.location.search); - const hashParams = location.hash ? JSON.parse(Base64.fromBinary(location.hash.substr(1))) : {}; - params = { - ...Object.fromEntries(queryParams.entries()), - ...hashParams, - }; - } + const params = { + ...Object.fromEntries(queryParams.entries()), + ...hashParams, + }; if (window.__treemapOptions) { // Prefer the hardcoded options from a saved HTML file above all. 
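A minimal sketch of the UTF-8-safe round-trip that base64.js settles on in the patches above, assuming a browser-like environment where TextEncoder, TextDecoder, btoa, and atob are available; the toBase64/fromBase64 names are illustrative only, and the chunked String.fromCharCode spread mirrors the patch's loop to stay under engine argument-count limits on large byte arrays.

// Sketch: encode an arbitrary JS string (including emoji) to base64 and back.
function toBase64(string) {
  // TextEncoder yields UTF-8 bytes, so surrogate pairs/emoji survive the trip.
  const bytes = new TextEncoder().encode(string);
  let binaryString = '';
  const chunkSize = 5000; // spread in chunks to avoid argument-count limits
  for (let i = 0; i < bytes.length; i += chunkSize) {
    binaryString += String.fromCharCode(...bytes.subarray(i, i + chunkSize));
  }
  return btoa(binaryString);
}

function fromBase64(encoded) {
  const binaryString = atob(encoded);
  const bytes = new Uint8Array(binaryString.length);
  for (let i = 0; i < bytes.length; i++) {
    bytes[i] = binaryString.charCodeAt(i);
  }
  return new TextDecoder().decode(bytes);
}

// Usage: fromBase64(toBase64(JSON.stringify(options))) round-trips the options object.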
From d7281c0d7a4dbd4addeded1dda71676bcbcb338e Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Wed, 19 May 2021 18:47:00 -0700 Subject: [PATCH 03/32] rm --- lighthouse-core/report/html/renderer/report-ui-features.js | 1 - 1 file changed, 1 deletion(-) diff --git a/lighthouse-core/report/html/renderer/report-ui-features.js b/lighthouse-core/report/html/renderer/report-ui-features.js index 1d79c5a2a56b..49f9243a163a 100644 --- a/lighthouse-core/report/html/renderer/report-ui-features.js +++ b/lighthouse-core/report/html/renderer/report-ui-features.js @@ -573,7 +573,6 @@ class ReportUIFeatures { const url = getAppsOrigin() + '/treemap/'; const windowName = `treemap-${json.requestedUrl}`; - method= 'url'; if (method === 'postMessage') { ReportUIFeatures.openTabAndSendData(treemapOptions, url, windowName); } else { From ff0bfd16a382b19d82925b42785bd4138b5fac06 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Thu, 20 May 2021 12:31:51 -0700 Subject: [PATCH 04/32] no btoa lol --- .../report/html/renderer/base64.js | 28 +++++-------------- 1 file changed, 7 insertions(+), 21 deletions(-) diff --git a/lighthouse-core/report/html/renderer/base64.js b/lighthouse-core/report/html/renderer/base64.js index b14e2cae989a..b5cc193b44c0 100644 --- a/lighthouse-core/report/html/renderer/base64.js +++ b/lighthouse-core/report/html/renderer/base64.js @@ -5,40 +5,26 @@ */ 'use strict'; -/* global self btoa atob */ - -const encode = typeof btoa !== 'undefined' ? - btoa : - /** @param {string} str */ - (str) => Buffer.from(str).toString('base64'); -const decode = typeof btoa !== 'undefined' ? - atob : - /** @param {string} str */ - (str) => Buffer.from(str, 'base64').toString(); - /** * @param {string} string */ function toBinary(string) { const bytes = new TextEncoder().encode(string); - let bytesAsString = ''; + let binaryString = ''; const chunkSize = 10000; for (let i = 0; i < bytes.length; i += chunkSize) { - bytesAsString += String.fromCharCode(...new Uint8Array(bytes.buffer.slice(i, i + chunkSize))); + binaryString += String.fromCharCode(...new Uint8Array(bytes.buffer.slice(i, i + chunkSize))); } - - const encoded = encode(bytesAsString); - return encoded; + return binaryString; } /** - * @param {string} encoded + * @param {string} binaryString */ -function fromBinary(encoded) { - const decoded = decode(encoded); - const bytes = new Uint8Array(decoded.length); +function fromBinary(binaryString) { + const bytes = new Uint8Array(binaryString.length); for (let i = 0; i < bytes.length; i++) { - bytes[i] = decoded.charCodeAt(i); + bytes[i] = binaryString.charCodeAt(i); } return new TextDecoder().decode(bytes); } From c417fcbc409944e1ed3d10267b1ac7dc7dc8676d Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Thu, 20 May 2021 12:49:06 -0700 Subject: [PATCH 05/32] benchmark --- lighthouse-core/report/html/renderer/base64.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lighthouse-core/report/html/renderer/base64.js b/lighthouse-core/report/html/renderer/base64.js index b5cc193b44c0..37aa8e0f5fb2 100644 --- a/lighthouse-core/report/html/renderer/base64.js +++ b/lighthouse-core/report/html/renderer/base64.js @@ -11,7 +11,9 @@ function toBinary(string) { const bytes = new TextEncoder().encode(string); let binaryString = ''; - const chunkSize = 10000; + // This is ~25% faster than building the string one character at a time. 
+ // https://jsbench.me/2gkoxazvjl + const chunkSize = 5000; for (let i = 0; i < bytes.length; i += chunkSize) { binaryString += String.fromCharCode(...new Uint8Array(bytes.buffer.slice(i, i + chunkSize))); } From 8ac3a39028efa2fce8a84769bb06086be36add3f Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Thu, 20 May 2021 13:05:16 -0700 Subject: [PATCH 06/32] lol --- lighthouse-core/report/html/renderer/base64.js | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/lighthouse-core/report/html/renderer/base64.js b/lighthouse-core/report/html/renderer/base64.js index 37aa8e0f5fb2..68aeb47c8e2e 100644 --- a/lighthouse-core/report/html/renderer/base64.js +++ b/lighthouse-core/report/html/renderer/base64.js @@ -5,6 +5,17 @@ */ 'use strict'; +/* global self btoa atob */ + +const encode = typeof btoa !== 'undefined' ? + btoa : + /** @param {string} str */ + (str) => Buffer.from(str).toString('base64'); +const decode = typeof btoa !== 'undefined' ? + atob : + /** @param {string} str */ + (str) => Buffer.from(str, 'base64').toString(); + /** * @param {string} string */ @@ -17,13 +28,14 @@ function toBinary(string) { for (let i = 0; i < bytes.length; i += chunkSize) { binaryString += String.fromCharCode(...new Uint8Array(bytes.buffer.slice(i, i + chunkSize))); } - return binaryString; + return encode(binaryString); } /** - * @param {string} binaryString + * @param {string} encoded */ -function fromBinary(binaryString) { +function fromBinary(encoded) { + const binaryString = decode(encoded); const bytes = new Uint8Array(binaryString.length); for (let i = 0; i < bytes.length; i++) { bytes[i] = binaryString.charCodeAt(i); From 720c427b11d6d9e6ce0ddfa2a03d0fa6fd18d08a Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Thu, 20 May 2021 14:13:43 -0700 Subject: [PATCH 07/32] test --- lighthouse-treemap/test/treemap-test-pptr.js | 30 ++++++++++++++++---- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git a/lighthouse-treemap/test/treemap-test-pptr.js b/lighthouse-treemap/test/treemap-test-pptr.js index 9283552e15cd..e3398b77970b 100644 --- a/lighthouse-treemap/test/treemap-test-pptr.js +++ b/lighthouse-treemap/test/treemap-test-pptr.js @@ -9,6 +9,7 @@ /* global document, window */ +const fs = require('fs'); const puppeteer = require('../../node_modules/puppeteer/index.js'); const {server} = require('../../lighthouse-cli/test/fixtures/static-server.js'); const portNumber = 10200; @@ -77,23 +78,42 @@ describe('Lighthouse Treemap', () => { await new Promise(resolve => browser.on('targetcreated', resolve)); const target = (await browser.targets()).find(target => target.url() === treemapUrl); page = await target.page(); - await openerPage.close(); await page.waitForFunction( () => window.__treemapOptions || document.body.textContent.startsWith('Error')); } it('from window postMessage', async () => { await loadFromPostMessage(debugOptions); - const options = await page.evaluate(() => window.__treemapOptions); - expect(options.lhr.requestedUrl).toBe(debugOptions.lhr.requestedUrl); + const optionsInPage = await page.evaluate(() => window.__treemapOptions); + expect(optionsInPage.lhr.requestedUrl).toBe(debugOptions.lhr.requestedUrl); }); it('handles errors', async () => { await loadFromPostMessage({}); - const options = await page.evaluate(() => window.__treemapOptions); - expect(options).toBeUndefined(); + const optionsInPage = await page.evaluate(() => window.__treemapOptions); + expect(optionsInPage).toBeUndefined(); const error = await page.evaluate(() => 
document.body.textContent); expect(error).toBe('Error: Invalid options'); }); + + async function loadFromFragment(options) { + const json = JSON.stringify(options); + const encoded = await page.evaluate(` + ${fs.readFileSync( + require.resolve('../../lighthouse-core/report/html/renderer/base64.js'), 'utf-8')} + Base64.toBinary(${JSON.stringify(json)}); + `); + await page.goto(`${treemapUrl}#${encoded}`); + await page.waitForFunction( + () => window.__treemapOptions || document.body.textContent.startsWith('Error')); + } + + it('from encoded fragment', async () => { + const options = JSON.parse(JSON.stringify(debugOptions)); + options.lhr.requestedUrl += '😃😃😃'; + await loadFromFragment(options); + const optionsInPage = await page.evaluate(() => window.__treemapOptions); + expect(optionsInPage.lhr.requestedUrl).toBe(options.lhr.requestedUrl); + }); }); }); From f0e89490b0a59fef6376e8c7c386e25b903ba5e7 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Thu, 20 May 2021 14:47:05 -0700 Subject: [PATCH 08/32] pako --- build/build-treemap.js | 1 + .../report/html/html-report-assets.js | 1 + .../report/html/renderer/base64.js | 43 ++++++++++++++++--- .../html/renderer/report-ui-features.js | 6 +++ lighthouse-treemap/test/treemap-test-pptr.js | 1 + package.json | 1 + types/html-renderer.d.ts | 1 + yarn.lock | 5 +++ 8 files changed, 52 insertions(+), 7 deletions(-) diff --git a/build/build-treemap.js b/build/build-treemap.js index 8743c02d28cf..6fc73338fbb1 100644 --- a/build/build-treemap.js +++ b/build/build-treemap.js @@ -54,6 +54,7 @@ async function run() { fs.readFileSync(require.resolve('tabulator-tables/dist/js/modules/sort.js'), 'utf8'), fs.readFileSync(require.resolve('tabulator-tables/dist/js/modules/format.js'), 'utf8'), fs.readFileSync(require.resolve('tabulator-tables/dist/js/modules/resize_columns.js'), 'utf8'), + fs.readFileSync(require.resolve('pako/dist/pako_inflate.js'), 'utf-8'), /* eslint-enable max-len */ buildStrings(), {path: '../../lighthouse-core/report/html/renderer/i18n.js'}, diff --git a/lighthouse-core/report/html/html-report-assets.js b/lighthouse-core/report/html/html-report-assets.js index a176e5be4472..e4f25e4234b9 100644 --- a/lighthouse-core/report/html/html-report-assets.js +++ b/lighthouse-core/report/html/html-report-assets.js @@ -24,6 +24,7 @@ const REPORT_JAVASCRIPT = [ fs.readFileSync(__dirname + '/renderer/report-renderer.js', 'utf8'), fs.readFileSync(__dirname + '/renderer/i18n.js', 'utf8'), fs.readFileSync(__dirname + '/renderer/base64.js', 'utf8'), + fs.readFileSync(require.resolve('pako/dist/pako_deflate.js'), 'utf-8'), ].join(';\n'); const REPORT_CSS = fs.readFileSync(__dirname + '/report-styles.css', 'utf8'); const REPORT_TEMPLATES = fs.readFileSync(__dirname + '/templates.html', 'utf8'); diff --git a/lighthouse-core/report/html/renderer/base64.js b/lighthouse-core/report/html/renderer/base64.js index 68aeb47c8e2e..2f5089d93481 100644 --- a/lighthouse-core/report/html/renderer/base64.js +++ b/lighthouse-core/report/html/renderer/base64.js @@ -5,7 +5,7 @@ */ 'use strict'; -/* global self btoa atob */ +/* global self btoa atob pako */ const encode = typeof btoa !== 'undefined' ? btoa : @@ -16,11 +16,38 @@ const decode = typeof btoa !== 'undefined' ? /** @param {string} str */ (str) => Buffer.from(str, 'base64').toString(); +// /** +// * @param {string} string +// */ +// function toBinary(string) { +// const bytes = new TextEncoder().encode(string); +// let binaryString = ''; +// // This is ~25% faster than building the string one character at a time. 
+// // https://jsbench.me/2gkoxazvjl +// const chunkSize = 5000; +// for (let i = 0; i < bytes.length; i += chunkSize) { +// binaryString += String.fromCharCode(...new Uint8Array(bytes.buffer.slice(i, i + chunkSize))); +// } +// return encode(binaryString); +// } + +// /** +// * @param {string} encoded +// */ +// function fromBinary(encoded) { +// const binaryString = decode(encoded); +// const bytes = new Uint8Array(binaryString.length); +// for (let i = 0; i < bytes.length; i++) { +// bytes[i] = binaryString.charCodeAt(i); +// } +// return new TextDecoder().decode(bytes); +// } + /** * @param {string} string */ -function toBinary(string) { - const bytes = new TextEncoder().encode(string); +function toBinaryGzip(string) { + const bytes = pako.gzip(string); let binaryString = ''; // This is ~25% faster than building the string one character at a time. // https://jsbench.me/2gkoxazvjl @@ -34,17 +61,19 @@ function toBinary(string) { /** * @param {string} encoded */ -function fromBinary(encoded) { +function fromBinaryGzip(encoded) { const binaryString = decode(encoded); const bytes = new Uint8Array(binaryString.length); for (let i = 0; i < bytes.length; i++) { bytes[i] = binaryString.charCodeAt(i); } - return new TextDecoder().decode(bytes); + return pako.ungzip(bytes, {to: 'string'}); } if (typeof module !== 'undefined' && module.exports) { - module.exports = {toBinary, fromBinary}; + // module.exports = {toBinary, fromBinary}; + module.exports = {toBinary: toBinaryGzip, fromBinary: fromBinaryGzip}; } else { - self.Base64 = {toBinary, fromBinary}; + // self.Base64 = {toBinary, fromBinary}; + self.Base64 = {toBinary: toBinaryGzip, fromBinary: fromBinaryGzip}; } diff --git a/lighthouse-core/report/html/renderer/report-ui-features.js b/lighthouse-core/report/html/renderer/report-ui-features.js index 49f9243a163a..ac44c3432b49 100644 --- a/lighthouse-core/report/html/renderer/report-ui-features.js +++ b/lighthouse-core/report/html/renderer/report-ui-features.js @@ -573,6 +573,12 @@ class ReportUIFeatures { const url = getAppsOrigin() + '/treemap/'; const windowName = `treemap-${json.requestedUrl}`; + // ~~~~~~~~~~~~~~~~~~~~ + // SUPER TODO HEY ME DONT PUSH THIS TO MASTER. 
+ // ~~~~~~~~~~~~~~~~~~~~ + method = 'url'; + treemapOptions.lhr.finalUrl += '😃😃'; + treemapOptions.lhr.requestedUrl += '😃😃'; if (method === 'postMessage') { ReportUIFeatures.openTabAndSendData(treemapOptions, url, windowName); } else { diff --git a/lighthouse-treemap/test/treemap-test-pptr.js b/lighthouse-treemap/test/treemap-test-pptr.js index e3398b77970b..48b8cc279f20 100644 --- a/lighthouse-treemap/test/treemap-test-pptr.js +++ b/lighthouse-treemap/test/treemap-test-pptr.js @@ -99,6 +99,7 @@ describe('Lighthouse Treemap', () => { async function loadFromFragment(options) { const json = JSON.stringify(options); const encoded = await page.evaluate(` + ${fs.readFileSync('pako/dist/pako_inflate.js')} ${fs.readFileSync( require.resolve('../../lighthouse-core/report/html/renderer/base64.js'), 'utf-8')} Base64.toBinary(${JSON.stringify(json)}); diff --git a/package.json b/package.json index 475b38f7f149..fda6687a438a 100644 --- a/package.json +++ b/package.json @@ -107,6 +107,7 @@ "@types/lodash.isequal": "^4.5.2", "@types/lodash.set": "^4.3.6", "@types/node": "*", + "@types/pako": "^1.0.1", "@types/puppeteer": "1.19.x", "@types/raven": "^2.5.1", "@types/resize-observer-browser": "^0.1.1", diff --git a/types/html-renderer.d.ts b/types/html-renderer.d.ts index c0a10ca39325..4e09ff436dc7 100644 --- a/types/html-renderer.d.ts +++ b/types/html-renderer.d.ts @@ -36,6 +36,7 @@ declare global { var Util: typeof _Util; var Base64: typeof _Base64; var prepareLabData: typeof _prepareLabData; + var pako: typeof import('pako'); interface Window { CategoryRenderer: typeof _CategoryRenderer; diff --git a/yarn.lock b/yarn.lock index 32184ba92b52..26519307be36 100644 --- a/yarn.lock +++ b/yarn.lock @@ -878,6 +878,11 @@ resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e" integrity sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA== +"@types/pako@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@types/pako/-/pako-1.0.1.tgz#33b237f3c9aff44d0f82fe63acffa4a365ef4a61" + integrity sha512-GdZbRSJ3Cv5fiwT6I0SQ3ckeN2PWNqxd26W9Z2fCK1tGrrasGy4puvNFtnddqH9UJFMQYXxEuuB7B8UK+LLwSg== + "@types/prettier@^2.0.0": version "2.2.3" resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.2.3.tgz#ef65165aea2924c9359205bf748865b8881753c0" From 6d6e4536ed207d2ccde9cad592c6fe0d7c7f4713 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Thu, 20 May 2021 14:53:00 -0700 Subject: [PATCH 09/32] fixtest --- lighthouse-treemap/test/treemap-test-pptr.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lighthouse-treemap/test/treemap-test-pptr.js b/lighthouse-treemap/test/treemap-test-pptr.js index 48b8cc279f20..648c6d183a3d 100644 --- a/lighthouse-treemap/test/treemap-test-pptr.js +++ b/lighthouse-treemap/test/treemap-test-pptr.js @@ -99,7 +99,7 @@ describe('Lighthouse Treemap', () => { async function loadFromFragment(options) { const json = JSON.stringify(options); const encoded = await page.evaluate(` - ${fs.readFileSync('pako/dist/pako_inflate.js')} + ${fs.readFileSync(require.resolve('pako/dist/pako_inflate.js'))} ${fs.readFileSync( require.resolve('../../lighthouse-core/report/html/renderer/base64.js'), 'utf-8')} Base64.toBinary(${JSON.stringify(json)}); From ef3e69ee1ad2de94defe427c67131a870c165ca5 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Thu, 20 May 2021 14:55:13 -0700 Subject: [PATCH 10/32] test --- lighthouse-treemap/test/treemap-test-pptr.js 
| 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lighthouse-treemap/test/treemap-test-pptr.js b/lighthouse-treemap/test/treemap-test-pptr.js index 648c6d183a3d..f86a35c3832a 100644 --- a/lighthouse-treemap/test/treemap-test-pptr.js +++ b/lighthouse-treemap/test/treemap-test-pptr.js @@ -99,7 +99,7 @@ describe('Lighthouse Treemap', () => { async function loadFromFragment(options) { const json = JSON.stringify(options); const encoded = await page.evaluate(` - ${fs.readFileSync(require.resolve('pako/dist/pako_inflate.js'))} + ${fs.readFileSync(require.resolve('pako/dist/pako_deflate.js'))} ${fs.readFileSync( require.resolve('../../lighthouse-core/report/html/renderer/base64.js'), 'utf-8')} Base64.toBinary(${JSON.stringify(json)}); From c9c60df54d9e9509fb1fbd7e1cc750355969efb5 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Thu, 20 May 2021 15:05:48 -0700 Subject: [PATCH 11/32] minify --- lighthouse-core/report/html/html-report-assets.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lighthouse-core/report/html/html-report-assets.js b/lighthouse-core/report/html/html-report-assets.js index e4f25e4234b9..65ea0471fe3a 100644 --- a/lighthouse-core/report/html/html-report-assets.js +++ b/lighthouse-core/report/html/html-report-assets.js @@ -24,7 +24,7 @@ const REPORT_JAVASCRIPT = [ fs.readFileSync(__dirname + '/renderer/report-renderer.js', 'utf8'), fs.readFileSync(__dirname + '/renderer/i18n.js', 'utf8'), fs.readFileSync(__dirname + '/renderer/base64.js', 'utf8'), - fs.readFileSync(require.resolve('pako/dist/pako_deflate.js'), 'utf-8'), + fs.readFileSync(require.resolve('pako/dist/pako_deflate.min.js'), 'utf-8'), ].join(';\n'); const REPORT_CSS = fs.readFileSync(__dirname + '/report-styles.css', 'utf8'); const REPORT_TEMPLATES = fs.readFileSync(__dirname + '/templates.html', 'utf8'); From a63827021389eca49924cb735df648b31e1250c6 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Thu, 20 May 2021 15:25:23 -0700 Subject: [PATCH 12/32] tweak --- lighthouse-core/report/html/html-report-assets.js | 2 +- lighthouse-core/report/html/renderer/base64.js | 5 +++-- lighthouse-treemap/app/src/main.js | 5 +++++ 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/lighthouse-core/report/html/html-report-assets.js b/lighthouse-core/report/html/html-report-assets.js index 65ea0471fe3a..79e14f174eb3 100644 --- a/lighthouse-core/report/html/html-report-assets.js +++ b/lighthouse-core/report/html/html-report-assets.js @@ -23,8 +23,8 @@ const REPORT_JAVASCRIPT = [ fs.readFileSync(__dirname + '/renderer/pwa-category-renderer.js', 'utf8'), fs.readFileSync(__dirname + '/renderer/report-renderer.js', 'utf8'), fs.readFileSync(__dirname + '/renderer/i18n.js', 'utf8'), - fs.readFileSync(__dirname + '/renderer/base64.js', 'utf8'), fs.readFileSync(require.resolve('pako/dist/pako_deflate.min.js'), 'utf-8'), + fs.readFileSync(__dirname + '/renderer/base64.js', 'utf8'), ].join(';\n'); const REPORT_CSS = fs.readFileSync(__dirname + '/report-styles.css', 'utf8'); const REPORT_TEMPLATES = fs.readFileSync(__dirname + '/templates.html', 'utf8'); diff --git a/lighthouse-core/report/html/renderer/base64.js b/lighthouse-core/report/html/renderer/base64.js index 2f5089d93481..abb950f6c4b7 100644 --- a/lighthouse-core/report/html/renderer/base64.js +++ b/lighthouse-core/report/html/renderer/base64.js @@ -15,6 +15,7 @@ const decode = typeof btoa !== 'undefined' ? atob : /** @param {string} str */ (str) => Buffer.from(str, 'base64').toString(); +const pako_ = typeof pako !== 'undefined' ? 
pako : require('pako'); // /** // * @param {string} string @@ -47,7 +48,7 @@ const decode = typeof btoa !== 'undefined' ? * @param {string} string */ function toBinaryGzip(string) { - const bytes = pako.gzip(string); + const bytes = pako_.gzip(string); let binaryString = ''; // This is ~25% faster than building the string one character at a time. // https://jsbench.me/2gkoxazvjl @@ -67,7 +68,7 @@ function fromBinaryGzip(encoded) { for (let i = 0; i < bytes.length; i++) { bytes[i] = binaryString.charCodeAt(i); } - return pako.ungzip(bytes, {to: 'string'}); + return pako_.ungzip(bytes, {to: 'string'}); } if (typeof module !== 'undefined' && module.exports) { diff --git a/lighthouse-treemap/app/src/main.js b/lighthouse-treemap/app/src/main.js index 358565b2c2c5..3a9b3b7d5bc9 100644 --- a/lighthouse-treemap/app/src/main.js +++ b/lighthouse-treemap/app/src/main.js @@ -900,6 +900,11 @@ async function main() { } else if ('debug' in params) { const response = await fetch('debug.json'); app.init(await response.json()); + } else if (params.lhr) { + const options = { + lhr: params.lhr, + }; + app.init(options); } else if (params.gist) { let json; let options; From 9eb78f69191b4f709c2f9bb293c1573315769804 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Thu, 20 May 2021 15:32:06 -0700 Subject: [PATCH 13/32] dont use minified --- lighthouse-core/report/html/html-report-assets.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lighthouse-core/report/html/html-report-assets.js b/lighthouse-core/report/html/html-report-assets.js index 79e14f174eb3..1506f2f4e8a3 100644 --- a/lighthouse-core/report/html/html-report-assets.js +++ b/lighthouse-core/report/html/html-report-assets.js @@ -23,7 +23,8 @@ const REPORT_JAVASCRIPT = [ fs.readFileSync(__dirname + '/renderer/pwa-category-renderer.js', 'utf8'), fs.readFileSync(__dirname + '/renderer/report-renderer.js', 'utf8'), fs.readFileSync(__dirname + '/renderer/i18n.js', 'utf8'), - fs.readFileSync(require.resolve('pako/dist/pako_deflate.min.js'), 'utf-8'), + // TODO: minified version breaks utf8 + fs.readFileSync(require.resolve('pako/dist/pako_deflate.js'), 'utf-8'), fs.readFileSync(__dirname + '/renderer/base64.js', 'utf8'), ].join(';\n'); const REPORT_CSS = fs.readFileSync(__dirname + '/report-styles.css', 'utf8'); From d985f61a883657000313dbf85728600aac0689f9 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Thu, 20 May 2021 16:10:24 -0700 Subject: [PATCH 14/32] gzip optional --- .../report/html/html-report-assets.js | 4 +- .../report/html/renderer/base64.js | 39 ++++++++++++++----- .../html/renderer/report-ui-features.js | 10 ++++- .../test/report/html/renderer/base64-test.js | 33 +++++++++------- lighthouse-treemap/app/src/main.js | 5 ++- lighthouse-treemap/test/treemap-test-pptr.js | 33 +++++++++++----- types/html-renderer.d.ts | 8 ++++ 7 files changed, 95 insertions(+), 37 deletions(-) diff --git a/lighthouse-core/report/html/html-report-assets.js b/lighthouse-core/report/html/html-report-assets.js index 1506f2f4e8a3..73c42ac30513 100644 --- a/lighthouse-core/report/html/html-report-assets.js +++ b/lighthouse-core/report/html/html-report-assets.js @@ -23,8 +23,8 @@ const REPORT_JAVASCRIPT = [ fs.readFileSync(__dirname + '/renderer/pwa-category-renderer.js', 'utf8'), fs.readFileSync(__dirname + '/renderer/report-renderer.js', 'utf8'), fs.readFileSync(__dirname + '/renderer/i18n.js', 'utf8'), - // TODO: minified version breaks utf8 - fs.readFileSync(require.resolve('pako/dist/pako_deflate.js'), 'utf-8'), + // // TODO: minified version 
breaks utf8 + // fs.readFileSync(require.resolve('pako/dist/pako_deflate.js'), 'utf-8'), fs.readFileSync(__dirname + '/renderer/base64.js', 'utf8'), ].join(';\n'); const REPORT_CSS = fs.readFileSync(__dirname + '/report-styles.css', 'utf8'); diff --git a/lighthouse-core/report/html/renderer/base64.js b/lighthouse-core/report/html/renderer/base64.js index abb950f6c4b7..fac9dc5c7e06 100644 --- a/lighthouse-core/report/html/renderer/base64.js +++ b/lighthouse-core/report/html/renderer/base64.js @@ -5,7 +5,7 @@ */ 'use strict'; -/* global self btoa atob pako */ +/* global self btoa atob pako CompressionStream Response */ const encode = typeof btoa !== 'undefined' ? btoa : @@ -15,7 +15,6 @@ const decode = typeof btoa !== 'undefined' ? atob : /** @param {string} str */ (str) => Buffer.from(str, 'base64').toString(); -const pako_ = typeof pako !== 'undefined' ? pako : require('pako'); // /** // * @param {string} string @@ -44,11 +43,27 @@ const pako_ = typeof pako !== 'undefined' ? pako : require('pako'); // return new TextDecoder().decode(bytes); // } +function getPako() { + return typeof pako !== 'undefined' ? pako : require('pako'); +} + /** * @param {string} string + * @param {{gzip: boolean}} options */ -function toBinaryGzip(string) { - const bytes = pako_.gzip(string); +async function toBinary(string, options) { + let bytes; + if (options.gzip) { + const cs = new CompressionStream('gzip'); + const writer = cs.writable.getWriter(); + writer.write(new TextEncoder().encode(string)); + writer.close(); + const compAb = await new Response(cs.readable).arrayBuffer(); + bytes = new Uint8Array(compAb); + } else { + bytes = new TextEncoder().encode(string); + } + let binaryString = ''; // This is ~25% faster than building the string one character at a time. // https://jsbench.me/2gkoxazvjl @@ -61,20 +76,24 @@ function toBinaryGzip(string) { /** * @param {string} encoded + * @param {{gzip: boolean}} options */ -function fromBinaryGzip(encoded) { +function fromBinary(encoded, options) { const binaryString = decode(encoded); const bytes = new Uint8Array(binaryString.length); for (let i = 0; i < bytes.length; i++) { bytes[i] = binaryString.charCodeAt(i); } - return pako_.ungzip(bytes, {to: 'string'}); + + if (options.gzip) { + return getPako().ungzip(bytes, {to: 'string'}); + } else { + return new TextDecoder().decode(bytes); + } } if (typeof module !== 'undefined' && module.exports) { - // module.exports = {toBinary, fromBinary}; - module.exports = {toBinary: toBinaryGzip, fromBinary: fromBinaryGzip}; + module.exports = {toBinary, fromBinary}; } else { - // self.Base64 = {toBinary, fromBinary}; - self.Base64 = {toBinary: toBinaryGzip, fromBinary: fromBinaryGzip}; + self.Base64 = {toBinary, fromBinary}; } diff --git a/lighthouse-core/report/html/renderer/report-ui-features.js b/lighthouse-core/report/html/renderer/report-ui-features.js index 720f9ea3d126..9cec0998ce1d 100644 --- a/lighthouse-core/report/html/renderer/report-ui-features.js +++ b/lighthouse-core/report/html/renderer/report-ui-features.js @@ -581,6 +581,7 @@ class ReportUIFeatures { method = 'url'; treemapOptions.lhr.finalUrl += '😃😃'; treemapOptions.lhr.requestedUrl += '😃😃'; + debugger; if (method === 'postMessage') { ReportUIFeatures.openTabAndSendData(treemapOptions, url, windowName); } else { @@ -621,9 +622,14 @@ class ReportUIFeatures { * @param {string} windowName * @protected */ - static openTabWithUrlData(data, url_, windowName) { + static async openTabWithUrlData(data, url_, windowName) { + debugger; const url = new URL(url_); - 
url.hash = Base64.toBinary(JSON.stringify(data)); + const gzip = Boolean(window.CompressionStream); + url.hash = await Base64.toBinary(JSON.stringify(data), { + gzip, + }); + if (gzip) url.searchParams.set('gzip', '1'); window.open(url.toString(), windowName); } diff --git a/lighthouse-core/test/report/html/renderer/base64-test.js b/lighthouse-core/test/report/html/renderer/base64-test.js index 2b9cfdf2cfaa..860af2a02570 100644 --- a/lighthouse-core/test/report/html/renderer/base64-test.js +++ b/lighthouse-core/test/report/html/renderer/base64-test.js @@ -11,20 +11,27 @@ const Base64 = require('../../../../report/html/renderer/base64.js'); describe('base64', () => { /** @type {string} */ - function test(str) { - const roundtrip = Base64.fromBinary(Base64.toBinary(str)); - expect(roundtrip.length).toEqual(str.length); - expect(roundtrip).toEqual(str); + async function test(str) { + for (const gzip of [false, true]) { + // Already tested in treemap-test-pptr.js + // TODO: can we test this in Node? + if (gzip) continue; + + const binary = await Base64.toBinary(str, {gzip}); + const roundtrip = Base64.fromBinary(binary, {gzip}); + expect(roundtrip.length).toEqual(str.length); + expect(roundtrip).toEqual(str); + } } - it('works', () => { - test(''); - test('hello'); - test('😃'); - test('{åß∂œ∑´}'); - test('Some examples of emoji are 😃, 🧘🏻‍♂️, 🌍, 🍞, 🚗, 📞, 🎉, ♥️, 🍆, and 🏁.'); - test('.'.repeat(125183)); - test('😃'.repeat(125183)); - test(JSON.stringify(require('../../../../../lighthouse-treemap/app/debug.json'))); + it('works', async () => { + await test(''); + await test('hello'); + await test('😃'); + await test('{åß∂œ∑´}'); + await test('Some examples of emoji are 😃, 🧘🏻‍♂️, 🌍, 🍞, 🚗, 📞, 🎉, ♥️, 🍆, and 🏁.'); + await test('.'.repeat(125183)); + await test('😃'.repeat(125183)); + await test(JSON.stringify(require('../../../../../lighthouse-treemap/app/debug.json'))); }); }); diff --git a/lighthouse-treemap/app/src/main.js b/lighthouse-treemap/app/src/main.js index 3a9b3b7d5bc9..057734e8ba3f 100644 --- a/lighthouse-treemap/app/src/main.js +++ b/lighthouse-treemap/app/src/main.js @@ -887,7 +887,10 @@ class LighthouseTreemap { async function main() { const app = new LighthouseTreemap(); const queryParams = new URLSearchParams(window.location.search); - const hashParams = location.hash ? JSON.parse(Base64.fromBinary(location.hash.substr(1))) : {}; + const gzip = queryParams.get('gzip') === '1'; + const hashParams = location.hash ? 
+ JSON.parse(Base64.fromBinary(location.hash.substr(1), {gzip})) : + {}; /** @type {Record} */ const params = { ...Object.fromEntries(queryParams.entries()), diff --git a/lighthouse-treemap/test/treemap-test-pptr.js b/lighthouse-treemap/test/treemap-test-pptr.js index b8ea86e51d78..5e9e1cde1236 100644 --- a/lighthouse-treemap/test/treemap-test-pptr.js +++ b/lighthouse-treemap/test/treemap-test-pptr.js @@ -100,23 +100,38 @@ describe('Lighthouse Treemap', () => { expect(error).toBe('Error: Invalid options'); }); - async function loadFromFragment(options) { - const json = JSON.stringify(options); + async function loadFromFragment(fragment) { + await page.goto(`${treemapUrl}#${fragment}`); + await page.waitForFunction( + () => window.__treemapOptions || document.body.textContent.startsWith('Error')); + } + + it('from encoded fragment (gzip)', async () => { + const options = JSON.parse(JSON.stringify(debugOptions)); + options.lhr.requestedUrl += '😃😃😃'; const encoded = await page.evaluate(` ${fs.readFileSync(require.resolve('pako/dist/pako_deflate.js'))} ${fs.readFileSync( require.resolve('../../lighthouse-core/report/html/renderer/base64.js'), 'utf-8')} - Base64.toBinary(${JSON.stringify(json)}); + Base64.toBinary(${JSON.stringify(options)}, {gzip: true}); `); - await page.goto(`${treemapUrl}#${encoded}`); - await page.waitForFunction( - () => window.__treemapOptions || document.body.textContent.startsWith('Error')); - } - it('from encoded fragment', async () => { + await loadFromFragment(encoded); + const optionsInPage = await page.evaluate(() => window.__treemapOptions); + expect(optionsInPage.lhr.requestedUrl).toBe(options.lhr.requestedUrl); + }); + + it('from encoded fragment (no gzip)', async () => { const options = JSON.parse(JSON.stringify(debugOptions)); options.lhr.requestedUrl += '😃😃😃'; - await loadFromFragment(options); + const encoded = await page.evaluate(` + ${fs.readFileSync(require.resolve('pako/dist/pako_deflate.js'))} + ${fs.readFileSync( + require.resolve('../../lighthouse-core/report/html/renderer/base64.js'), 'utf-8')} + Base64.toBinary(${JSON.stringify(options)}, {gzip: false}); + `); + + await loadFromFragment(encoded); const optionsInPage = await page.evaluate(() => window.__treemapOptions); expect(optionsInPage.lhr.requestedUrl).toBe(options.lhr.requestedUrl); }); diff --git a/types/html-renderer.d.ts b/types/html-renderer.d.ts index 4e09ff436dc7..614c02b4e229 100644 --- a/types/html-renderer.d.ts +++ b/types/html-renderer.d.ts @@ -37,6 +37,14 @@ declare global { var Base64: typeof _Base64; var prepareLabData: typeof _prepareLabData; var pako: typeof import('pako'); + var CompressionStream: { + prototype: CompressionStream, + new (format: string): CompressionStream, + }; + + interface CompressionStream extends GenericTransformStream { + readonly format: string; + } interface Window { CategoryRenderer: typeof _CategoryRenderer; From 13a467134afc0e99878f0ca99787d53100626c28 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Thu, 20 May 2021 16:19:04 -0700 Subject: [PATCH 15/32] upgrade puppeteer for compression stream --- .../html/renderer/report-ui-features.js | 2 - lighthouse-treemap/test/treemap-test-pptr.js | 3 +- package.json | 2 +- yarn.lock | 194 ++++++++++++------ 4 files changed, 132 insertions(+), 69 deletions(-) diff --git a/lighthouse-core/report/html/renderer/report-ui-features.js b/lighthouse-core/report/html/renderer/report-ui-features.js index 9cec0998ce1d..f4a41bc2f6db 100644 --- a/lighthouse-core/report/html/renderer/report-ui-features.js +++ 
b/lighthouse-core/report/html/renderer/report-ui-features.js @@ -581,7 +581,6 @@ class ReportUIFeatures { method = 'url'; treemapOptions.lhr.finalUrl += '😃😃'; treemapOptions.lhr.requestedUrl += '😃😃'; - debugger; if (method === 'postMessage') { ReportUIFeatures.openTabAndSendData(treemapOptions, url, windowName); } else { @@ -623,7 +622,6 @@ class ReportUIFeatures { * @protected */ static async openTabWithUrlData(data, url_, windowName) { - debugger; const url = new URL(url_); const gzip = Boolean(window.CompressionStream); url.hash = await Base64.toBinary(JSON.stringify(data), { diff --git a/lighthouse-treemap/test/treemap-test-pptr.js b/lighthouse-treemap/test/treemap-test-pptr.js index 5e9e1cde1236..2e1b408b65f5 100644 --- a/lighthouse-treemap/test/treemap-test-pptr.js +++ b/lighthouse-treemap/test/treemap-test-pptr.js @@ -10,7 +10,7 @@ /* global document, window */ const fs = require('fs'); -const puppeteer = require('../../node_modules/puppeteer/index.js'); +const puppeteer = require('puppeteer'); const {server} = require('../../lighthouse-cli/test/fixtures/static-server.js'); const portNumber = 10200; const treemapUrl = `http://localhost:${portNumber}/dist/gh-pages/treemap/index.html`; @@ -125,7 +125,6 @@ describe('Lighthouse Treemap', () => { const options = JSON.parse(JSON.stringify(debugOptions)); options.lhr.requestedUrl += '😃😃😃'; const encoded = await page.evaluate(` - ${fs.readFileSync(require.resolve('pako/dist/pako_deflate.js'))} ${fs.readFileSync( require.resolve('../../lighthouse-core/report/html/renderer/base64.js'), 'utf-8')} Base64.toBinary(${JSON.stringify(options)}, {gzip: false}); diff --git a/package.json b/package.json index efbef304f71b..cb8ea5ae0f4c 100644 --- a/package.json +++ b/package.json @@ -152,7 +152,7 @@ "package-json-versionify": "^1.0.4", "prettier": "^1.14.3", "pretty-json-stringify": "^0.0.2", - "puppeteer": "^1.19.0", + "puppeteer": "^9.1.1", "tabulator-tables": "^4.9.3", "terser": "^5.3.8", "typed-query-selector": "^2.4.0", diff --git a/yarn.lock b/yarn.lock index d6d365abc038..77fc0caec8d9 100644 --- a/yarn.lock +++ b/yarn.lock @@ -957,6 +957,13 @@ dependencies: "@types/yargs-parser" "*" +"@types/yauzl@^2.9.1": + version "2.9.1" + resolved "https://registry.yarnpkg.com/@types/yauzl/-/yauzl-2.9.1.tgz#d10f69f9f522eef3cf98e30afb684a1e1ec923af" + integrity sha512-A1b8SU4D10uoPjwb0lnHmmu8wZhR9d+9o2PKBQT2jU5YPTKsxac6M2qGAdY7VcL+dHHhARVUDmeg0rOrcd9EjA== + dependencies: + "@types/node" "*" + "@typescript-eslint/parser@^4.21.0": version "4.21.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.21.0.tgz#a227fc2af4001668c3e3f7415d4feee5093894c1" @@ -1096,12 +1103,12 @@ add-stream@^1.0.0: resolved "https://registry.yarnpkg.com/add-stream/-/add-stream-1.0.0.tgz#6a7990437ca736d5e1288db92bd3266d5f5cb2aa" integrity sha1-anmQQ3ynNtXhKI25K9MmbV9csqo= -agent-base@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-4.3.0.tgz#8165f01c436009bccad0b1d122f05ed770efc6ee" - integrity sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg== +agent-base@6: + version "6.0.2" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" + integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== dependencies: - es6-promisify "^5.0.0" + debug "4" aggregate-error@^3.0.0: version "3.0.1" @@ -1451,6 +1458,11 @@ base64-js@^1.0.2: resolved 
"https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.0.tgz#cab1e6118f051095e58b5281aea8c1cd22bfc0e3" integrity sha512-ccav/yGvoa80BQDljCxsmmQ3Xvx60/UpBIij5QN21W3wBi/hhIC9OoO+KLpu9IJTS9j4DRVJ3aDDF9cMSoa2lw== +base64-js@^1.3.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" + integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== + base@^0.11.1: version "0.11.2" resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" @@ -1479,6 +1491,15 @@ bl@^1.0.0: readable-stream "^2.3.5" safe-buffer "^5.1.1" +bl@^4.0.3: + version "4.1.0" + resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" + integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== + dependencies: + buffer "^5.5.0" + inherits "^2.0.4" + readable-stream "^3.4.0" + bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.11.9: version "4.12.0" resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" @@ -1761,6 +1782,14 @@ buffer@^5.1.0, buffer@~5.2.1: base64-js "^1.0.2" ieee754 "^1.1.4" +buffer@^5.2.1, buffer@^5.5.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" + integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== + dependencies: + base64-js "^1.3.1" + ieee754 "^1.1.13" + builtin-status-codes@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" @@ -1963,6 +1992,11 @@ chardet@^0.7.0: resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA== +chownr@^1.1.1: + version "1.1.4" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" + integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== + chrome-devtools-frontend@1.0.727089: version "1.0.727089" resolved "https://registry.yarnpkg.com/chrome-devtools-frontend/-/chrome-devtools-frontend-1.0.727089.tgz#5663edd32b445826c51a0d6332f5072b21f7b895" @@ -2179,7 +2213,7 @@ concat-map@0.0.1: resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= -concat-stream@^1.6.0, concat-stream@^1.6.1, concat-stream@^1.6.2, concat-stream@~1.6.0: +concat-stream@^1.6.0, concat-stream@^1.6.1, concat-stream@~1.6.0: version "1.6.2" resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== @@ -2628,27 +2662,27 @@ dateformat@^1.0.11, dateformat@^1.0.12: get-stdin "^4.0.1" meow "^3.3.0" -debug@^2.2.0, debug@^2.3.3, debug@^2.6.8, debug@^2.6.9: +debug@4, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1: + version "4.3.1" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" + integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== + dependencies: + ms 
"2.1.2" + +debug@^2.2.0, debug@^2.3.3, debug@^2.6.8: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" -debug@^3.0.1, debug@^3.1.0: +debug@^3.0.1: version "3.2.6" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== dependencies: ms "^2.1.1" -debug@^4.0.1, debug@^4.1.0, debug@^4.1.1: - version "4.3.1" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" - integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== - dependencies: - ms "2.1.2" - decamelize@^1.1.2, decamelize@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" @@ -2774,6 +2808,11 @@ devtools-protocol@0.0.859327: resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.859327.tgz#4ea4a9cfc8a4ba492f3ba57b4109d2c5fe99474a" integrity sha512-vRlLFY8Y2p3UnuDPRF0tsjHgXFM9JTS8T/p2F2YB9ukfpV+HByJU4kJc/Ks2/PozQ2bIQqmcYhDrSaFvs1Cy8Q== +devtools-protocol@0.0.869402: + version "0.0.869402" + resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.869402.tgz#03ade701761742e43ae4de5dc188bcd80f156d8d" + integrity sha512-VvlVYY+VDJe639yHs5PHISzdWTLL3Aw8rO4cvUtwvoxFd6FHbE4OpHHcde52M6096uYYazAmd4l0o5VuFRO2WA== + diff-sequences@^26.6.2: version "26.6.2" resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-26.6.2.tgz#48ba99157de1923412eed41db6b6d4aa9ca7c0b1" @@ -2905,7 +2944,7 @@ emoji-regex@^8.0.0: resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== -end-of-stream@^1.0.0, end-of-stream@^1.1.0: +end-of-stream@^1.0.0, end-of-stream@^1.1.0, end-of-stream@^1.4.1: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== @@ -2987,18 +3026,6 @@ es6-map@^0.1.5: es6-symbol "~3.1.1" event-emitter "~0.3.5" -es6-promise@^4.0.3: - version "4.2.8" - resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.2.8.tgz#4eb21594c972bc40553d276e510539143db53e0a" - integrity sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w== - -es6-promisify@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/es6-promisify/-/es6-promisify-5.0.0.tgz#5109d62f3e56ea967c4b63505aef08291c8a5203" - integrity sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM= - dependencies: - es6-promise "^4.0.3" - es6-set@^0.1.5, es6-set@~0.1.5: version "0.1.5" resolved "https://registry.yarnpkg.com/es6-set/-/es6-set-0.1.5.tgz#d2b3ec5d4d800ced818db538d28974db0a73ccb1" @@ -3345,15 +3372,16 @@ extglob@^2.0.4: snapdragon "^0.8.1" to-regex "^3.0.1" -extract-zip@^1.6.6: - version "1.7.0" - resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-1.7.0.tgz#556cc3ae9df7f452c493a0cfb51cc30277940927" - integrity 
sha512-xoh5G1W/PB0/27lXgMQyIhP5DSY/LhoCsOyZgb+6iMmRtCwVBo55uKaMoEYrDCKQhWvqEip5ZPKAc6eFNyf/MA== +extract-zip@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-2.0.1.tgz#663dca56fe46df890d5f131ef4a06d22bb8ba13a" + integrity sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg== dependencies: - concat-stream "^1.6.2" - debug "^2.6.9" - mkdirp "^0.5.4" + debug "^4.1.1" + get-stream "^5.1.0" yauzl "^2.10.0" + optionalDependencies: + "@types/yauzl" "^2.9.1" extsprintf@1.0.2: version "1.0.2" @@ -4027,13 +4055,13 @@ https-browserify@^1.0.0: resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= -https-proxy-agent@^2.2.1: - version "2.2.4" - resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz#4ee7a737abd92678a293d9b34a1af4d0d08c787b" - integrity sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg== +https-proxy-agent@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz#e2a90542abb68a762e0a0850f6c9edadfd8506b2" + integrity sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA== dependencies: - agent-base "^4.3.0" - debug "^3.1.0" + agent-base "6" + debug "4" human-signals@^1.1.1: version "1.1.1" @@ -4060,6 +4088,11 @@ idb-keyval@2.2.0: resolved "https://registry.yarnpkg.com/idb-keyval/-/idb-keyval-2.2.0.tgz#b28020d53e3cf1621e3ec605e57e5305f37b195e" integrity sha1-soAg1T488WIePsYF5X5TBfN7GV4= +ieee754@^1.1.13: + version "1.2.1" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" + integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== + ieee754@^1.1.4: version "1.1.12" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.12.tgz#50bf24e5b9c8bb98af4964c941cdb0918da7b60b" @@ -5632,11 +5665,6 @@ mime-types@^2.1.12, mime-types@^2.1.30, mime-types@~2.1.19: dependencies: mime-db "1.47.0" -mime@^2.0.3: - version "2.4.4" - resolved "https://registry.yarnpkg.com/mime/-/mime-2.4.4.tgz#bd7b91135fc6b01cde3e9bae33d659b63d8857e5" - integrity sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA== - mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" @@ -5687,7 +5715,7 @@ mkdirp-classic@^0.5.2: resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== -mkdirp@^0.5.4, mkdirp@~0.5.1: +mkdirp@~0.5.1: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== @@ -6442,10 +6470,10 @@ prompts@^2.0.1: kleur "^3.0.2" sisteransi "^1.0.0" -proxy-from-env@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.0.0.tgz#33c50398f70ea7eb96d21f7b817630a55791c7ee" - integrity sha1-M8UDmPcOp+uW0h97gXYwpVeRx+4= +proxy-from-env@^1.1.0: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" + integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== ps-list@^7.2.0: version "7.2.0" @@ -6499,19 +6527,23 @@ pupa@^2.0.1: dependencies: escape-goat "^2.0.0" -puppeteer@^1.19.0: - version "1.20.0" - resolved "https://registry.yarnpkg.com/puppeteer/-/puppeteer-1.20.0.tgz#e3d267786f74e1d87cf2d15acc59177f471bbe38" - integrity sha512-bt48RDBy2eIwZPrkgbcwHtb51mj2nKvHOPMaSH2IsWiv7lOG9k9zhaRzpDZafrk05ajMc3cu+lSQYYOfH2DkVQ== +puppeteer@^9.1.1: + version "9.1.1" + resolved "https://registry.yarnpkg.com/puppeteer/-/puppeteer-9.1.1.tgz#f74b7facf86887efd6c6b9fabb7baae6fdce012c" + integrity sha512-W+nOulP2tYd/ZG99WuZC/I5ljjQQ7EUw/jQGcIb9eu8mDlZxNY2SgcJXTLG9h5gRvqA3uJOe4hZXYsd3EqioMw== dependencies: debug "^4.1.0" - extract-zip "^1.6.6" - https-proxy-agent "^2.2.1" - mime "^2.0.3" + devtools-protocol "0.0.869402" + extract-zip "^2.0.0" + https-proxy-agent "^5.0.0" + node-fetch "^2.6.1" + pkg-dir "^4.2.0" progress "^2.0.1" - proxy-from-env "^1.0.0" - rimraf "^2.6.1" - ws "^6.1.0" + proxy-from-env "^1.1.0" + rimraf "^3.0.2" + tar-fs "^2.0.0" + unbzip2-stream "^1.3.3" + ws "^7.2.3" q@^1.4.1: version "1.5.1" @@ -6641,7 +6673,7 @@ read-pkg@^5.2.0: parse-json "^5.0.0" type-fest "^0.6.0" -"readable-stream@2 || 3", readable-stream@^3.5.0, readable-stream@^3.6.0: +"readable-stream@2 || 3", readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.5.0, readable-stream@^3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== @@ -6856,7 +6888,7 @@ reusify@^1.0.4: resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== -rimraf@^2.6.1, rimraf@^2.6.2: +rimraf@^2.6.2: version "2.6.3" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA== @@ -7555,6 +7587,16 @@ tabulator-tables@^4.9.3: resolved "https://registry.yarnpkg.com/tabulator-tables/-/tabulator-tables-4.9.3.tgz#89ea8f9bffc11ba9a789369b5165ac82da26f4f0" integrity sha512-iwwQqAEGGxlgrBpcmJJvMJrfjGLcCXOB3AOb/DGkXqBy1YKoYA36hIl7qXGp6Jo8dSkzFAlDT6pKLZgyhs9OnQ== +tar-fs@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" + integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== + dependencies: + chownr "^1.1.1" + mkdirp-classic "^0.5.2" + pump "^3.0.0" + tar-stream "^2.1.4" + tar-stream@^1.5.0: version "1.6.2" resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-1.6.2.tgz#8ea55dab37972253d9a9af90fdcd559ae435c555" @@ -7568,6 +7610,17 @@ tar-stream@^1.5.0: to-buffer "^1.1.1" xtend "^4.0.0" +tar-stream@^2.1.4: + version "2.2.0" + resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" + integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== + dependencies: + bl "^4.0.3" + end-of-stream "^1.4.1" + fs-constants "^1.0.0" + inherits "^2.0.3" + 
readable-stream "^3.1.1" + tempfile@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/tempfile/-/tempfile-1.1.1.tgz#5bcc4eaecc4ab2c707d8bc11d99ccc9a2cb287f2" @@ -7642,7 +7695,7 @@ through2@^3.0.1: dependencies: readable-stream "2 || 3" -through@2, "through@>=2.2.7 <3", through@^2.3.6: +through@2, "through@>=2.2.7 <3", through@^2.3.6, through@^2.3.8: version "2.3.8" resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= @@ -7899,6 +7952,14 @@ unbox-primitive@^1.0.0: has-symbols "^1.0.2" which-boxed-primitive "^1.0.2" +unbzip2-stream@^1.3.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz#b0da04c4371311df771cdc215e87f2130991ace7" + integrity sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg== + dependencies: + buffer "^5.2.1" + through "^2.3.8" + undeclared-identifiers@^1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/undeclared-identifiers/-/undeclared-identifiers-1.1.3.tgz#9254c1d37bdac0ac2b52de4b6722792d2a91e30f" @@ -8281,6 +8342,11 @@ ws@^6.1.0: dependencies: async-limiter "~1.0.0" +ws@^7.2.3: + version "7.4.5" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.5.tgz#a484dd851e9beb6fdb420027e3885e8ce48986c1" + integrity sha512-xzyu3hFvomRfXKH8vOFMU3OguG6oOvhXMo3xsGy3xWExqaM2dxBbVxuD99O7m3ZUFMvvscsZDqxfgMaRr/Nr1g== + ws@^7.4.4: version "7.4.4" resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.4.tgz#383bc9742cb202292c9077ceab6f6047b17f2d59" From 07e3c8d2de4e0a93ff38c38b2ad86642a0b9e606 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Thu, 20 May 2021 17:11:41 -0700 Subject: [PATCH 16/32] revert puppeteer upgrade --- lighthouse-treemap/test/treemap-test-pptr.js | 30 +-- package.json | 3 +- yarn.lock | 203 +++++++------------ 3 files changed, 92 insertions(+), 144 deletions(-) diff --git a/lighthouse-treemap/test/treemap-test-pptr.js b/lighthouse-treemap/test/treemap-test-pptr.js index 2e1b408b65f5..d337fef326b9 100644 --- a/lighthouse-treemap/test/treemap-test-pptr.js +++ b/lighthouse-treemap/test/treemap-test-pptr.js @@ -43,7 +43,12 @@ describe('Lighthouse Treemap', () => { }); beforeEach(async () => { - if (!browser) browser = await puppeteer.launch({headless: true}); + if (!browser) { + browser = await puppeteer.launch({ + headless: true, + executablePath: process.env.CHROME_PATH, + }); + } page = await browser.newPage(); page.on('pageerror', pageError => pageErrors.push(pageError)); }); @@ -100,23 +105,20 @@ describe('Lighthouse Treemap', () => { expect(error).toBe('Error: Invalid options'); }); - async function loadFromFragment(fragment) { - await page.goto(`${treemapUrl}#${fragment}`); - await page.waitForFunction( - () => window.__treemapOptions || document.body.textContent.startsWith('Error')); - } - it('from encoded fragment (gzip)', async () => { const options = JSON.parse(JSON.stringify(debugOptions)); options.lhr.requestedUrl += '😃😃😃'; + const json = JSON.stringify(options); const encoded = await page.evaluate(` - ${fs.readFileSync(require.resolve('pako/dist/pako_deflate.js'))} ${fs.readFileSync( require.resolve('../../lighthouse-core/report/html/renderer/base64.js'), 'utf-8')} - Base64.toBinary(${JSON.stringify(options)}, {gzip: true}); + Base64.toBinary(${JSON.stringify(json)}, {gzip: true}); `); - await loadFromFragment(encoded); + await page.goto(`${treemapUrl}?gzip=1#${encoded}`); + await page.waitForFunction( + () => window.__treemapOptions || 
document.body.textContent.startsWith('Error')); + const optionsInPage = await page.evaluate(() => window.__treemapOptions); expect(optionsInPage.lhr.requestedUrl).toBe(options.lhr.requestedUrl); }); @@ -124,13 +126,17 @@ describe('Lighthouse Treemap', () => { it('from encoded fragment (no gzip)', async () => { const options = JSON.parse(JSON.stringify(debugOptions)); options.lhr.requestedUrl += '😃😃😃'; + const json = JSON.stringify(options); const encoded = await page.evaluate(` ${fs.readFileSync( require.resolve('../../lighthouse-core/report/html/renderer/base64.js'), 'utf-8')} - Base64.toBinary(${JSON.stringify(options)}, {gzip: false}); + Base64.toBinary(${JSON.stringify(json)}, {gzip: false}); `); - await loadFromFragment(encoded); + await page.goto(`${treemapUrl}#${encoded}`); + await page.waitForFunction( + () => window.__treemapOptions || document.body.textContent.startsWith('Error')); + const optionsInPage = await page.evaluate(() => window.__treemapOptions); expect(optionsInPage.lhr.requestedUrl).toBe(options.lhr.requestedUrl); }); diff --git a/package.json b/package.json index cb8ea5ae0f4c..2073e01b6b18 100644 --- a/package.json +++ b/package.json @@ -107,7 +107,6 @@ "@types/lodash.isequal": "^4.5.2", "@types/lodash.set": "^4.3.6", "@types/node": "*", - "@types/pako": "^1.0.1", "@types/puppeteer": "1.19.x", "@types/raven": "^2.5.1", "@types/resize-observer-browser": "^0.1.1", @@ -152,7 +151,7 @@ "package-json-versionify": "^1.0.4", "prettier": "^1.14.3", "pretty-json-stringify": "^0.0.2", - "puppeteer": "^9.1.1", + "puppeteer": "^1.19.0", "tabulator-tables": "^4.9.3", "terser": "^5.3.8", "typed-query-selector": "^2.4.0", diff --git a/yarn.lock b/yarn.lock index 77fc0caec8d9..5fa91a6d2386 100644 --- a/yarn.lock +++ b/yarn.lock @@ -878,11 +878,6 @@ resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e" integrity sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA== -"@types/pako@^1.0.1": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@types/pako/-/pako-1.0.1.tgz#33b237f3c9aff44d0f82fe63acffa4a365ef4a61" - integrity sha512-GdZbRSJ3Cv5fiwT6I0SQ3ckeN2PWNqxd26W9Z2fCK1tGrrasGy4puvNFtnddqH9UJFMQYXxEuuB7B8UK+LLwSg== - "@types/prettier@^2.0.0": version "2.2.3" resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.2.3.tgz#ef65165aea2924c9359205bf748865b8881753c0" @@ -957,13 +952,6 @@ dependencies: "@types/yargs-parser" "*" -"@types/yauzl@^2.9.1": - version "2.9.1" - resolved "https://registry.yarnpkg.com/@types/yauzl/-/yauzl-2.9.1.tgz#d10f69f9f522eef3cf98e30afb684a1e1ec923af" - integrity sha512-A1b8SU4D10uoPjwb0lnHmmu8wZhR9d+9o2PKBQT2jU5YPTKsxac6M2qGAdY7VcL+dHHhARVUDmeg0rOrcd9EjA== - dependencies: - "@types/node" "*" - "@typescript-eslint/parser@^4.21.0": version "4.21.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.21.0.tgz#a227fc2af4001668c3e3f7415d4feee5093894c1" @@ -1103,12 +1091,12 @@ add-stream@^1.0.0: resolved "https://registry.yarnpkg.com/add-stream/-/add-stream-1.0.0.tgz#6a7990437ca736d5e1288db92bd3266d5f5cb2aa" integrity sha1-anmQQ3ynNtXhKI25K9MmbV9csqo= -agent-base@6: - version "6.0.2" - resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" - integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== +agent-base@^4.3.0: + version "4.3.0" + resolved 
"https://registry.yarnpkg.com/agent-base/-/agent-base-4.3.0.tgz#8165f01c436009bccad0b1d122f05ed770efc6ee" + integrity sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg== dependencies: - debug "4" + es6-promisify "^5.0.0" aggregate-error@^3.0.0: version "3.0.1" @@ -1458,11 +1446,6 @@ base64-js@^1.0.2: resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.0.tgz#cab1e6118f051095e58b5281aea8c1cd22bfc0e3" integrity sha512-ccav/yGvoa80BQDljCxsmmQ3Xvx60/UpBIij5QN21W3wBi/hhIC9OoO+KLpu9IJTS9j4DRVJ3aDDF9cMSoa2lw== -base64-js@^1.3.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" - integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== - base@^0.11.1: version "0.11.2" resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" @@ -1491,15 +1474,6 @@ bl@^1.0.0: readable-stream "^2.3.5" safe-buffer "^5.1.1" -bl@^4.0.3: - version "4.1.0" - resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" - integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== - dependencies: - buffer "^5.5.0" - inherits "^2.0.4" - readable-stream "^3.4.0" - bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.11.9: version "4.12.0" resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" @@ -1782,14 +1756,6 @@ buffer@^5.1.0, buffer@~5.2.1: base64-js "^1.0.2" ieee754 "^1.1.4" -buffer@^5.2.1, buffer@^5.5.0: - version "5.7.1" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" - integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== - dependencies: - base64-js "^1.3.1" - ieee754 "^1.1.13" - builtin-status-codes@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" @@ -1992,11 +1958,6 @@ chardet@^0.7.0: resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA== -chownr@^1.1.1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" - integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== - chrome-devtools-frontend@1.0.727089: version "1.0.727089" resolved "https://registry.yarnpkg.com/chrome-devtools-frontend/-/chrome-devtools-frontend-1.0.727089.tgz#5663edd32b445826c51a0d6332f5072b21f7b895" @@ -2213,7 +2174,7 @@ concat-map@0.0.1: resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= -concat-stream@^1.6.0, concat-stream@^1.6.1, concat-stream@~1.6.0: +concat-stream@^1.6.0, concat-stream@^1.6.1, concat-stream@^1.6.2, concat-stream@~1.6.0: version "1.6.2" resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== @@ -2662,14 +2623,7 @@ dateformat@^1.0.11, dateformat@^1.0.12: get-stdin "^4.0.1" meow "^3.3.0" -debug@4, 
debug@^4.0.1, debug@^4.1.0, debug@^4.1.1: - version "4.3.1" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" - integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== - dependencies: - ms "2.1.2" - -debug@^2.2.0, debug@^2.3.3, debug@^2.6.8: +debug@^2.2.0, debug@^2.3.3, debug@^2.6.8, debug@^2.6.9: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== @@ -2683,6 +2637,20 @@ debug@^3.0.1: dependencies: ms "^2.1.1" +debug@^3.1.0: + version "3.2.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +debug@^4.0.1, debug@^4.1.0, debug@^4.1.1: + version "4.3.1" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" + integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== + dependencies: + ms "2.1.2" + decamelize@^1.1.2, decamelize@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" @@ -2808,11 +2776,6 @@ devtools-protocol@0.0.859327: resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.859327.tgz#4ea4a9cfc8a4ba492f3ba57b4109d2c5fe99474a" integrity sha512-vRlLFY8Y2p3UnuDPRF0tsjHgXFM9JTS8T/p2F2YB9ukfpV+HByJU4kJc/Ks2/PozQ2bIQqmcYhDrSaFvs1Cy8Q== -devtools-protocol@0.0.869402: - version "0.0.869402" - resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.869402.tgz#03ade701761742e43ae4de5dc188bcd80f156d8d" - integrity sha512-VvlVYY+VDJe639yHs5PHISzdWTLL3Aw8rO4cvUtwvoxFd6FHbE4OpHHcde52M6096uYYazAmd4l0o5VuFRO2WA== - diff-sequences@^26.6.2: version "26.6.2" resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-26.6.2.tgz#48ba99157de1923412eed41db6b6d4aa9ca7c0b1" @@ -2944,7 +2907,7 @@ emoji-regex@^8.0.0: resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== -end-of-stream@^1.0.0, end-of-stream@^1.1.0, end-of-stream@^1.4.1: +end-of-stream@^1.0.0, end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== @@ -3026,6 +2989,18 @@ es6-map@^0.1.5: es6-symbol "~3.1.1" event-emitter "~0.3.5" +es6-promise@^4.0.3: + version "4.2.8" + resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.2.8.tgz#4eb21594c972bc40553d276e510539143db53e0a" + integrity sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w== + +es6-promisify@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/es6-promisify/-/es6-promisify-5.0.0.tgz#5109d62f3e56ea967c4b63505aef08291c8a5203" + integrity sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM= + dependencies: + es6-promise "^4.0.3" + es6-set@^0.1.5, es6-set@~0.1.5: version "0.1.5" resolved 
"https://registry.yarnpkg.com/es6-set/-/es6-set-0.1.5.tgz#d2b3ec5d4d800ced818db538d28974db0a73ccb1" @@ -3372,16 +3347,15 @@ extglob@^2.0.4: snapdragon "^0.8.1" to-regex "^3.0.1" -extract-zip@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-2.0.1.tgz#663dca56fe46df890d5f131ef4a06d22bb8ba13a" - integrity sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg== +extract-zip@^1.6.6: + version "1.7.0" + resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-1.7.0.tgz#556cc3ae9df7f452c493a0cfb51cc30277940927" + integrity sha512-xoh5G1W/PB0/27lXgMQyIhP5DSY/LhoCsOyZgb+6iMmRtCwVBo55uKaMoEYrDCKQhWvqEip5ZPKAc6eFNyf/MA== dependencies: - debug "^4.1.1" - get-stream "^5.1.0" + concat-stream "^1.6.2" + debug "^2.6.9" + mkdirp "^0.5.4" yauzl "^2.10.0" - optionalDependencies: - "@types/yauzl" "^2.9.1" extsprintf@1.0.2: version "1.0.2" @@ -4055,13 +4029,13 @@ https-browserify@^1.0.0: resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= -https-proxy-agent@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz#e2a90542abb68a762e0a0850f6c9edadfd8506b2" - integrity sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA== +https-proxy-agent@^2.2.1: + version "2.2.4" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz#4ee7a737abd92678a293d9b34a1af4d0d08c787b" + integrity sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg== dependencies: - agent-base "6" - debug "4" + agent-base "^4.3.0" + debug "^3.1.0" human-signals@^1.1.1: version "1.1.1" @@ -4088,11 +4062,6 @@ idb-keyval@2.2.0: resolved "https://registry.yarnpkg.com/idb-keyval/-/idb-keyval-2.2.0.tgz#b28020d53e3cf1621e3ec605e57e5305f37b195e" integrity sha1-soAg1T488WIePsYF5X5TBfN7GV4= -ieee754@^1.1.13: - version "1.2.1" - resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" - integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== - ieee754@^1.1.4: version "1.1.12" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.12.tgz#50bf24e5b9c8bb98af4964c941cdb0918da7b60b" @@ -5665,6 +5634,11 @@ mime-types@^2.1.12, mime-types@^2.1.30, mime-types@~2.1.19: dependencies: mime-db "1.47.0" +mime@^2.0.3: + version "2.5.2" + resolved "https://registry.yarnpkg.com/mime/-/mime-2.5.2.tgz#6e3dc6cc2b9510643830e5f19d5cb753da5eeabe" + integrity sha512-tqkh47FzKeCPD2PUiPB6pkbMzsCasjxAfC62/Wap5qrUWcb+sFasXUC5I3gYM5iBM8v/Qpn4UK0x+j0iHyFPDg== + mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" @@ -5715,7 +5689,7 @@ mkdirp-classic@^0.5.2: resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== -mkdirp@~0.5.1: +mkdirp@^0.5.4, mkdirp@~0.5.1: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== @@ -6470,7 +6444,7 @@ prompts@^2.0.1: kleur "^3.0.2" sisteransi 
"^1.0.0" -proxy-from-env@^1.1.0: +proxy-from-env@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== @@ -6527,23 +6501,19 @@ pupa@^2.0.1: dependencies: escape-goat "^2.0.0" -puppeteer@^9.1.1: - version "9.1.1" - resolved "https://registry.yarnpkg.com/puppeteer/-/puppeteer-9.1.1.tgz#f74b7facf86887efd6c6b9fabb7baae6fdce012c" - integrity sha512-W+nOulP2tYd/ZG99WuZC/I5ljjQQ7EUw/jQGcIb9eu8mDlZxNY2SgcJXTLG9h5gRvqA3uJOe4hZXYsd3EqioMw== +puppeteer@^1.19.0: + version "1.20.0" + resolved "https://registry.yarnpkg.com/puppeteer/-/puppeteer-1.20.0.tgz#e3d267786f74e1d87cf2d15acc59177f471bbe38" + integrity sha512-bt48RDBy2eIwZPrkgbcwHtb51mj2nKvHOPMaSH2IsWiv7lOG9k9zhaRzpDZafrk05ajMc3cu+lSQYYOfH2DkVQ== dependencies: debug "^4.1.0" - devtools-protocol "0.0.869402" - extract-zip "^2.0.0" - https-proxy-agent "^5.0.0" - node-fetch "^2.6.1" - pkg-dir "^4.2.0" + extract-zip "^1.6.6" + https-proxy-agent "^2.2.1" + mime "^2.0.3" progress "^2.0.1" - proxy-from-env "^1.1.0" - rimraf "^3.0.2" - tar-fs "^2.0.0" - unbzip2-stream "^1.3.3" - ws "^7.2.3" + proxy-from-env "^1.0.0" + rimraf "^2.6.1" + ws "^6.1.0" q@^1.4.1: version "1.5.1" @@ -6673,7 +6643,7 @@ read-pkg@^5.2.0: parse-json "^5.0.0" type-fest "^0.6.0" -"readable-stream@2 || 3", readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.5.0, readable-stream@^3.6.0: +"readable-stream@2 || 3", readable-stream@^3.5.0, readable-stream@^3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== @@ -6888,6 +6858,13 @@ reusify@^1.0.4: resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== +rimraf@^2.6.1: + version "2.7.1" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" + integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== + dependencies: + glob "^7.1.3" + rimraf@^2.6.2: version "2.6.3" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" @@ -7587,16 +7564,6 @@ tabulator-tables@^4.9.3: resolved "https://registry.yarnpkg.com/tabulator-tables/-/tabulator-tables-4.9.3.tgz#89ea8f9bffc11ba9a789369b5165ac82da26f4f0" integrity sha512-iwwQqAEGGxlgrBpcmJJvMJrfjGLcCXOB3AOb/DGkXqBy1YKoYA36hIl7qXGp6Jo8dSkzFAlDT6pKLZgyhs9OnQ== -tar-fs@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" - integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== - dependencies: - chownr "^1.1.1" - mkdirp-classic "^0.5.2" - pump "^3.0.0" - tar-stream "^2.1.4" - tar-stream@^1.5.0: version "1.6.2" resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-1.6.2.tgz#8ea55dab37972253d9a9af90fdcd559ae435c555" @@ -7610,17 +7577,6 @@ tar-stream@^1.5.0: to-buffer "^1.1.1" xtend "^4.0.0" -tar-stream@^2.1.4: - version "2.2.0" - resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" - integrity 
sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== - dependencies: - bl "^4.0.3" - end-of-stream "^1.4.1" - fs-constants "^1.0.0" - inherits "^2.0.3" - readable-stream "^3.1.1" - tempfile@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/tempfile/-/tempfile-1.1.1.tgz#5bcc4eaecc4ab2c707d8bc11d99ccc9a2cb287f2" @@ -7695,7 +7651,7 @@ through2@^3.0.1: dependencies: readable-stream "2 || 3" -through@2, "through@>=2.2.7 <3", through@^2.3.6, through@^2.3.8: +through@2, "through@>=2.2.7 <3", through@^2.3.6: version "2.3.8" resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= @@ -7952,14 +7908,6 @@ unbox-primitive@^1.0.0: has-symbols "^1.0.2" which-boxed-primitive "^1.0.2" -unbzip2-stream@^1.3.3: - version "1.4.3" - resolved "https://registry.yarnpkg.com/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz#b0da04c4371311df771cdc215e87f2130991ace7" - integrity sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg== - dependencies: - buffer "^5.2.1" - through "^2.3.8" - undeclared-identifiers@^1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/undeclared-identifiers/-/undeclared-identifiers-1.1.3.tgz#9254c1d37bdac0ac2b52de4b6722792d2a91e30f" @@ -8342,11 +8290,6 @@ ws@^6.1.0: dependencies: async-limiter "~1.0.0" -ws@^7.2.3: - version "7.4.5" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.5.tgz#a484dd851e9beb6fdb420027e3885e8ce48986c1" - integrity sha512-xzyu3hFvomRfXKH8vOFMU3OguG6oOvhXMo3xsGy3xWExqaM2dxBbVxuD99O7m3ZUFMvvscsZDqxfgMaRr/Nr1g== - ws@^7.4.4: version "7.4.4" resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.4.tgz#383bc9742cb202292c9077ceab6f6047b17f2d59" From 5365e532e28d22a0521404f449e28517798f57c0 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Fri, 21 May 2021 10:26:16 -0700 Subject: [PATCH 17/32] update --- .../report/html/renderer/base64.js | 29 +------------------ .../html/renderer/report-ui-features.js | 6 ---- 2 files changed, 1 insertion(+), 34 deletions(-) diff --git a/lighthouse-core/report/html/renderer/base64.js b/lighthouse-core/report/html/renderer/base64.js index fac9dc5c7e06..dd994779b7b7 100644 --- a/lighthouse-core/report/html/renderer/base64.js +++ b/lighthouse-core/report/html/renderer/base64.js @@ -1,5 +1,5 @@ /** - * @license Copyright 2020 The Lighthouse Authors. All Rights Reserved. + * @license Copyright 2021 The Lighthouse Authors. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ @@ -16,33 +16,6 @@ const decode = typeof btoa !== 'undefined' ? /** @param {string} str */ (str) => Buffer.from(str, 'base64').toString(); -// /** -// * @param {string} string -// */ -// function toBinary(string) { -// const bytes = new TextEncoder().encode(string); -// let binaryString = ''; -// // This is ~25% faster than building the string one character at a time. 
-// // https://jsbench.me/2gkoxazvjl -// const chunkSize = 5000; -// for (let i = 0; i < bytes.length; i += chunkSize) { -// binaryString += String.fromCharCode(...new Uint8Array(bytes.buffer.slice(i, i + chunkSize))); -// } -// return encode(binaryString); -// } - -// /** -// * @param {string} encoded -// */ -// function fromBinary(encoded) { -// const binaryString = decode(encoded); -// const bytes = new Uint8Array(binaryString.length); -// for (let i = 0; i < bytes.length; i++) { -// bytes[i] = binaryString.charCodeAt(i); -// } -// return new TextDecoder().decode(bytes); -// } - function getPako() { return typeof pako !== 'undefined' ? pako : require('pako'); } diff --git a/lighthouse-core/report/html/renderer/report-ui-features.js b/lighthouse-core/report/html/renderer/report-ui-features.js index f4a41bc2f6db..4c9acebdfeb9 100644 --- a/lighthouse-core/report/html/renderer/report-ui-features.js +++ b/lighthouse-core/report/html/renderer/report-ui-features.js @@ -575,12 +575,6 @@ class ReportUIFeatures { const url = getAppsOrigin() + '/treemap/'; const windowName = `treemap-${json.requestedUrl}`; - // ~~~~~~~~~~~~~~~~~~~~ - // SUPER TODO HEY ME DONT PUSH THIS TO MASTER. - // ~~~~~~~~~~~~~~~~~~~~ - method = 'url'; - treemapOptions.lhr.finalUrl += '😃😃'; - treemapOptions.lhr.requestedUrl += '😃😃'; if (method === 'postMessage') { ReportUIFeatures.openTabAndSendData(treemapOptions, url, windowName); } else { From a6ff9bb1244e0b6ecf6c76e2c266fa933637593c Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Fri, 21 May 2021 14:09:47 -0700 Subject: [PATCH 18/32] yay --- .../report/html/renderer/base64.js | 26 ++++++++++--------- .../test/report/html/renderer/base64-test.js | 12 ++++++--- 2 files changed, 22 insertions(+), 16 deletions(-) diff --git a/lighthouse-core/report/html/renderer/base64.js b/lighthouse-core/report/html/renderer/base64.js index dd994779b7b7..1f04dea7a032 100644 --- a/lighthouse-core/report/html/renderer/base64.js +++ b/lighthouse-core/report/html/renderer/base64.js @@ -11,28 +11,29 @@ const encode = typeof btoa !== 'undefined' ? btoa : /** @param {string} str */ (str) => Buffer.from(str).toString('base64'); -const decode = typeof btoa !== 'undefined' ? +const decode = typeof atob !== 'undefined' ? atob : /** @param {string} str */ (str) => Buffer.from(str, 'base64').toString(); -function getPako() { - return typeof pako !== 'undefined' ? 
pako : require('pako'); -} - /** * @param {string} string * @param {{gzip: boolean}} options + * @return {Promise} */ async function toBinary(string, options) { let bytes; if (options.gzip) { - const cs = new CompressionStream('gzip'); - const writer = cs.writable.getWriter(); - writer.write(new TextEncoder().encode(string)); - writer.close(); - const compAb = await new Response(cs.readable).arrayBuffer(); - bytes = new Uint8Array(compAb); + if (typeof CompressionStream !== 'undefined') { + const cs = new CompressionStream('gzip'); + const writer = cs.writable.getWriter(); + writer.write(new TextEncoder().encode(string)); + writer.close(); + const compAb = await new Response(cs.readable).arrayBuffer(); + bytes = new Uint8Array(compAb); + } else { + bytes = pako.gzip(string); + } } else { bytes = new TextEncoder().encode(string); } @@ -50,6 +51,7 @@ async function toBinary(string, options) { /** * @param {string} encoded * @param {{gzip: boolean}} options + * @return {string} */ function fromBinary(encoded, options) { const binaryString = decode(encoded); @@ -59,7 +61,7 @@ function fromBinary(encoded, options) { } if (options.gzip) { - return getPako().ungzip(bytes, {to: 'string'}); + return pako.ungzip(bytes, {to: 'string'}); } else { return new TextDecoder().decode(bytes); } diff --git a/lighthouse-core/test/report/html/renderer/base64-test.js b/lighthouse-core/test/report/html/renderer/base64-test.js index 860af2a02570..511356aaa932 100644 --- a/lighthouse-core/test/report/html/renderer/base64-test.js +++ b/lighthouse-core/test/report/html/renderer/base64-test.js @@ -10,13 +10,17 @@ const Base64 = require('../../../../report/html/renderer/base64.js'); /* eslint-env jest */ describe('base64', () => { + beforeAll(() => { + global.pako = require('pako'); + }); + + afterAll(() => { + global.pako = undefined; + }); + /** @type {string} */ async function test(str) { for (const gzip of [false, true]) { - // Already tested in treemap-test-pptr.js - // TODO: can we test this in Node? 
- if (gzip) continue; - const binary = await Base64.toBinary(str, {gzip}); const roundtrip = Base64.fromBinary(binary, {gzip}); expect(roundtrip.length).toEqual(str.length); From b4736365549919e13cf758409c7e03915a9890ec Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Fri, 21 May 2021 14:15:21 -0700 Subject: [PATCH 19/32] pr --- lighthouse-core/report/html/html-report-assets.js | 2 -- lighthouse-core/report/html/renderer/base64.js | 11 +++++++---- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/lighthouse-core/report/html/html-report-assets.js b/lighthouse-core/report/html/html-report-assets.js index 73c42ac30513..a176e5be4472 100644 --- a/lighthouse-core/report/html/html-report-assets.js +++ b/lighthouse-core/report/html/html-report-assets.js @@ -23,8 +23,6 @@ const REPORT_JAVASCRIPT = [ fs.readFileSync(__dirname + '/renderer/pwa-category-renderer.js', 'utf8'), fs.readFileSync(__dirname + '/renderer/report-renderer.js', 'utf8'), fs.readFileSync(__dirname + '/renderer/i18n.js', 'utf8'), - // // TODO: minified version breaks utf8 - // fs.readFileSync(require.resolve('pako/dist/pako_deflate.js'), 'utf-8'), fs.readFileSync(__dirname + '/renderer/base64.js', 'utf8'), ].join(';\n'); const REPORT_CSS = fs.readFileSync(__dirname + '/report-styles.css', 'utf8'); diff --git a/lighthouse-core/report/html/renderer/base64.js b/lighthouse-core/report/html/renderer/base64.js index 1f04dea7a032..42aee9ee2b18 100644 --- a/lighthouse-core/report/html/renderer/base64.js +++ b/lighthouse-core/report/html/renderer/base64.js @@ -17,25 +17,28 @@ const decode = typeof atob !== 'undefined' ? (str) => Buffer.from(str, 'base64').toString(); /** + * Takes an UTF-8 string and returns a URL-safe base64 encoded string. + * If gzip is true, the UTF-8 bytes are gzipped before base64'd, using + * CompressionStream (currently only in Chrome), falling back to pak + * (which is only used to encode in our Node tests). * @param {string} string * @param {{gzip: boolean}} options * @return {Promise} */ async function toBinary(string, options) { - let bytes; + let bytes = new TextEncoder().encode(string); + if (options.gzip) { if (typeof CompressionStream !== 'undefined') { const cs = new CompressionStream('gzip'); const writer = cs.writable.getWriter(); - writer.write(new TextEncoder().encode(string)); + writer.write(bytes); writer.close(); const compAb = await new Response(cs.readable).arrayBuffer(); bytes = new Uint8Array(compAb); } else { bytes = pako.gzip(string); } - } else { - bytes = new TextEncoder().encode(string); } let binaryString = ''; From 50ff2f1bd0cc561a55990d69ccfc9407912beca3 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Mon, 24 May 2021 12:18:57 -0700 Subject: [PATCH 20/32] names --- lighthouse-core/report/html/renderer/base64.js | 18 +++++++++--------- .../report/html/renderer/report-ui-features.js | 2 +- .../test/report/html/renderer/base64-test.js | 4 ++-- lighthouse-treemap/app/src/main.js | 2 +- lighthouse-treemap/test/treemap-test-pptr.js | 4 ++-- 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/lighthouse-core/report/html/renderer/base64.js b/lighthouse-core/report/html/renderer/base64.js index 42aee9ee2b18..b9efdfe83819 100644 --- a/lighthouse-core/report/html/renderer/base64.js +++ b/lighthouse-core/report/html/renderer/base64.js @@ -7,11 +7,11 @@ /* global self btoa atob pako CompressionStream Response */ -const encode = typeof btoa !== 'undefined' ? +const toBase64 = typeof btoa !== 'undefined' ? 
btoa : /** @param {string} str */ (str) => Buffer.from(str).toString('base64'); -const decode = typeof atob !== 'undefined' ? +const fromBase64 = typeof atob !== 'undefined' ? atob : /** @param {string} str */ (str) => Buffer.from(str, 'base64').toString(); @@ -19,13 +19,13 @@ const decode = typeof atob !== 'undefined' ? /** * Takes an UTF-8 string and returns a URL-safe base64 encoded string. * If gzip is true, the UTF-8 bytes are gzipped before base64'd, using - * CompressionStream (currently only in Chrome), falling back to pak + * CompressionStream (currently only in Chrome), falling back to pako * (which is only used to encode in our Node tests). * @param {string} string * @param {{gzip: boolean}} options * @return {Promise} */ -async function toBinary(string, options) { +async function encode(string, options) { let bytes = new TextEncoder().encode(string); if (options.gzip) { @@ -48,7 +48,7 @@ async function toBinary(string, options) { for (let i = 0; i < bytes.length; i += chunkSize) { binaryString += String.fromCharCode(...new Uint8Array(bytes.buffer.slice(i, i + chunkSize))); } - return encode(binaryString); + return toBase64(binaryString); } /** @@ -56,8 +56,8 @@ async function toBinary(string, options) { * @param {{gzip: boolean}} options * @return {string} */ -function fromBinary(encoded, options) { - const binaryString = decode(encoded); +function decode(encoded, options) { + const binaryString = fromBase64(encoded); const bytes = new Uint8Array(binaryString.length); for (let i = 0; i < bytes.length; i++) { bytes[i] = binaryString.charCodeAt(i); @@ -71,7 +71,7 @@ function fromBinary(encoded, options) { } if (typeof module !== 'undefined' && module.exports) { - module.exports = {toBinary, fromBinary}; + module.exports = {encode, decode}; } else { - self.Base64 = {toBinary, fromBinary}; + self.Base64 = {encode, decode}; } diff --git a/lighthouse-core/report/html/renderer/report-ui-features.js b/lighthouse-core/report/html/renderer/report-ui-features.js index 4c9acebdfeb9..8c4e01365014 100644 --- a/lighthouse-core/report/html/renderer/report-ui-features.js +++ b/lighthouse-core/report/html/renderer/report-ui-features.js @@ -618,7 +618,7 @@ class ReportUIFeatures { static async openTabWithUrlData(data, url_, windowName) { const url = new URL(url_); const gzip = Boolean(window.CompressionStream); - url.hash = await Base64.toBinary(JSON.stringify(data), { + url.hash = await Base64.encode(JSON.stringify(data), { gzip, }); if (gzip) url.searchParams.set('gzip', '1'); diff --git a/lighthouse-core/test/report/html/renderer/base64-test.js b/lighthouse-core/test/report/html/renderer/base64-test.js index 511356aaa932..a1802ce667c1 100644 --- a/lighthouse-core/test/report/html/renderer/base64-test.js +++ b/lighthouse-core/test/report/html/renderer/base64-test.js @@ -21,8 +21,8 @@ describe('base64', () => { /** @type {string} */ async function test(str) { for (const gzip of [false, true]) { - const binary = await Base64.toBinary(str, {gzip}); - const roundtrip = Base64.fromBinary(binary, {gzip}); + const binary = await Base64.encode(str, {gzip}); + const roundtrip = Base64.decode(binary, {gzip}); expect(roundtrip.length).toEqual(str.length); expect(roundtrip).toEqual(str); } diff --git a/lighthouse-treemap/app/src/main.js b/lighthouse-treemap/app/src/main.js index 057734e8ba3f..ae708796c48d 100644 --- a/lighthouse-treemap/app/src/main.js +++ b/lighthouse-treemap/app/src/main.js @@ -889,7 +889,7 @@ async function main() { const queryParams = new URLSearchParams(window.location.search); const 
gzip = queryParams.get('gzip') === '1'; const hashParams = location.hash ? - JSON.parse(Base64.fromBinary(location.hash.substr(1), {gzip})) : + JSON.parse(Base64.decode(location.hash.substr(1), {gzip})) : {}; /** @type {Record} */ const params = { diff --git a/lighthouse-treemap/test/treemap-test-pptr.js b/lighthouse-treemap/test/treemap-test-pptr.js index 8421061c23e5..bf176e3bdd27 100644 --- a/lighthouse-treemap/test/treemap-test-pptr.js +++ b/lighthouse-treemap/test/treemap-test-pptr.js @@ -111,7 +111,7 @@ describe('Lighthouse Treemap', () => { const encoded = await page.evaluate(` ${fs.readFileSync( require.resolve('../../lighthouse-core/report/html/renderer/base64.js'), 'utf-8')} - Base64.toBinary(${JSON.stringify(json)}, {gzip: true}); + Base64.encode(${JSON.stringify(json)}, {gzip: true}); `); await page.goto(`${treemapUrl}?gzip=1#${encoded}`); @@ -129,7 +129,7 @@ describe('Lighthouse Treemap', () => { const encoded = await page.evaluate(` ${fs.readFileSync( require.resolve('../../lighthouse-core/report/html/renderer/base64.js'), 'utf-8')} - Base64.toBinary(${JSON.stringify(json)}, {gzip: false}); + Base64.encode(${JSON.stringify(json)}, {gzip: false}); `); await page.goto(`${treemapUrl}#${encoded}`); From ca16fffa30a6c3c063ccd1757cb2963886bb3f70 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Mon, 24 May 2021 15:26:51 -0700 Subject: [PATCH 21/32] deprecate postmessage --- .../html/renderer/report-ui-features.js | 34 ++++++++++--------- lighthouse-treemap/app/src/main.js | 2 ++ lighthouse-treemap/test/treemap-test-pptr.js | 1 + 3 files changed, 21 insertions(+), 16 deletions(-) diff --git a/lighthouse-core/report/html/renderer/report-ui-features.js b/lighthouse-core/report/html/renderer/report-ui-features.js index 8c4e01365014..8dc560cb15e4 100644 --- a/lighthouse-core/report/html/renderer/report-ui-features.js +++ b/lighthouse-core/report/html/renderer/report-ui-features.js @@ -157,8 +157,7 @@ class ReportUIFeatures { this.addButton({ text: Util.i18n.strings.viewTreemapLabel, icon: 'treemap', - onClick: () => ReportUIFeatures.openTreemap( - this.json, this._dom.isDevTools() ? 'url' : 'postMessage'), + onClick: () => ReportUIFeatures.openTreemap(this.json), }); } @@ -532,28 +531,35 @@ class ReportUIFeatures { } /** - * Opens a new tab to the online viewer and sends the local page's JSON results - * to the online viewer using postMessage. + * The popup's window.name is keyed by version+url+fetchTime, so we reuse/select tabs correctly. * @param {LH.Result} json * @protected */ - static openTabAndSendJsonReportToViewer(json) { - // The popup's window.name is keyed by version+url+fetchTime, so we reuse/select tabs correctly + static computeWindowNameSuffix(json) { // @ts-ignore - If this is a v2 LHR, use old `generatedTime`. const fallbackFetchTime = /** @type {string} */ (json.generatedTime); const fetchTime = json.fetchTime || fallbackFetchTime; - const windowName = `${json.lighthouseVersion}-${json.requestedUrl}-${fetchTime}`; + return `${json.lighthouseVersion}-${json.requestedUrl}-${fetchTime}`; + } + + /** + * Opens a new tab to the online viewer and sends the local page's JSON results + * to the online viewer using postMessage. 
+ * @param {LH.Result} json + * @protected + */ + static openTabAndSendJsonReportToViewer(json) { + const windowName = 'viewer-' + this.computeWindowNameSuffix(json); const url = getAppsOrigin() + '/viewer/'; ReportUIFeatures.openTabAndSendData({lhr: json}, url, windowName); } /** - * Opens a new tab to the treemap app and sends the JSON results using postMessage. + * Opens a new tab to the treemap app and sends the JSON results using URL.fragment * @param {LH.Result} json - * @param {'postMessage'|'url'} method * @protected */ - static openTreemap(json, method = 'postMessage') { + static openTreemap(json) { const treemapData = json.audits['script-treemap-data'].details; if (!treemapData) { throw new Error('no script treemap data found'); @@ -573,13 +579,9 @@ class ReportUIFeatures { }, }; const url = getAppsOrigin() + '/treemap/'; - const windowName = `treemap-${json.requestedUrl}`; + const windowName = 'treemap-' + this.computeWindowNameSuffix(json); - if (method === 'postMessage') { - ReportUIFeatures.openTabAndSendData(treemapOptions, url, windowName); - } else { - ReportUIFeatures.openTabWithUrlData(treemapOptions, url, windowName); - } + ReportUIFeatures.openTabWithUrlData(treemapOptions, url, windowName); } /** diff --git a/lighthouse-treemap/app/src/main.js b/lighthouse-treemap/app/src/main.js index ae708796c48d..f11ad3618fc2 100644 --- a/lighthouse-treemap/app/src/main.js +++ b/lighthouse-treemap/app/src/main.js @@ -919,6 +919,7 @@ async function main() { } if (options) app.init(options); } else { + // TODO: remove for v8. window.addEventListener('message', e => { if (e.source !== self.opener) return; @@ -934,6 +935,7 @@ async function main() { }); } + // TODO: remove for v8. // If the page was opened as a popup, tell the opening window we're ready. 
if (self.opener && !self.opener.closed) { self.opener.postMessage({opened: true}, '*'); diff --git a/lighthouse-treemap/test/treemap-test-pptr.js b/lighthouse-treemap/test/treemap-test-pptr.js index bf176e3bdd27..8ea29410f911 100644 --- a/lighthouse-treemap/test/treemap-test-pptr.js +++ b/lighthouse-treemap/test/treemap-test-pptr.js @@ -71,6 +71,7 @@ describe('Lighthouse Treemap', () => { expect(options.lhr.requestedUrl).toBe(debugOptions.lhr.requestedUrl); }); + // TODO: remove for v8 async function loadFromPostMessage(options) { const openerPage = await browser.newPage(); await openerPage.evaluate((treemapUrl, options) => { From 5b2f7d3654573dbd04d625121dc878b3bbe25562 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Tue, 25 May 2021 15:24:41 -0700 Subject: [PATCH 22/32] rename --- build/build-treemap.js | 2 +- .../report/html/renderer/report-ui-features.js | 4 ++-- .../report/html/renderer/{base64.js => text-encoding.js} | 2 +- .../renderer/{base64-test.js => text-encoding-test.js} | 8 ++++---- lighthouse-treemap/app/src/main.js | 4 ++-- lighthouse-treemap/test/treemap-test-pptr.js | 8 ++++---- lighthouse-treemap/types/treemap.d.ts | 4 ++-- types/html-renderer.d.ts | 6 +++--- 8 files changed, 19 insertions(+), 19 deletions(-) rename lighthouse-core/report/html/renderer/{base64.js => text-encoding.js} (98%) rename lighthouse-core/test/report/html/renderer/{base64-test.js => text-encoding-test.js} (85%) diff --git a/build/build-treemap.js b/build/build-treemap.js index 81e37a67bd57..c614dd8541ec 100644 --- a/build/build-treemap.js +++ b/build/build-treemap.js @@ -66,7 +66,7 @@ async function run() { buildStrings(), {path: '../../lighthouse-core/report/html/renderer/logger.js'}, {path: '../../lighthouse-core/report/html/renderer/i18n.js'}, - {path: '../../lighthouse-core/report/html/renderer/base64.js'}, + {path: '../../lighthouse-core/report/html/renderer/text-encoding.js'}, {path: '../../lighthouse-viewer/app/src/drag-and-drop.js'}, {path: '../../lighthouse-viewer/app/src/github-api.js'}, {path: '../../lighthouse-viewer/app/src/firebase-auth.js'}, diff --git a/lighthouse-core/report/html/renderer/report-ui-features.js b/lighthouse-core/report/html/renderer/report-ui-features.js index 8dc560cb15e4..a33bb9af2ce2 100644 --- a/lighthouse-core/report/html/renderer/report-ui-features.js +++ b/lighthouse-core/report/html/renderer/report-ui-features.js @@ -23,7 +23,7 @@ * the report. 
*/ -/* globals getFilenamePrefix Util Base64 ElementScreenshotRenderer */ +/* globals getFilenamePrefix Util TextEncoding ElementScreenshotRenderer */ /** @typedef {import('./dom')} DOM */ @@ -620,7 +620,7 @@ class ReportUIFeatures { static async openTabWithUrlData(data, url_, windowName) { const url = new URL(url_); const gzip = Boolean(window.CompressionStream); - url.hash = await Base64.encode(JSON.stringify(data), { + url.hash = await TextEncoding.encode(JSON.stringify(data), { gzip, }); if (gzip) url.searchParams.set('gzip', '1'); diff --git a/lighthouse-core/report/html/renderer/base64.js b/lighthouse-core/report/html/renderer/text-encoding.js similarity index 98% rename from lighthouse-core/report/html/renderer/base64.js rename to lighthouse-core/report/html/renderer/text-encoding.js index b9efdfe83819..d72d1238773f 100644 --- a/lighthouse-core/report/html/renderer/base64.js +++ b/lighthouse-core/report/html/renderer/text-encoding.js @@ -73,5 +73,5 @@ function decode(encoded, options) { if (typeof module !== 'undefined' && module.exports) { module.exports = {encode, decode}; } else { - self.Base64 = {encode, decode}; + self.TextEncoding = {encode, decode}; } diff --git a/lighthouse-core/test/report/html/renderer/base64-test.js b/lighthouse-core/test/report/html/renderer/text-encoding-test.js similarity index 85% rename from lighthouse-core/test/report/html/renderer/base64-test.js rename to lighthouse-core/test/report/html/renderer/text-encoding-test.js index a1802ce667c1..a9c5b94af7ca 100644 --- a/lighthouse-core/test/report/html/renderer/base64-test.js +++ b/lighthouse-core/test/report/html/renderer/text-encoding-test.js @@ -5,11 +5,11 @@ */ 'use strict'; -const Base64 = require('../../../../report/html/renderer/base64.js'); +const TextEncoding = require('../../../../report/html/renderer/text-encoding.js'); /* eslint-env jest */ -describe('base64', () => { +describe('TextEncoding', () => { beforeAll(() => { global.pako = require('pako'); }); @@ -21,8 +21,8 @@ describe('base64', () => { /** @type {string} */ async function test(str) { for (const gzip of [false, true]) { - const binary = await Base64.encode(str, {gzip}); - const roundtrip = Base64.decode(binary, {gzip}); + const binary = await TextEncoding.encode(str, {gzip}); + const roundtrip = TextEncoding.decode(binary, {gzip}); expect(roundtrip.length).toEqual(str.length); expect(roundtrip).toEqual(str); } diff --git a/lighthouse-treemap/app/src/main.js b/lighthouse-treemap/app/src/main.js index f11ad3618fc2..7dfc58bc1e34 100644 --- a/lighthouse-treemap/app/src/main.js +++ b/lighthouse-treemap/app/src/main.js @@ -9,7 +9,7 @@ /* eslint-env browser */ -/* globals I18n webtreemap strings TreemapUtil Base64 Tabulator Cell Row DragAndDrop Logger GithubApi */ +/* globals I18n webtreemap strings TreemapUtil TextEncoding Tabulator Cell Row DragAndDrop Logger GithubApi */ const DUPLICATED_MODULES_IGNORE_THRESHOLD = 1024; const DUPLICATED_MODULES_IGNORE_ROOT_RATIO = 0.01; @@ -889,7 +889,7 @@ async function main() { const queryParams = new URLSearchParams(window.location.search); const gzip = queryParams.get('gzip') === '1'; const hashParams = location.hash ? 
- JSON.parse(Base64.decode(location.hash.substr(1), {gzip})) : + JSON.parse(TextEncoding.decode(location.hash.substr(1), {gzip})) : {}; /** @type {Record} */ const params = { diff --git a/lighthouse-treemap/test/treemap-test-pptr.js b/lighthouse-treemap/test/treemap-test-pptr.js index 8ea29410f911..41f35c45cae8 100644 --- a/lighthouse-treemap/test/treemap-test-pptr.js +++ b/lighthouse-treemap/test/treemap-test-pptr.js @@ -111,8 +111,8 @@ describe('Lighthouse Treemap', () => { const json = JSON.stringify(options); const encoded = await page.evaluate(` ${fs.readFileSync( - require.resolve('../../lighthouse-core/report/html/renderer/base64.js'), 'utf-8')} - Base64.encode(${JSON.stringify(json)}, {gzip: true}); + require.resolve('../../lighthouse-core/report/html/renderer/text-encoding.js'), 'utf-8')} + TextEncoding.encode(${JSON.stringify(json)}, {gzip: true}); `); await page.goto(`${treemapUrl}?gzip=1#${encoded}`); @@ -129,8 +129,8 @@ describe('Lighthouse Treemap', () => { const json = JSON.stringify(options); const encoded = await page.evaluate(` ${fs.readFileSync( - require.resolve('../../lighthouse-core/report/html/renderer/base64.js'), 'utf-8')} - Base64.encode(${JSON.stringify(json)}, {gzip: false}); + require.resolve('../../lighthouse-core/report/html/renderer/text-encoding.js'), 'utf-8')} + TextEncoding.encode(${JSON.stringify(json)}, {gzip: false}); `); await page.goto(`${treemapUrl}#${encoded}`); diff --git a/lighthouse-treemap/types/treemap.d.ts b/lighthouse-treemap/types/treemap.d.ts index 7c472fea3d0e..4c44d77b31f0 100644 --- a/lighthouse-treemap/types/treemap.d.ts +++ b/lighthouse-treemap/types/treemap.d.ts @@ -1,5 +1,5 @@ import _TreemapUtil = require('../app/src/util.js'); -import _Base64 = require('../../lighthouse-core/report/html/renderer/base64.js'); +import _TextEncoding = require('../../lighthouse-core/report/html/renderer/text-encoding.js'); import _DragAndDrop = require('../../lighthouse-viewer/app/src/drag-and-drop.js'); import _FirebaseAuth = require('../../lighthouse-viewer/app/src/firebase-auth.js'); import _GithubApi = require('../../lighthouse-viewer/app/src/github-api.js'); @@ -37,7 +37,7 @@ declare global { sort(data: any): void; }; var TreemapUtil: typeof _TreemapUtil; - var Base64: typeof _Base64; + var TextEncoding: typeof _TextEncoding; var Logger: typeof _Logger; var DragAndDrop: typeof _DragAndDrop; var GithubApi: typeof _GithubApi; diff --git a/types/html-renderer.d.ts b/types/html-renderer.d.ts index 614c02b4e229..fb80f76a753d 100644 --- a/types/html-renderer.d.ts +++ b/types/html-renderer.d.ts @@ -16,7 +16,7 @@ import _PwaCategoryRenderer = require('../lighthouse-core/report/html/renderer/p import _ReportRenderer = require('../lighthouse-core/report/html/renderer/report-renderer.js'); import _ReportUIFeatures = require('../lighthouse-core/report/html/renderer/report-ui-features.js'); import _Util = require('../lighthouse-core/report/html/renderer/util.js'); -import _Base64 = require('../lighthouse-core/report/html/renderer/base64.js'); +import _TextEncoding = require('../lighthouse-core/report/html/renderer/text-encoding.js'); import _prepareLabData = require('../lighthouse-core/report/html/renderer/psi.js'); import _FileNamer = require('../lighthouse-core/lib/file-namer.js'); @@ -34,7 +34,7 @@ declare global { var ReportRenderer: typeof _ReportRenderer; var ReportUIFeatures: typeof _ReportUIFeatures; var Util: typeof _Util; - var Base64: typeof _Base64; + var TextEncoding: typeof _TextEncoding; var prepareLabData: typeof _prepareLabData; var pako: 
typeof import('pako'); var CompressionStream: { @@ -82,7 +82,7 @@ declare global { export interface StackPackDescription { /** The title of the stack pack. */ title: string; - /** A base64 data url to be used as the stack pack's icon. */ + /** A TextEncoding data url to be used as the stack pack's icon. */ iconDataURL: string; /** The stack-specific description for this audit. */ description: string; From b8e6b84c9ad4df167ba3580135dcaf188f394fbd Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Tue, 25 May 2021 15:26:43 -0700 Subject: [PATCH 23/32] tweak --- lighthouse-core/report/html/html-report-assets.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lighthouse-core/report/html/html-report-assets.js b/lighthouse-core/report/html/html-report-assets.js index a176e5be4472..af2ef7c8afcb 100644 --- a/lighthouse-core/report/html/html-report-assets.js +++ b/lighthouse-core/report/html/html-report-assets.js @@ -23,7 +23,7 @@ const REPORT_JAVASCRIPT = [ fs.readFileSync(__dirname + '/renderer/pwa-category-renderer.js', 'utf8'), fs.readFileSync(__dirname + '/renderer/report-renderer.js', 'utf8'), fs.readFileSync(__dirname + '/renderer/i18n.js', 'utf8'), - fs.readFileSync(__dirname + '/renderer/base64.js', 'utf8'), + fs.readFileSync(__dirname + '/renderer/text-encoding.js', 'utf8'), ].join(';\n'); const REPORT_CSS = fs.readFileSync(__dirname + '/report-styles.css', 'utf8'); const REPORT_TEMPLATES = fs.readFileSync(__dirname + '/templates.html', 'utf8'); From c6dafbd96b85645d5ac9dff7c98a7c0ef3930e23 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Tue, 25 May 2021 15:47:43 -0700 Subject: [PATCH 24/32] pr --- lighthouse-core/report/html/renderer/text-encoding.js | 6 +++++- package.json | 1 + types/html-renderer.d.ts | 1 - yarn.lock | 5 +++++ 4 files changed, 11 insertions(+), 2 deletions(-) diff --git a/lighthouse-core/report/html/renderer/text-encoding.js b/lighthouse-core/report/html/renderer/text-encoding.js index d72d1238773f..44889b27a0b9 100644 --- a/lighthouse-core/report/html/renderer/text-encoding.js +++ b/lighthouse-core/report/html/renderer/text-encoding.js @@ -5,7 +5,7 @@ */ 'use strict'; -/* global self btoa atob pako CompressionStream Response */ +/* global self btoa atob window CompressionStream Response */ const toBase64 = typeof btoa !== 'undefined' ? 
btoa : @@ -37,6 +37,8 @@ async function encode(string, options) { const compAb = await new Response(cs.readable).arrayBuffer(); bytes = new Uint8Array(compAb); } else { + /** @type {import('pako')=} */ + const pako = window.pako; bytes = pako.gzip(string); } } @@ -64,6 +66,8 @@ function decode(encoded, options) { } if (options.gzip) { + /** @type {import('pako')=} */ + const pako = window.pako; return pako.ungzip(bytes, {to: 'string'}); } else { return new TextDecoder().decode(bytes); diff --git a/package.json b/package.json index 11695162a0b8..d685de94ea29 100644 --- a/package.json +++ b/package.json @@ -149,6 +149,7 @@ "node-fetch": "^2.6.1", "npm-run-posix-or-windows": "^2.0.2", "package-json-versionify": "^1.0.4", + "pako": "^2.0.3", "prettier": "^1.14.3", "pretty-json-stringify": "^0.0.2", "puppeteer": "^9.1.1", diff --git a/types/html-renderer.d.ts b/types/html-renderer.d.ts index fb80f76a753d..399a3dd9e54e 100644 --- a/types/html-renderer.d.ts +++ b/types/html-renderer.d.ts @@ -36,7 +36,6 @@ declare global { var Util: typeof _Util; var TextEncoding: typeof _TextEncoding; var prepareLabData: typeof _prepareLabData; - var pako: typeof import('pako'); var CompressionStream: { prototype: CompressionStream, new (format: string): CompressionStream, diff --git a/yarn.lock b/yarn.lock index ba98119dfc07..fb118eae3828 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6164,6 +6164,11 @@ package-json@^6.3.0: registry-url "^5.0.0" semver "^6.2.0" +pako@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/pako/-/pako-2.0.3.tgz#cdf475e31b678565251406de9e759196a0ea7a43" + integrity sha512-WjR1hOeg+kki3ZIOjaf4b5WVcay1jaliKSYiEaB1XzwhMQZJxRdQRv0V31EKBYlxb4T7SK3hjfc/jxyU64BoSw== + pako@~1.0.5: version "1.0.8" resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.8.tgz#6844890aab9c635af868ad5fecc62e8acbba3ea4" From 56a55e0e5259e7019a6725612b5c3dccaf0c636b Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Tue, 25 May 2021 16:17:27 -0700 Subject: [PATCH 25/32] rename --- .../html/renderer/report-ui-features.js | 2 +- .../report/html/renderer/text-encoding.js | 23 ++++++++----------- .../html/renderer/text-encoding-test.js | 4 ++-- lighthouse-treemap/app/src/main.js | 2 +- lighthouse-treemap/test/treemap-test-pptr.js | 4 ++-- 5 files changed, 16 insertions(+), 19 deletions(-) diff --git a/lighthouse-core/report/html/renderer/report-ui-features.js b/lighthouse-core/report/html/renderer/report-ui-features.js index a33bb9af2ce2..25b15e7e069c 100644 --- a/lighthouse-core/report/html/renderer/report-ui-features.js +++ b/lighthouse-core/report/html/renderer/report-ui-features.js @@ -620,7 +620,7 @@ class ReportUIFeatures { static async openTabWithUrlData(data, url_, windowName) { const url = new URL(url_); const gzip = Boolean(window.CompressionStream); - url.hash = await TextEncoding.encode(JSON.stringify(data), { + url.hash = await TextEncoding.stringToBase64(JSON.stringify(data), { gzip, }); if (gzip) url.searchParams.set('gzip', '1'); diff --git a/lighthouse-core/report/html/renderer/text-encoding.js b/lighthouse-core/report/html/renderer/text-encoding.js index 44889b27a0b9..5a55f386cf7e 100644 --- a/lighthouse-core/report/html/renderer/text-encoding.js +++ b/lighthouse-core/report/html/renderer/text-encoding.js @@ -7,11 +7,11 @@ /* global self btoa atob window CompressionStream Response */ -const toBase64 = typeof btoa !== 'undefined' ? +const btoaIso = typeof btoa !== 'undefined' ? 
btoa : /** @param {string} str */ (str) => Buffer.from(str).toString('base64'); -const fromBase64 = typeof atob !== 'undefined' ? +const atobIso = typeof atob !== 'undefined' ? atob : /** @param {string} str */ (str) => Buffer.from(str, 'base64').toString(); @@ -25,7 +25,7 @@ const fromBase64 = typeof atob !== 'undefined' ? * @param {{gzip: boolean}} options * @return {Promise} */ -async function encode(string, options) { +async function stringToBase64(string, options) { let bytes = new TextEncoder().encode(string); if (options.gzip) { @@ -48,9 +48,9 @@ async function encode(string, options) { // https://jsbench.me/2gkoxazvjl const chunkSize = 5000; for (let i = 0; i < bytes.length; i += chunkSize) { - binaryString += String.fromCharCode(...new Uint8Array(bytes.buffer.slice(i, i + chunkSize))); + binaryString += String.fromCharCode(...bytes.subarray(i, i + chunkSize)); } - return toBase64(binaryString); + return btoaIso(binaryString); } /** @@ -58,12 +58,9 @@ async function encode(string, options) { * @param {{gzip: boolean}} options * @return {string} */ -function decode(encoded, options) { - const binaryString = fromBase64(encoded); - const bytes = new Uint8Array(binaryString.length); - for (let i = 0; i < bytes.length; i++) { - bytes[i] = binaryString.charCodeAt(i); - } +function base64ToString(encoded, options) { + const binaryString = atobIso(encoded); + const bytes = Uint8Array.from(binaryString, c => c.charCodeAt(0)); if (options.gzip) { /** @type {import('pako')=} */ @@ -75,7 +72,7 @@ function decode(encoded, options) { } if (typeof module !== 'undefined' && module.exports) { - module.exports = {encode, decode}; + module.exports = {stringToBase64, base64ToString}; } else { - self.TextEncoding = {encode, decode}; + self.TextEncoding = {stringToBase64, base64ToString}; } diff --git a/lighthouse-core/test/report/html/renderer/text-encoding-test.js b/lighthouse-core/test/report/html/renderer/text-encoding-test.js index a9c5b94af7ca..e8f63489cf17 100644 --- a/lighthouse-core/test/report/html/renderer/text-encoding-test.js +++ b/lighthouse-core/test/report/html/renderer/text-encoding-test.js @@ -11,11 +11,11 @@ const TextEncoding = require('../../../../report/html/renderer/text-encoding.js' describe('TextEncoding', () => { beforeAll(() => { - global.pako = require('pako'); + global.window = {pako: require('pako')}; }); afterAll(() => { - global.pako = undefined; + global.window = undefined; }); /** @type {string} */ diff --git a/lighthouse-treemap/app/src/main.js b/lighthouse-treemap/app/src/main.js index 7dfc58bc1e34..0e53089b2146 100644 --- a/lighthouse-treemap/app/src/main.js +++ b/lighthouse-treemap/app/src/main.js @@ -889,7 +889,7 @@ async function main() { const queryParams = new URLSearchParams(window.location.search); const gzip = queryParams.get('gzip') === '1'; const hashParams = location.hash ? 
- JSON.parse(TextEncoding.decode(location.hash.substr(1), {gzip})) : + JSON.parse(TextEncoding.base64ToString(location.hash.substr(1), {gzip})) : {}; /** @type {Record} */ const params = { diff --git a/lighthouse-treemap/test/treemap-test-pptr.js b/lighthouse-treemap/test/treemap-test-pptr.js index 41f35c45cae8..27a5df5db76b 100644 --- a/lighthouse-treemap/test/treemap-test-pptr.js +++ b/lighthouse-treemap/test/treemap-test-pptr.js @@ -112,7 +112,7 @@ describe('Lighthouse Treemap', () => { const encoded = await page.evaluate(` ${fs.readFileSync( require.resolve('../../lighthouse-core/report/html/renderer/text-encoding.js'), 'utf-8')} - TextEncoding.encode(${JSON.stringify(json)}, {gzip: true}); + TextEncoding.stringToBase64(${JSON.stringify(json)}, {gzip: true}); `); await page.goto(`${treemapUrl}?gzip=1#${encoded}`); @@ -130,7 +130,7 @@ describe('Lighthouse Treemap', () => { const encoded = await page.evaluate(` ${fs.readFileSync( require.resolve('../../lighthouse-core/report/html/renderer/text-encoding.js'), 'utf-8')} - TextEncoding.encode(${JSON.stringify(json)}, {gzip: false}); + TextEncoding.stringToBase64(${JSON.stringify(json)}, {gzip: false}); `); await page.goto(`${treemapUrl}#${encoded}`); From 363022e5bc243c451555d0ceefffc5c269dd686e Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Tue, 25 May 2021 16:25:52 -0700 Subject: [PATCH 26/32] comment --- lighthouse-core/report/html/renderer/text-encoding.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lighthouse-core/report/html/renderer/text-encoding.js b/lighthouse-core/report/html/renderer/text-encoding.js index 5a55f386cf7e..f8bc6ed99b11 100644 --- a/lighthouse-core/report/html/renderer/text-encoding.js +++ b/lighthouse-core/report/html/renderer/text-encoding.js @@ -17,7 +17,7 @@ const atobIso = typeof atob !== 'undefined' ? (str) => Buffer.from(str, 'base64').toString(); /** - * Takes an UTF-8 string and returns a URL-safe base64 encoded string. + * Takes an UTF-8 string and returns a base64 encoded string. * If gzip is true, the UTF-8 bytes are gzipped before base64'd, using * CompressionStream (currently only in Chrome), falling back to pako * (which is only used to encode in our Node tests). 
From e33568a1b9e23135a35b19b5d60be69d6eabe117 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Tue, 25 May 2021 16:29:09 -0700 Subject: [PATCH 27/32] rename --- .../report/html/renderer/report-ui-features.js | 2 +- lighthouse-core/report/html/renderer/text-encoding.js | 8 ++++---- lighthouse-treemap/app/src/main.js | 2 +- lighthouse-treemap/test/treemap-test-pptr.js | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/lighthouse-core/report/html/renderer/report-ui-features.js b/lighthouse-core/report/html/renderer/report-ui-features.js index 25b15e7e069c..630dee62fb75 100644 --- a/lighthouse-core/report/html/renderer/report-ui-features.js +++ b/lighthouse-core/report/html/renderer/report-ui-features.js @@ -620,7 +620,7 @@ class ReportUIFeatures { static async openTabWithUrlData(data, url_, windowName) { const url = new URL(url_); const gzip = Boolean(window.CompressionStream); - url.hash = await TextEncoding.stringToBase64(JSON.stringify(data), { + url.hash = await TextEncoding.toBase64(JSON.stringify(data), { gzip, }); if (gzip) url.searchParams.set('gzip', '1'); diff --git a/lighthouse-core/report/html/renderer/text-encoding.js b/lighthouse-core/report/html/renderer/text-encoding.js index f8bc6ed99b11..c364e07fd908 100644 --- a/lighthouse-core/report/html/renderer/text-encoding.js +++ b/lighthouse-core/report/html/renderer/text-encoding.js @@ -25,7 +25,7 @@ const atobIso = typeof atob !== 'undefined' ? * @param {{gzip: boolean}} options * @return {Promise} */ -async function stringToBase64(string, options) { +async function toBase64(string, options) { let bytes = new TextEncoder().encode(string); if (options.gzip) { @@ -58,7 +58,7 @@ async function stringToBase64(string, options) { * @param {{gzip: boolean}} options * @return {string} */ -function base64ToString(encoded, options) { +function fromBase64(encoded, options) { const binaryString = atobIso(encoded); const bytes = Uint8Array.from(binaryString, c => c.charCodeAt(0)); @@ -72,7 +72,7 @@ function base64ToString(encoded, options) { } if (typeof module !== 'undefined' && module.exports) { - module.exports = {stringToBase64, base64ToString}; + module.exports = {toBase64, fromBase64}; } else { - self.TextEncoding = {stringToBase64, base64ToString}; + self.TextEncoding = {toBase64, fromBase64}; } diff --git a/lighthouse-treemap/app/src/main.js b/lighthouse-treemap/app/src/main.js index 0e53089b2146..95c4c53d9a69 100644 --- a/lighthouse-treemap/app/src/main.js +++ b/lighthouse-treemap/app/src/main.js @@ -889,7 +889,7 @@ async function main() { const queryParams = new URLSearchParams(window.location.search); const gzip = queryParams.get('gzip') === '1'; const hashParams = location.hash ? 
- JSON.parse(TextEncoding.base64ToString(location.hash.substr(1), {gzip})) : + JSON.parse(TextEncoding.fromBase64(location.hash.substr(1), {gzip})) : {}; /** @type {Record} */ const params = { diff --git a/lighthouse-treemap/test/treemap-test-pptr.js b/lighthouse-treemap/test/treemap-test-pptr.js index 27a5df5db76b..1a9ae72f7558 100644 --- a/lighthouse-treemap/test/treemap-test-pptr.js +++ b/lighthouse-treemap/test/treemap-test-pptr.js @@ -112,7 +112,7 @@ describe('Lighthouse Treemap', () => { const encoded = await page.evaluate(` ${fs.readFileSync( require.resolve('../../lighthouse-core/report/html/renderer/text-encoding.js'), 'utf-8')} - TextEncoding.stringToBase64(${JSON.stringify(json)}, {gzip: true}); + TextEncoding.toBase64(${JSON.stringify(json)}, {gzip: true}); `); await page.goto(`${treemapUrl}?gzip=1#${encoded}`); @@ -130,7 +130,7 @@ describe('Lighthouse Treemap', () => { const encoded = await page.evaluate(` ${fs.readFileSync( require.resolve('../../lighthouse-core/report/html/renderer/text-encoding.js'), 'utf-8')} - TextEncoding.stringToBase64(${JSON.stringify(json)}, {gzip: false}); + TextEncoding.toBase64(${JSON.stringify(json)}, {gzip: false}); `); await page.goto(`${treemapUrl}#${encoded}`); From 941cadbe44e8973812c73221705833493bdcef81 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Wed, 26 May 2021 09:21:06 -0700 Subject: [PATCH 28/32] tweak --- .../test/report/html/renderer/text-encoding-test.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lighthouse-core/test/report/html/renderer/text-encoding-test.js b/lighthouse-core/test/report/html/renderer/text-encoding-test.js index e8f63489cf17..493e1b26b28c 100644 --- a/lighthouse-core/test/report/html/renderer/text-encoding-test.js +++ b/lighthouse-core/test/report/html/renderer/text-encoding-test.js @@ -21,8 +21,8 @@ describe('TextEncoding', () => { /** @type {string} */ async function test(str) { for (const gzip of [false, true]) { - const binary = await TextEncoding.encode(str, {gzip}); - const roundtrip = TextEncoding.decode(binary, {gzip}); + const binary = await TextEncoding.toBase64(str, {gzip}); + const roundtrip = TextEncoding.fromBase64(binary, {gzip}); expect(roundtrip.length).toEqual(str.length); expect(roundtrip).toEqual(str); } From d55f819acbbc60106d9c2d710a09d3bb92055f0d Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Wed, 26 May 2021 13:21:06 -0700 Subject: [PATCH 29/32] Update types/html-renderer.d.ts --- types/html-renderer.d.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/types/html-renderer.d.ts b/types/html-renderer.d.ts index 399a3dd9e54e..4d03fef5e601 100644 --- a/types/html-renderer.d.ts +++ b/types/html-renderer.d.ts @@ -81,7 +81,7 @@ declare global { export interface StackPackDescription { /** The title of the stack pack. */ title: string; - /** A TextEncoding data url to be used as the stack pack's icon. */ + /** A base64 data url to be used as the stack pack's icon. */ iconDataURL: string; /** The stack-specific description for this audit. 
*/ description: string; From b8d8aff57f491d5f7e8dfcbc948e9a6f3e47d17b Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Wed, 26 May 2021 20:56:44 -0700 Subject: [PATCH 30/32] edit --- lighthouse-core/report/html/renderer/text-encoding.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lighthouse-core/report/html/renderer/text-encoding.js b/lighthouse-core/report/html/renderer/text-encoding.js index c364e07fd908..4386eff0fd55 100644 --- a/lighthouse-core/report/html/renderer/text-encoding.js +++ b/lighthouse-core/report/html/renderer/text-encoding.js @@ -7,11 +7,11 @@ /* global self btoa atob window CompressionStream Response */ -const btoaIso = typeof btoa !== 'undefined' ? +const btoa_ = typeof btoa !== 'undefined' ? btoa : /** @param {string} str */ (str) => Buffer.from(str).toString('base64'); -const atobIso = typeof atob !== 'undefined' ? +const atob_ = typeof atob !== 'undefined' ? atob : /** @param {string} str */ (str) => Buffer.from(str, 'base64').toString(); @@ -50,7 +50,7 @@ async function toBase64(string, options) { for (let i = 0; i < bytes.length; i += chunkSize) { binaryString += String.fromCharCode(...bytes.subarray(i, i + chunkSize)); } - return btoaIso(binaryString); + return btoa_(binaryString); } /** @@ -59,7 +59,7 @@ async function toBase64(string, options) { * @return {string} */ function fromBase64(encoded, options) { - const binaryString = atobIso(encoded); + const binaryString = atob_(encoded); const bytes = Uint8Array.from(binaryString, c => c.charCodeAt(0)); if (options.gzip) { From f7570948d67d7bbd4ddbeea02d5b2446b8f44ad9 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Thu, 27 May 2021 10:25:20 -0700 Subject: [PATCH 31/32] From d45dc4b3128ffd3ddcd2552e784e08b14f43a6c2 Mon Sep 17 00:00:00 2001 From: Connor Clark Date: Thu, 27 May 2021 13:47:09 -0700 Subject: [PATCH 32/32] merge --- lighthouse-core/report/html/renderer/psi.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lighthouse-core/report/html/renderer/psi.js b/lighthouse-core/report/html/renderer/psi.js index f0b3561e5f7c..de6248bcc5e2 100644 --- a/lighthouse-core/report/html/renderer/psi.js +++ b/lighthouse-core/report/html/renderer/psi.js @@ -119,7 +119,7 @@ function prepareLabData(LHResult, document) { container: reportEl.querySelector('.lh-audit-group--metrics'), text: Util.i18n.strings.viewTreemapLabel, icon: 'treemap', - onClick: () => ReportUIFeatures.openTreemap(lhResult, 'url'), + onClick: () => ReportUIFeatures.openTreemap(lhResult), }); } };
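
For reference, a minimal Node-side round-trip sketch of the TextEncoding helpers introduced in this series. This is not part of the patches; it assumes pako is installed (it is added to devDependencies in PATCH 24), that the script runs from the repo root so the require path resolves, and it stubs window.pako the same way the updated text-encoding-test.js does. The payload string is purely illustrative.

'use strict';

// text-encoding.js falls back to window.pako when CompressionStream is not
// available, and always uses window.pako to ungzip, so mirror the test setup
// before requiring the module.
global.window = {pako: require('pako')};

const TextEncoding = require('./lighthouse-core/report/html/renderer/text-encoding.js');

async function demo() {
  const payload = JSON.stringify({requestedUrl: 'https://example.com'});

  for (const gzip of [false, true]) {
    // toBase64 returns a Promise<string>; fromBase64 is synchronous.
    const encoded = await TextEncoding.toBase64(payload, {gzip});
    const decoded = TextEncoding.fromBase64(encoded, {gzip});
    console.log(gzip ? 'gzip' : 'plain', encoded.length, decoded === payload);
  }
}

demo();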