Perf testing: Cover Site Editor loading time (#23842)
Add some performance test coverage to the site editor (loading time), specifically to avoid performance regressions with regard to template resolution and `wp_template` auto-draft creation (see e.g. #23662).

Changes parts of the underlying performance test framework to allow for multiple test files.
ockham authored Jul 16, 2020
1 parent 0c41756 commit 04a7924
Showing 7 changed files with 169 additions and 40 deletions.
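As a rough sketch of the per-suite flow the reworked framework uses (the loop below is illustrative, not part of the commit; the paths mirror the diffs that follow): each suite name maps to a spec file that the runner targets individually, and each spec writes its own results file next to itself.

```
// Illustrative only: mirrors the naming convention used in the diffs below.
const testSuites = [ 'post-editor', 'site-editor' ];

for ( const testSuite of testSuites ) {
	const spec = `packages/e2e-tests/specs/performance/${ testSuite }.test.js`;
	// The runner invokes one spec at a time:
	console.log( `npm run test-performance -- ${ spec }` );
	// ...and reads its numbers back from the matching results file:
	console.log(
		`packages/e2e-tests/specs/performance/${ testSuite }.test.results.json`
	);
}
```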
2 changes: 1 addition & 1 deletion .github/workflows/performance.yml
@@ -37,4 +37,4 @@ jobs:
npm ci
- name: Run the performance tests
run: ./bin/plugin/cli.js perf --ci $GITHUB_SHA master
run: ./bin/plugin/cli.js perf --ci $GITHUB_SHA master --tests-branch $GITHUB_SHA
4 changes: 4 additions & 0 deletions bin/plugin/cli.js
@@ -58,6 +58,10 @@ program
.command( 'performance-tests [branches...]' )
.alias( 'perf' )
.option( '-c, --ci', 'Run in CI (non interactive)' )
.option(
'--tests-branch <branch>',
"Use this branch's performance test files"
)
.description(
'Runs performance tests on two separate branches and outputs the result'
)
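For context on how the new flag reaches the command handler: Commander camel-cases `--tests-branch` into a `testsBranch` property on the options passed to the `perf` action. A minimal sketch (the stripped-down program below is illustrative, not the actual CLI):

```
const program = require( 'commander' );

program
	.command( 'perf [branches...]' )
	.option( '--tests-branch <branch>', "Use this branch's performance test files" )
	.action( ( branches, options ) => {
		// `node cli.js perf master my-branch --tests-branch my-branch`
		// → branches === [ 'master', 'my-branch' ], options.testsBranch === 'my-branch'
		console.log( branches, options.testsBranch );
	} );

program.parse( process.argv );
```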
98 changes: 66 additions & 32 deletions bin/plugin/commands/performance.js
@@ -2,6 +2,7 @@
* External dependencies
*/
const path = require( 'path' );
const { pickBy, mapValues } = require( 'lodash' );

/**
* Internal dependencies
@@ -19,7 +20,8 @@ const config = require( '../config' );
/**
* @typedef WPPerformanceCommandOptions
*
* @property {boolean=} ci Run on CI.
* @property {boolean=} ci Run on CI.
* @property {string=} testsBranch The branch whose performance test files will be used for testing.
*/

/**
@@ -46,14 +48,14 @@ const config = require( '../config' );
/**
* @typedef WPFormattedPerformanceResults
*
* @property {string} load Load Time.
* @property {string} domcontentloaded DOM Contentloaded time.
* @property {string} type Average type time.
* @property {string} minType Minimum type time.
* @property {string} maxType Maximum type time.
* @property {string} focus Average block selection time.
* @property {string} minFocus Min block selection time.
* @property {string} maxFocus Max block selection time.
* @property {string=} load Load Time.
* @property {string=} domcontentloaded DOM Contentloaded time.
* @property {string=} type Average type time.
* @property {string=} minType Minimum type time.
* @property {string=} maxType Maximum type time.
* @property {string=} focus Average block selection time.
* @property {string=} minFocus Min block selection time.
* @property {string=} maxFocus Max block selection time.
*/

/**
@@ -64,7 +66,7 @@ const config = require( '../config' );
* @return {number} Average.
*/
function average( array ) {
return array.reduce( ( a, b ) => a + b ) / array.length;
return array.reduce( ( a, b ) => a + b, 0 ) / array.length;
}
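A note on the `, 0` seed added to `reduce` above: without an initial value, `Array.prototype.reduce` throws a `TypeError` on an empty array. Since the site editor suite only collects loading samples (see the new spec below), the typing/selection arrays can end up empty; with the seed, `average` returns `NaN` (from `0 / 0`) instead of throwing, and those entries are filtered out further down with `isFinite`. A quick illustration:

```
// [].reduce( ( a, b ) => a + b );       // TypeError: empty array, no initial value
const emptyAverage = [].reduce( ( a, b ) => a + b, 0 ) / [].length; // NaN (0 / 0)
const typeAverage = [ 30, 34, 32 ].reduce( ( a, b ) => a + b, 0 ) / 3; // 32
console.log( emptyAverage, typeAverage ); // NaN 32
```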

/**
@@ -119,13 +121,15 @@ function curateResults( results ) {
*
* @param {string} performanceTestDirectory Path to the performance tests' clone.
* @param {string} environmentDirectory Path to the plugin environment's clone.
* @param {string} testSuite Name of the tests set.
* @param {string} branch Branch name.
*
* @return {Promise<WPFormattedPerformanceResults>} Performance results for the branch.
*/
async function getPerformanceResultsForBranch(
performanceTestDirectory,
environmentDirectory,
testSuite,
branch
) {
// Restore clean working directory (e.g. if `package-lock.json` has local
@@ -147,30 +151,36 @@ async function getPerformanceResultsForBranch(
const results = [];
for ( let i = 0; i < 3; i++ ) {
await runShellScript(
'npm run test-performance',
`npm run test-performance -- packages/e2e-tests/specs/performance/${ testSuite }.test.js`,
performanceTestDirectory
);
const rawResults = await readJSONFile(
path.join(
performanceTestDirectory,
'packages/e2e-tests/specs/performance/results.json'
`packages/e2e-tests/specs/performance/${ testSuite }.test.results.json`
)
);
results.push( curateResults( rawResults ) );
}

return {
load: formatTime( median( results.map( ( r ) => r.load ) ) ),
domcontentloaded: formatTime(
median( results.map( ( r ) => r.domcontentloaded ) )
),
type: formatTime( median( results.map( ( r ) => r.type ) ) ),
minType: formatTime( median( results.map( ( r ) => r.minType ) ) ),
maxType: formatTime( median( results.map( ( r ) => r.maxType ) ) ),
focus: formatTime( median( results.map( ( r ) => r.focus ) ) ),
minFocus: formatTime( median( results.map( ( r ) => r.minFocus ) ) ),
maxFocus: formatTime( median( results.map( ( r ) => r.maxFocus ) ) ),
};
const medians = mapValues(
{
load: results.map( ( r ) => r.load ),
domcontentloaded: results.map( ( r ) => r.domcontentloaded ),
type: results.map( ( r ) => r.type ),
minType: results.map( ( r ) => r.minType ),
maxType: results.map( ( r ) => r.maxType ),
focus: results.map( ( r ) => r.focus ),
minFocus: results.map( ( r ) => r.minFocus ),
maxFocus: results.map( ( r ) => r.maxFocus ),
},
median
);

// Remove results for which we don't have data (and where the statistical functions thus returned NaN or Infinity etc).
const finiteMedians = pickBy( medians, isFinite );
// Format results as times.
return mapValues( finiteMedians, formatTime );
}
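To make the filtering above concrete, here is a hedged walkthrough with made-up numbers for a suite that only measures loading; `median` and `formatTime` are simplified stand-ins for the real helpers and may differ from them:

```
const { pickBy, mapValues } = require( 'lodash' );

// Simplified stand-ins, for illustration only.
const median = ( arr ) =>
	[ ...arr ].sort( ( a, b ) => a - b )[ Math.floor( arr.length / 2 ) ];
const formatTime = ( ms ) => `${ ms } ms`;

const medians = mapValues(
	{
		load: [ 6100, 6300, 6200 ],
		domcontentloaded: [ 5200, 5400, 5300 ],
		type: [ NaN, NaN, NaN ], // no typing samples were collected
	},
	median
);
// → { load: 6200, domcontentloaded: 5300, type: NaN }

const finiteMedians = pickBy( medians, isFinite );
// → { load: 6200, domcontentloaded: 5300 }

console.log( mapValues( finiteMedians, formatTime ) );
// → { load: '6200 ms', domcontentloaded: '5300 ms' }
```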

/**
@@ -198,6 +208,19 @@ async function runPerformanceTests( branches, options ) {

log( '>> Cloning the repository' );
const performanceTestDirectory = await git.clone( config.gitRepositoryURL );

if ( !! options.testsBranch ) {
log(
'>> Fetching the ' +
formats.success( options.testsBranch ) +
' branch'
);
await git.checkoutRemoteBranch(
performanceTestDirectory,
options.testsBranch
);
}

const environmentDirectory = getRandomTemporaryPath();
log(
'>> Perf Tests Directory : ' +
@@ -220,21 +243,32 @@ async function runPerformanceTests( branches, options ) {
log( '>> Starting the WordPress environment' );
await runShellScript( 'npm run wp-env start', environmentDirectory );

/** @type {Record<string, WPFormattedPerformanceResults>} */
const testSuites = [ 'post-editor', 'site-editor' ];

/** @type {Record<string,Record<string, WPFormattedPerformanceResults>>} */
const results = {};
for ( const branch of branches ) {
results[ branch ] = await getPerformanceResultsForBranch(
performanceTestDirectory,
environmentDirectory,
branch
);
for ( const testSuite of testSuites ) {
results[ testSuite ] = {};
for ( const branch of branches ) {
results[ testSuite ][
branch
] = await getPerformanceResultsForBranch(
performanceTestDirectory,
environmentDirectory,
testSuite,
branch
);
}
}

log( '>> Stopping the WordPress environment' );
await runShellScript( 'npm run wp-env stop', environmentDirectory );

log( '\n>> 🎉 Results.\n' );
console.table( results );
for ( const testSuite of testSuites ) {
log( `\n>> ${ testSuite }\n` );
console.table( results[ testSuite ] );
}
}

module.exports = {
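After these loops, `results` is keyed first by test suite and then by branch, and one table is printed per suite. A hypothetical final shape (branch names and figures are placeholders, and the exact metric formatting may differ):

```
// Placeholder data only; real values come from getPerformanceResultsForBranch().
const results = {
	'post-editor': {
		master: { load: '5100 ms', type: '32 ms', focus: '28 ms' },
		'add/site-editor-perf': { load: '5150 ms', type: '33 ms', focus: '27 ms' },
	},
	'site-editor': {
		master: { load: '6200 ms', domcontentloaded: '5300 ms' },
		'add/site-editor-perf': { load: '6150 ms', domcontentloaded: '5250 ms' },
	},
};

for ( const testSuite of Object.keys( results ) ) {
	console.log( `\n>> ${ testSuite }\n` );
	console.table( results[ testSuite ] ); // one row per branch, one column per metric
}
```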
6 changes: 6 additions & 0 deletions docs/contributors/testing-overview.md
@@ -480,4 +480,10 @@ In addition to that, you can also compare the metrics across branches (or tags o
./bin/plugin/cli.js perf master v8.1.0 v8.0.0
```

Finally, you can pass an additional `--tests-branch` argument to specify which branch's performance test files you'd like to run. This is particularly useful when modifying/extending the perf tests:

```
./bin/plugin/cli.js perf master v8.1.0 v8.0.0 --tests-branch add/perf-tests-coverage
```

**Note** This command may take some time to run the benchmark. While it is running, avoid using your computer or leaving a lot of background processes running, to minimize external factors that can impact the results across branches.
11 changes: 7 additions & 4 deletions packages/e2e-tests/config/performance-reporter.js
@@ -2,6 +2,7 @@
* External dependencies
*/
const { readFileSync, existsSync } = require( 'fs' );
const path = require( 'path' );
const chalk = require( 'chalk' );

function average( array ) {
@@ -17,14 +18,16 @@ const title = chalk.bold;
const success = chalk.bold.green;

class PerformanceReporter {
onRunComplete() {
const path = __dirname + '/../specs/performance/results.json';
onTestResult( test ) {
const dirname = path.dirname( test.path );
const basename = path.basename( test.path, '.js' );
const filepath = path.join( dirname, basename + '.results.json' );

if ( ! existsSync( path ) ) {
if ( ! existsSync( filepath ) ) {
return;
}

const results = readFileSync( path, 'utf8' );
const results = readFileSync( filepath, 'utf8' );
const { load, domcontentloaded, type, focus } = JSON.parse( results );

if ( load && load.length ) {
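The reporter now derives a per-spec results path from the test file Jest hands to `onTestResult`, instead of reading one shared `results.json`. Roughly (the absolute path below is hypothetical):

```
const path = require( 'path' );

// Hypothetical value of `test.path` for the new site editor spec.
const testPath =
	'/home/user/gutenberg/packages/e2e-tests/specs/performance/site-editor.test.js';

const dirname = path.dirname( testPath );
const basename = path.basename( testPath, '.js' ); // 'site-editor.test'
const filepath = path.join( dirname, basename + '.results.json' );

console.log( filepath );
// → /home/user/gutenberg/packages/e2e-tests/specs/performance/site-editor.test.results.json
```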
packages/e2e-tests/specs/performance/post-editor.test.js
@@ -1,7 +1,7 @@
/**
* External dependencies
*/
import { join } from 'path';
import { basename, join } from 'path';
import { existsSync, readFileSync, unlinkSync, writeFileSync } from 'fs';

/**
@@ -71,7 +71,7 @@ function getSelectionEventDurations( trace ) {

jest.setTimeout( 1000000 );

describe( 'Performance', () => {
describe( 'Post Editor Performance', () => {
it( 'Loading, typing and selecting blocks', async () => {
const results = {
load: [],
@@ -182,8 +182,10 @@ describe( 'Performance', () => {
const [ focusEvents ] = getSelectionEventDurations( traceResults );
results.focus = focusEvents;

const resultsFilename = basename( __filename, '.js' ) + '.results.json';

writeFileSync(
__dirname + '/results.json',
join( __dirname, resultsFilename ),
JSON.stringify( results, null, 2 )
);

80 changes: 80 additions & 0 deletions packages/e2e-tests/specs/performance/site-editor.test.js
@@ -0,0 +1,80 @@
/**
* External dependencies
*/
import { basename, join } from 'path';
import { writeFileSync } from 'fs';

/**
* Internal dependencies
*/
import { useExperimentalFeatures } from '../../experimental-features';

/**
* WordPress dependencies
*/
import { trashAllPosts, visitAdminPage } from '@wordpress/e2e-test-utils';
import { addQueryArgs } from '@wordpress/url';

jest.setTimeout( 1000000 );

describe( 'Site Editor Performance', () => {
useExperimentalFeatures( [
'#gutenberg-full-site-editing',
'#gutenberg-full-site-editing-demo',
] );

beforeAll( async () => {
await trashAllPosts( 'wp_template' );
await trashAllPosts( 'wp_template_part' );
} );
afterAll( async () => {
await trashAllPosts( 'wp_template' );
await trashAllPosts( 'wp_template_part' );
} );

it( 'Loading', async () => {
const results = {
load: [],
domcontentloaded: [],
type: [],
focus: [],
};

await visitAdminPage(
'admin.php',
addQueryArgs( '', {
page: 'gutenberg-edit-site',
} ).slice( 1 )
);

let i = 3;

// Measuring loading time
while ( i-- ) {
await page.reload( { waitUntil: [ 'domcontentloaded', 'load' ] } );
const timings = JSON.parse(
await page.evaluate( () =>
JSON.stringify( window.performance.timing )
)
);
const {
navigationStart,
domContentLoadedEventEnd,
loadEventEnd,
} = timings;
results.load.push( loadEventEnd - navigationStart );
results.domcontentloaded.push(
domContentLoadedEventEnd - navigationStart
);
}

const resultsFilename = basename( __filename, '.js' ) + '.results.json';

writeFileSync(
join( __dirname, resultsFilename ),
JSON.stringify( results, null, 2 )
);

expect( true ).toBe( true );
} );
} );
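The loading metrics above are read from the legacy `window.performance.timing` (Navigation Timing) object; with hypothetical epoch-millisecond values, the two numbers the test records per reload are computed like this:

```
// Hypothetical snapshot of window.performance.timing (epoch milliseconds),
// as returned through page.evaluate() in the test above.
const timings = {
	navigationStart: 1594890000000,
	domContentLoadedEventEnd: 1594890002500,
	loadEventEnd: 1594890004000,
};

const load = timings.loadEventEnd - timings.navigationStart; // 4000
const domcontentloaded =
	timings.domContentLoadedEventEnd - timings.navigationStart; // 2500

console.log( { load, domcontentloaded } ); // values in milliseconds
```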
