-
Notifications
You must be signed in to change notification settings - Fork 246
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat: compiler benchmarks and gh action (#3503)
Adds a private package @jsii/benchmarks which includes a basic command line application for running jsii benchmarks. Adds a custom benchmark runner leveraging the NodeJS `perf_hooks` module to time function calls across multiple iterations and return averaged results. Adds github action workflows to run benchmarks on PRs and compare performance to the target branch. This action will fail when a test suite is slower by 200%, but this threshold can be configured. Adds to the gh-pages action workflow to run benchmarks and append new results to a file that is displayed as a graph on our docs site at the `/dev/bench` url. This will show benchmark suite results over time and allow us to track overall change in compiler performance across multiple commits. Example of benchmark results comment MrArnoldPalmer#417 (comment) Example of benchmark results graph (but only 1 result cause just testing) https://mrarnoldpalmer.github.io/jsii/dev/bench/ --- By submitting this pull request, I confirm that my contribution is made under the terms of the [Apache 2.0 license]. [Apache 2.0 license]: https://www.apache.org/licenses/LICENSE-2.0
- Loading branch information
1 parent
f1a56e3
commit 4a91cf0
Showing
16 changed files
with
567 additions
and
2 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -73,7 +73,7 @@ jobs: | |
git config user.email "[email protected]" | ||
- name: Prepare Commit | ||
run: |- | ||
rsync --delete --exclude=.git --recursive ${{ runner.temp }}/site/ ./ | ||
rsync --delete --exclude=.git --exclude=dev --recursive ${{ runner.temp }}/site/ ./ | ||
touch .nojekyll | ||
git add . | ||
git diff --cached --exit-code >/dev/null || ( | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,9 @@ | ||
---
# ESLint configuration for the @jsii/benchmarks package.
# Inherits the repo-wide ruleset from the monorepo root.
extends: ../../../eslint-config.yaml
# Benchmark fixtures (e.g. vendored tarball contents) are not our code; skip linting them.
ignorePatterns:
  - fixtures

rules:
  # Scripts under **/scripts/** may import devDependencies; everything else
  # must only import declared runtime dependencies.
  'import/no-extraneous-dependencies':
    - error
    - devDependencies: ['**/scripts/**']
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
output.txt | ||
*.d.ts | ||
*.js |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,25 @@ | ||
# jsii Benchmarks | ||
|
||
This package is meant to collect benchmarks for `jsii`, `jsii-pacmak`, and any other jsii packages sourced in TS. It | ||
contains a basic benchmark runner in [`benchmark.ts`](lib/benchmark.ts) that uses the `perf_hooks` module in order to | ||
time synchronous functions. | ||
|
||
## Usage | ||
|
||
There is a small CLI app wrapping calls to the benchmarks defined. To call the benchmarks: | ||
|
||
``` | ||
yarn benchmark | ||
``` | ||
|
||
To output benchmark run results to a json file, pass the `--output` option | ||
|
||
``` | ||
yarn benchmark --output my-file.json | ||
``` | ||
|
||
## Output Format | ||
|
||
The output format is JSON and is used by the | ||
[continuous benchmark action](https://github.com/benchmark-action/github-action-benchmark) which tracks the results of | ||
benchmarks over time. |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,86 @@ | ||
import * as fs from 'fs-extra'; | ||
import * as yargs from 'yargs'; | ||
|
||
import { benchmarks } from '../lib'; | ||
import { Benchmark } from '../lib/benchmark'; | ||
|
||
/** | ||
* Format of benchmark output used by continous benchmarking action. | ||
* See [documentation](https://github.com/benchmark-action/github-action-benchmark/blob/master/README.md) for details | ||
*/ | ||
interface ResultsJson { | ||
/** | ||
* The name of the benchmark | ||
*/ | ||
name: string; | ||
|
||
/** | ||
* The unit of measure, usually seconds | ||
*/ | ||
unit: string; | ||
|
||
/** | ||
* The result of the measurement, usually an average over x iterations | ||
*/ | ||
value: number; | ||
|
||
/** | ||
* The variance of all runs | ||
*/ | ||
range: number; | ||
|
||
/** | ||
* Extra information about the benchmark, displayed in a tooltip | ||
*/ | ||
extra: string; | ||
} | ||
|
||
(async () => { | ||
/* eslint-disable-next-line @typescript-eslint/await-thenable */ | ||
const argv = await yargs | ||
.command('$0', 'Runs jsii benchmark tests and displays results', (argv) => | ||
argv.option('output', { | ||
type: 'string', | ||
desc: 'location of benchmark results json file, does not output to file if not specified.', | ||
}), | ||
) | ||
.help().argv; | ||
|
||
// Run list of benchmarks in sequence | ||
const resultsJson: ResultsJson[] = await benchmarks.reduce( | ||
async ( | ||
accum: Promise<ResultsJson[]>, | ||
benchmark: Benchmark<any>, | ||
): Promise<ResultsJson[]> => { | ||
const prev = await accum; | ||
const result = await benchmark.run(); | ||
const extra = `${result.name} averaged ${result.average} milliseconds over ${result.iterations.length} runs`; | ||
console.log(extra); | ||
return [ | ||
...prev, | ||
{ | ||
name: result.name, | ||
unit: 'milliseconds', | ||
value: result.average, | ||
range: result.variance, | ||
extra, | ||
}, | ||
]; | ||
}, | ||
Promise.resolve([]), | ||
); | ||
|
||
if (argv.output) { | ||
await fs.writeJson(argv.output, resultsJson, { spaces: 2 }); | ||
console.log(`results written to ${argv.output}`); | ||
} | ||
|
||
return resultsJson; | ||
})() | ||
.then((results) => { | ||
console.log(`successfully completed ${results.length} benchmarks`); | ||
}) | ||
.catch((e) => { | ||
console.error(`Error: ${e.stack}`); | ||
process.exitCode = -1; | ||
}); |
Binary file not shown.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,158 @@ | ||
import { performance, PerformanceObserver, PerformanceEntry } from 'perf_hooks'; | ||
|
||
/**
 * Result of a benchmark run
 */
interface Result {
  /**
   * The name of the benchmark
   */
  readonly name: string;

  /**
   * The average duration across all iterations
   */
  readonly average: number;

  /**
   * Maximum duration across all iterations
   */
  readonly max: number;

  /**
   * Minimum duration across all iterations
   */
  readonly min: number;

  /**
   * The spread of observed durations: max - min.
   * (Named "variance" but this is the statistical range, not σ².)
   */
  readonly variance: number;

  /**
   * Results of individual runs, as raw `perf_hooks` entries
   */
  readonly iterations: readonly PerformanceEntry[];
}
|
||
/**
 * A simple benchmark for measuring synchronous functions. Uses the `perf_hooks`
 * module to measure how long a subject takes to execute and averages the result
 * over all runs. Runs `setup`, `beforeEach`, `afterEach`, and `teardown`
 * lifecycle hooks before, between, and after runs. These functions, and the
 * subject function, have access to an optionally defined `context` object that
 * can be returned from the `setup` function. This allows referencing shared
 * state across benchmark runs and lifecycle hooks to do things like setup,
 * teardown, stubbing, etc.
 */
export class Benchmark<C> {
  /**
   * How many times to run the subject (default: 5)
   */
  #iterations = 5;

  /**
   * Results of individual runs, populated by the PerformanceObserver in `run()`
   */
  #results: PerformanceEntry[] = [];

  public constructor(private readonly name: string) {}
  // Lifecycle hooks default to no-ops so every phase can be called
  // unconditionally in `run()` even when the user configured none of them.
  #setup: () => C | Promise<C> = () => ({} as C);
  #subject: (ctx: C) => void = () => undefined;
  #beforeEach: (ctx: C) => void = () => undefined;
  #afterEach: (ctx: C) => void = () => undefined;
  #teardown: (ctx: C) => void = () => undefined;

  /**
   * Create a setup function to be run once before the benchmark, optionally
   * return a context object to be used across runs and lifecycle functions.
   */
  public setup<T extends C>(fn: () => T | Promise<T>) {
    this.#setup = fn;
    // Re-brand `this` so the context type flows from the setup function's
    // return type into the other lifecycle hooks.
    return this as unknown as Benchmark<T>;
  }

  /**
   * Create a teardown function to be run once after all benchmark runs. Use to
   * clean up your mess.
   */
  public teardown(fn: (ctx: C) => void) {
    this.#teardown = fn;
    return this;
  }

  /**
   * Create a beforeEach function to be run before each iteration. Use to reset
   * state the subject may have changed.
   */
  public beforeEach(fn: (ctx: C) => void) {
    this.#beforeEach = fn;
    return this;
  }

  /**
   * Create an afterEach function to be run after each iteration. Use to reset
   * state the subject may have changed.
   */
  public afterEach(fn: (ctx: C) => void) {
    this.#afterEach = fn;
    return this;
  }

  /**
   * Setup the subject to be measured.
   */
  public subject(fn: (ctx: C) => void) {
    this.#subject = fn;
    return this;
  }

  /**
   * Set the number of iterations to be run.
   */
  public iterations(i: number) {
    this.#iterations = i;
    return this;
  }

  /**
   * Run and measure the benchmark.
   *
   * Wraps the subject with `performance.timerify` and drives it
   * `#iterations` times; a `PerformanceObserver` collects the resulting
   * 'function' entries and resolves the returned promise with the
   * aggregated stats.
   */
  public async run(): Promise<Result> {
    const c = await this.#setup?.();
    return new Promise((ok) => {
      const wrapped = performance.timerify(this.#subject);
      const obs = new PerformanceObserver((list, observer) => {
        // NOTE(review): this assumes the observer callback fires exactly once
        // with all #iterations entries buffered; if entries were delivered in
        // multiple batches, only the first batch would be measured — confirm
        // against perf_hooks delivery semantics.
        this.#results = list.getEntries();
        performance.clearMarks();
        observer.disconnect();
        const durations = this.#results.map((i) => i.duration);
        const max = Math.max(...durations);
        const min = Math.min(...durations);
        // "variance" here is the range (max - min), not statistical variance.
        const variance = max - min;

        return ok({
          name: this.name,
          average:
            durations.reduce((accum, duration) => accum + duration, 0) /
            durations.length,
          max,
          min,
          variance,
          iterations: this.#results,
        });
      });
      obs.observe({ entryTypes: ['function'] });

      try {
        // The loop is synchronous; entries are observed asynchronously after
        // it completes, which is when the promise above resolves.
        for (let i = 0; i < this.#iterations; i++) {
          this.#beforeEach(c);
          wrapped(c);
          this.#afterEach(c);
        }
      } finally {
        // Teardown runs even if a hook or the subject throws.
        this.#teardown(c);
      }
    });
  }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,9 @@ | ||
import * as path from 'path'; | ||
|
||
export const fixturesDir = path.resolve(__dirname, '..', 'fixtures'); | ||
|
||
export const cdkTagv2_21_1 = 'v2.21.1'; | ||
export const cdkv2_21_1 = path.resolve( | ||
fixturesDir, | ||
`aws-cdk-lib@${cdkTagv2_21_1.replace(/\./g, '-')}.tgz`, | ||
); |
Oops, something went wrong.