Skip to content

Commit

Permalink
[subsystem-benchmarks] Save results to json (paritytech#3829)
Browse files Browse the repository at this point in the history
Here we add the ability to save subsystem benchmark results in JSON
format so that they can be displayed as graphs.

To draw graphs, CI team will use
[github-action-benchmark](https://github.com/benchmark-action/github-action-benchmark).
Since we are using custom benchmarks, we need to prepare [a specific
data
type](https://github.com/benchmark-action/github-action-benchmark?tab=readme-ov-file#examples):
```
[
    {
        "name": "CPU Load",
        "unit": "Percent",
        "value": 50
    }
]
```

Then we'll get graphs like this: 

![example](https://raw.githubusercontent.com/rhysd/ss/master/github-action-benchmark/main.png)

[A live page with
graphs](https://benchmark-action.github.io/github-action-benchmark/dev/bench/)

---------

Co-authored-by: ordian <[email protected]>
  • Loading branch information
2 people authored and dharjeezy committed Apr 9, 2024
1 parent 8b135de commit 8de4f3e
Show file tree
Hide file tree
Showing 8 changed files with 66 additions and 56 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ use polkadot_subsystem_bench::{
availability::{benchmark_availability_write, prepare_test, TestState},
configuration::TestConfiguration,
usage::BenchmarkUsage,
utils::save_to_file,
};
use std::io::Write;

Expand Down Expand Up @@ -60,7 +61,13 @@ fn main() -> Result<(), String> {
})
.collect();
println!("\rDone!{}", " ".repeat(BENCH_COUNT));

let average_usage = BenchmarkUsage::average(&usages);
save_to_file(
"charts/availability-distribution-regression-bench.json",
average_usage.to_chart_json().map_err(|e| e.to_string())?,
)
.map_err(|e| e.to_string())?;
println!("{}", average_usage);

// We expect no variance for received and sent
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ use polkadot_subsystem_bench::{
},
configuration::TestConfiguration,
usage::BenchmarkUsage,
utils::save_to_file,
};
use std::io::Write;

Expand Down Expand Up @@ -58,7 +59,13 @@ fn main() -> Result<(), String> {
})
.collect();
println!("\rDone!{}", " ".repeat(BENCH_COUNT));

let average_usage = BenchmarkUsage::average(&usages);
save_to_file(
"charts/availability-recovery-regression-bench.json",
average_usage.to_chart_json().map_err(|e| e.to_string())?,
)
.map_err(|e| e.to_string())?;
println!("{}", average_usage);

// We expect no variance for received and sent
Expand Down
1 change: 1 addition & 0 deletions polkadot/node/subsystem-bench/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,7 @@ prometheus_endpoint = { package = "substrate-prometheus-endpoint", path = "../..
prometheus = { version = "0.13.0", default-features = false }
serde = { workspace = true, default-features = true }
serde_yaml = { workspace = true }
serde_json = { workspace = true }

polkadot-node-core-approval-voting = { path = "../core/approval-voting" }
polkadot-approval-distribution = { path = "../network/approval-distribution" }
Expand Down
2 changes: 1 addition & 1 deletion polkadot/node/subsystem-bench/src/lib/environment.rs
Original file line number Diff line number Diff line change
Expand Up @@ -404,7 +404,7 @@ impl TestEnvironment {
let total_cpu = test_env_cpu_metrics.sum_by("substrate_tasks_polling_duration_sum");

usage.push(ResourceUsage {
resource_name: "Test environment".to_string(),
resource_name: "test-environment".to_string(),
total: total_cpu,
per_block: total_cpu / num_blocks,
});
Expand Down
1 change: 1 addition & 0 deletions polkadot/node/subsystem-bench/src/lib/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,3 +26,4 @@ pub(crate) mod keyring;
pub(crate) mod mock;
pub(crate) mod network;
pub mod usage;
pub mod utils;
28 changes: 28 additions & 0 deletions polkadot/node/subsystem-bench/src/lib/usage.rs
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,27 @@ impl BenchmarkUsage {
_ => None,
}
}

// Prepares a json string for a graph representation
// See: https://github.com/benchmark-action/github-action-benchmark?tab=readme-ov-file#examples
pub fn to_chart_json(&self) -> color_eyre::eyre::Result<String> {
let chart = self
.network_usage
.iter()
.map(|v| ChartItem {
name: v.resource_name.clone(),
unit: "KiB".to_string(),
value: v.per_block,
})
.chain(self.cpu_usage.iter().map(|v| ChartItem {
name: v.resource_name.clone(),
unit: "seconds".to_string(),
value: v.per_block,
}))
.collect::<Vec<_>>();

Ok(serde_json::to_string(&chart)?)
}
}

fn check_usage(
Expand Down Expand Up @@ -151,3 +172,10 @@ impl ResourceUsage {
}

type ResourceUsageCheck<'a> = (&'a str, f64, f64);

/// A single datapoint in the JSON document consumed by github-action-benchmark.
/// Field names must stay `name`/`unit`/`value` — they are part of the tool's
/// expected serialized format.
#[derive(Debug, Serialize)]
pub struct ChartItem {
// Benchmark/resource name displayed on the chart.
pub name: String,
// Measurement unit label (e.g. "KiB" or "seconds").
pub unit: String,
// The measured per-block value.
pub value: f64,
}
75 changes: 20 additions & 55 deletions polkadot/node/subsystem-bench/src/lib/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,61 +16,26 @@

//! Test utils

use crate::usage::BenchmarkUsage;
use std::io::{stdout, Write};

pub struct WarmUpOptions<'a> {
/// The maximum number of runs considered for warming up.
pub warm_up: usize,
/// The number of runs considered for benchmarking.
pub bench: usize,
/// The difference in CPU usage between runs considered as normal
pub precision: f64,
/// The subsystems whose CPU usage is checked during warm-up cycles
pub subsystems: &'a [&'a str],
}

impl<'a> WarmUpOptions<'a> {
pub fn new(subsystems: &'a [&'a str]) -> Self {
Self { warm_up: 100, bench: 3, precision: 0.02, subsystems }
}
}

pub fn warm_up_and_benchmark(
options: WarmUpOptions,
run: impl Fn() -> BenchmarkUsage,
) -> Result<BenchmarkUsage, String> {
println!("Warming up...");
let mut usages = Vec::with_capacity(options.bench);

for n in 1..=options.warm_up {
let curr = run();
if let Some(prev) = usages.last() {
let diffs = options
.subsystems
.iter()
.map(|&v| {
curr.cpu_usage_diff(prev, v)
.ok_or(format!("{} not found in benchmark {:?}", v, prev))
})
.collect::<Result<Vec<f64>, String>>()?;
if !diffs.iter().all(|&v| v < options.precision) {
usages.clear();
}
}
usages.push(curr);
print!("\r{}%", n * 100 / options.warm_up);
if usages.len() == options.bench {
println!("\rTook {} runs to warm up", n.saturating_sub(options.bench));
break;
}
stdout().flush().unwrap();
}

if usages.len() != options.bench {
println!("Didn't warm up after {} runs", options.warm_up);
return Err("Can't warm up".to_string())
use std::{fs::File, io::Write};

// Saves a given string to a file at `path`, interpreted relative to the cargo
// workspace root, creating any missing parent directories.
//
// Improvements over the original: the `cargo locate-project` invocation and the
// UTF-8 decoding propagate errors via `?` instead of panicking with `unwrap()`,
// and a non-zero exit status from cargo is reported explicitly.
pub fn save_to_file(path: &str, value: String) -> color_eyre::eyre::Result<()> {
	// Resolve the workspace root via `cargo locate-project` so that relative
	// paths behave the same regardless of the current working directory.
	let output = std::process::Command::new(env!("CARGO"))
		.arg("locate-project")
		.arg("--workspace")
		.arg("--message-format=plain")
		.output()?;
	if !output.status.success() {
		return Err(color_eyre::eyre::eyre!("`cargo locate-project` failed"))
	}
	let manifest_path = std::str::from_utf8(&output.stdout)?;
	let workspace_dir = std::path::Path::new(manifest_path.trim())
		.parent()
		.ok_or_else(|| color_eyre::eyre::eyre!("workspace manifest path has no parent directory"))?;
	let path = workspace_dir.join(path);
	if let Some(dir) = path.parent() {
		std::fs::create_dir_all(dir)?;
	}
	let mut file = File::create(path)?;
	file.write_all(value.as_bytes())?;

	Ok(())
}

0 comments on commit 8de4f3e

Please sign in to comment.