Merge pull request #118 from movementlabsxyz/dev
Dev
l-monninger authored Mar 1, 2024
2 parents ca0d8d5 + 6f4b135 commit f725619
Showing 14 changed files with 1,184 additions and 1,102 deletions.
10 changes: 5 additions & 5 deletions .gitmodules
@@ -1,18 +1,18 @@
[submodule "aptos-core"]
path = vendors/aptos-core
url = https://github.com/movemntdev/aptos-core
url = https://github.com/movementlabsxyz/aptos-core
branch = canonical
[submodule "x25519-dalek"]
path = vendors/x25519-dalek
url = https://github.com/movemntdev/x25519-dalek
url = https://github.com/movementlabsxyz/x25519-dalek
[submodule "ed25519-dalek"]
path = vendors/ed25519-dalek
url = https://github.com/movemntdev/ed25519-dalek
url = https://github.com/movementlabsxyz/ed25519-dalek
[submodule "aptos-core-v2"]
path = vendors/aptos-core-v2
url = https://github.com/movemntdev/aptos-core
url = https://github.com/movementlabsxyz/aptos-core
branch = v2
[submodule "vendors/sui"]
path = vendors/sui
url = https://github.com/movemntdev/sui
url = https://github.com/movementlabsxyz/sui
branch = canonical
3 changes: 3 additions & 0 deletions m1/.e2e-benchmark-stats/summary.dat
@@ -0,0 +1,3 @@
Max TPS,0
Min TPS,0
Avg TPS,0
80 changes: 80 additions & 0 deletions m1/.e2e-benchmark-stats/tps_values.dat
@@ -0,0 +1,80 @@
140.033292834,0
140.03329275,0
140.03323325,0
140.033168583,0
140.033168208,0
140.033131208,0
140.032962583,0
140.032959208,0
130.031281834,0
130.031283458,0
130.031239583,0
130.031217708,0
130.030888417,0
130.030842625,0
130.030809875,0
130.030792208,0
110.025866375,0
110.02585925,0
110.025803958,0
110.025755209,0
110.025649708,0
110.025645333,0
110.025584875,0
110.025591583,0
100.024010084,0
100.023868375,0
100.0238345,0
100.023807042,0
100.023795084,0
100.023750541,0
100.023720833,0
100.023695209,0
80.019062834,0
80.018988458,0
80.018891292,0
80.018775209,0
80.018173125,0
80.018085375,0
80.017965875,0
80.01781625,0
70.017628708,0
70.017438458,0
70.017402625,0
70.017373459,0
70.017364292,0
70.017315792,0
70.017282375,0
70.016378166,0
50.01373425,0
50.01372975,0
50.013685042,0
50.013634833,0
50.013637083,0
50.013612667,0
50.013552958,0
50.013516208,0
40.011213292,0
40.011031916,0
40.010996541,0
40.010966084,0
40.010955167,0
40.0109005,0
40.010866375,0
40.010836417,0
20.006318625,0
20.006310875,0
20.006254542,0
20.006233625,0
20.006119333,0
20.006116291,0
20.006085542,0
20.006067917,0
10.005498708,0
10.005209291,0
10.00508525,0
10.004976292,0
10.004132875,0
10.004058834,0
10.003968583,0
10.003918458,0
9 changes: 9 additions & 0 deletions m1/.e2e-benchmark-stats/tps_windows.dat
@@ -0,0 +1,9 @@
140.034105292,16,0,16
125.03413525,0,0,0
110.034154917,16,0,16
95.03417375,0,0,0
80.034192709,16,0,16
65.034211334,0,0,0
50.034229625,16,0,16
35.034248042,0,0,0
20.034266292,16,0,16
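
The three new stats files added above are plain CSV, written by the write_summary_stats, write_records, and write_windows helpers introduced later in this commit: summary.dat holds the max/min/avg TPS, tps_values.dat holds one (elapsed seconds, success flag 0/1) row per recorded transaction, and tps_windows.dat holds one row per sampling window with the window's start expressed as seconds elapsed at write time, followed by total, successful, and failed transaction counts. A minimal, illustrative sketch for reading tps_windows.dat back for analysis — the read_windows helper here is hypothetical and not part of this commit:

use std::fs;
use std::io;

// Illustrative only: parse .e2e-benchmark-stats/tps_windows.dat into
// (window start secs, total, successes, failures) rows.
fn read_windows(path: &str) -> io::Result<Vec<(f64, u64, u64, u64)>> {
    let mut rows = Vec::new();
    for line in fs::read_to_string(path)?.lines() {
        let cols: Vec<&str> = line.split(',').collect();
        if cols.len() != 4 {
            continue; // skip malformed rows
        }
        rows.push((
            cols[0].parse().unwrap_or(0.0),
            cols[1].parse().unwrap_or(0),
            cols[2].parse().unwrap_or(0),
            cols[3].parse().unwrap_or(0),
        ));
    }
    Ok(rows)
}

fn main() -> io::Result<()> {
    for (start, total, ok, failed) in read_windows(".e2e-benchmark-stats/tps_windows.dat")? {
        println!("window @ {start:.3}s: {total} total, {ok} ok, {failed} failed");
    }
    Ok(())
}
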
139 changes: 124 additions & 15 deletions m1/e2e-benchmark/src/main.rs
@@ -39,6 +39,38 @@ static FAUCET_URL: Lazy<Url> = Lazy::new(|| {
.unwrap()
});

static WINDOW_SIZE: Lazy<u64> = Lazy::new(|| {
u64::from_str(
std::env::var("WINDOW_SIZE")
.as_ref()
.map(|s| s.as_str())
.unwrap_or("15"),
)
.unwrap()
});

static SIMULATION_SECONDS: Lazy<u64> = Lazy::new(|| {
u64::from_str(
std::env::var("SIMULATION_SECONDS")
.as_ref()
.map(|s| s.as_str())
.unwrap_or((60 * 120).to_string().as_str()),
)
.unwrap()
});

static START_TASKS: Lazy<usize> = Lazy::new(|| {
usize::from_str(
std::env::var("START_TASKS")
.as_ref()
.map(|s| s.as_str())
.unwrap_or((
1024 * 64 * 4
).to_string().as_str()),
)
.unwrap()
});

struct Statistics {
records: Vec<(Instant, bool)>, // (Timestamp, Success)
max_tps: f64,
@@ -71,16 +103,16 @@ impl Statistics {
if total_duration > 0.0 {
let mut current_time = start_time;
while current_time <= end_time {
- let window_end = current_time + Duration::from_secs(15);
+ let window_end = current_time + Duration::from_secs(WINDOW_SIZE.clone());
let count = self
.records
.iter()
.filter(|&&(time, _)| time >= current_time && time < window_end)
.filter(|&&(time, success)| success && time >= current_time && time < window_end)
.count();
- let tps = (count/15) as f64;
+ let tps = (count/WINDOW_SIZE.clone() as usize) as f64;
tps_values.push(tps);

- current_time += Duration::from_secs(15);
+ current_time += Duration::from_secs(WINDOW_SIZE.clone());
}

if let Some(max_tps) = tps_values.iter().max_by(|x, y| x.partial_cmp(y).unwrap()) {
@@ -101,6 +133,83 @@ impl Statistics {
self.min_tps = 0.0;
}
}

fn write_summary_stats(&self) -> Result<()> {

// create a directory called .e2e-benchmark-stats, if it doesn't exist
std::fs::create_dir_all(".e2e-benchmark-stats")?;

// write summary stats to a file
let mut file = File::create(".e2e-benchmark-stats/summary.dat")?;
writeln!(file, "Max TPS,{}", self.max_tps)?;
writeln!(file, "Min TPS,{}", self.min_tps)?;
writeln!(file, "Avg TPS,{}", self.avg_tps)?;

Ok(())

}

fn write_records(&self) -> Result<()> {

// create a directory called .e2e-benchmark-stats, if it doesn't exist
std::fs::create_dir_all(".e2e-benchmark-stats")?;

// write tps values to a file
let mut file = File::create(".e2e-benchmark-stats/tps_values.dat")?;
for (timestamp, success) in &self.records {
writeln!(file, "{},{}", timestamp.elapsed().as_secs_f64(), *success as usize)?;
}

Ok(())

}

fn write_windows(&self) -> Result<()> {

// create a directory called .e2e-benchmark-stats, if it doesn't exist
std::fs::create_dir_all(".e2e-benchmark-stats")?;

// write tps values to a file
let mut file = File::create(".e2e-benchmark-stats/tps_windows.dat")?;
let mut current_time = self.records.first().map(|x| x.0).unwrap_or(Instant::now());
while current_time <= self.records.last().map(|x| x.0).unwrap_or(Instant::now()) {
let window_end = current_time + Duration::from_secs(WINDOW_SIZE.clone());
let total_count = self
.records
.iter()
.filter(|&&(time, _)| time >= current_time && time < window_end)
.count();
let sucess_count = self
.records
.iter()
.filter(|&&(time, success)| time >= current_time && time < window_end && success)
.count();
let failure_count = total_count - sucess_count;

writeln!(
file,
"{},{},{},{}",
current_time.elapsed().as_secs_f64(),
total_count,
sucess_count,
failure_count
)?;
current_time += Duration::from_secs(WINDOW_SIZE.clone());
}

Ok(())

}

fn write_stats(&self) -> Result<()> {
self.write_summary_stats()?;
self.write_records()?;
self.write_windows()?;

Ok(())
}


}

#[tokio::main]
@@ -110,7 +219,7 @@ async fn main() -> Result<()> {
// Setup for benchmarking, transaction sending, etc., goes here
run_simulation(
stats.clone(),
- Duration::from_secs(60 * 120)
+ Duration::from_secs(SIMULATION_SECONDS.clone()),
).await?;

// Wait for benchmark to finish
@@ -121,12 +230,7 @@ async fn main() -> Result<()> {
println!("Max TPS: {}, Min TPS: {}", stats.max_tps, stats.min_tps);

// Write statistics to a file
let mut file = File::create("benchmark_stats.dat")?;
writeln!(file, "Max TPS: {}", stats.max_tps)?;
writeln!(file, "Min TPS: {}", stats.min_tps)?;
for (timestamp, success) in &stats.records {
writeln!(file, "{}, {}", timestamp.elapsed().as_secs_f64(), if *success { "success" } else { "failure" })?;
}
stats.write_stats()?;

Ok(())
}
@@ -148,19 +252,22 @@ async fn perform_transaction_batch(
let mut alice = LocalAccount::generate(&mut rand::rngs::OsRng);
let mut bob = LocalAccount::generate(&mut rand::rngs::OsRng); // <:!:section_2

println!("Alice: {}", alice.address());
println!("Bob: {}", bob.address());

// Create the accounts on chain, but only fund Alice.
// :!:>section_3
match faucet_client
.fund(alice.address(), 100_000_000)
- .await
- .context("Failed to fund Alice's account") {
+ .await {

Ok(_) => {
println!("Alice's account funded successfully");
let mut stats = stats.lock().await;
stats.record_transaction(true);
},
- Err(_) => {
+ Err(e) => {
+ println!("Failed to fund Alice's account: {}", e);
let mut stats = stats.lock().await;
stats.record_transaction(false);
}
@@ -234,7 +341,9 @@ async fn perform_transaction_batch(
async fn run_simulation(stats: Arc<Mutex<Statistics>>, duration: Duration) -> Result<()> {
let run_flag = Arc::new(AtomicBool::new(true));
let max_tps = Arc::new(AtomicUsize::new(0));
- let current_tasks = Arc::new(AtomicUsize::new(1024 * 64 * 4)); //
+ let current_tasks = Arc::new(AtomicUsize::new(
+ START_TASKS.clone(),
+ )); //

// Function to adjust tasks based on performance
let adjust_tasks = |max_tps: &AtomicUsize, current_tps: usize, current_tasks: &AtomicUsize| {
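
The three new statics introduced in this file (WINDOW_SIZE, SIMULATION_SECONDS, START_TASKS) repeat the same read-env-var-or-fall-back-to-default pattern. As an illustration only — not part of this commit — the pattern could be factored into a small generic helper; the env_or function and the main below are hypothetical:

use std::str::FromStr;

// Hypothetical helper: parse an environment variable into T, falling back
// to `default` when the variable is unset or fails to parse.
fn env_or<T: FromStr>(key: &str, default: T) -> T {
    std::env::var(key)
        .ok()
        .and_then(|s| s.parse().ok())
        .unwrap_or(default)
}

fn main() {
    // Defaults mirror the values used by the statics in main.rs.
    let window_size: u64 = env_or("WINDOW_SIZE", 15);
    let simulation_seconds: u64 = env_or("SIMULATION_SECONDS", 60 * 120);
    let start_tasks: usize = env_or("START_TASKS", 1024 * 64 * 4);
    println!("window={window_size}s simulation={simulation_seconds}s start_tasks={start_tasks}");
}

Either way, setting WINDOW_SIZE, SIMULATION_SECONDS, or START_TASKS in the environment before launching the benchmark overrides the defaults of 15 seconds, 7200 seconds, and 262144 tasks respectively.
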
9 changes: 5 additions & 4 deletions m1/subnet/src/block/mod.rs
@@ -215,19 +215,20 @@ impl Block {
/// Mark this [`Block`](Block) accepted and updates [`State`](crate::state::State) accordingly.
pub async fn accept(&mut self) -> io::Result<()> {
log::info!("accept block height {} ", self.height);
- self.inner_build().await?;
+ self.execute_and_commit().await?;
self.set_status(choices::status::Status::Accepted);
// only decided blocks are persistent -- no reorg
self.state.write_block(&self.clone()).await?;
self.state.set_last_accepted_block(&self.id()).await?;
self.state.remove_verified(&self.id()).await;
// ! this should not be removed from the verified blocks
// self.state.remove_verified(&self.id()).await;
Ok(())
}

async fn inner_build(&self) -> io::Result<()> {
async fn execute_and_commit(&self) -> io::Result<()> {
if let Some(vm_) = self.state.vm.as_ref() {
let vm = vm_.read().await;
- return vm.inner_build_block(self.data.clone()).await.map_err(
+ return vm.execute_and_commit_block(self.data.clone()).await.map_err(
|e| Error::new(ErrorKind::Other, format!("failed to build block: {}", e)),
);
}
